lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/cli/utils.rs
lmapii/run_clang_format
153a3fd24813ce4ea8846bf38f1263dcb4f044db
use std::{fs, path}; use color_eyre::{eyre::eyre, eyre::WrapErr}; pub fn path_or_err<P>(path: P) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_as_buf = path::PathBuf::from(path.as_ref()); if !path_as_buf.exists() { return Err(eyre!("Path not found or permission denied")) .wrap_err(format!("'{}' is not a path", path_as_buf.to_string_lossy())); } Ok(path_as_buf) } pub fn file_or_err<P>(path: P) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_as_buf = path::PathBuf::from(path.as_ref()); if !path_as_buf.is_file() { return Err(eyre!("File not found or permission denied")) .wrap_err(format!("'{}' is not a file", path_as_buf.to_string_lossy())); } Ok(path_as_buf) } pub fn dir_or_err<P>(path: P) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_as_buf = path::PathBuf::from(path.as_ref()); let meta = fs::metadata(path.as_ref()).wrap_err(format!( "'{}' is not a directory", path_as_buf.to_string_lossy() ))?; if !meta.is_dir() { return Err(eyre!("Directory not found")).wrap_err(format!( "'{}' is not a directory", path_as_buf.to_string_lossy() )); } Ok(path_as_buf) } pub fn file_with_name<P>(path: P, name: &str) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let buf = file_or_err(path.as_ref())?; let name_str = buf.to_string_lossy(); let file_name = path .as_ref() .file_name() .and_then(std::ffi::OsStr::to_str) .ok_or(eyre!(format!( "Expected file with name '{}', got '{}'", name, name_str )))?; if file_name.to_lowercase() != name.to_lowercase() { return Err(eyre!(format!( "Expected file with name '{}', got '{}'", name, name_str ))); } Ok(buf) } pub fn file_with_ext<P>(path: P, ext: &str, strict: bool) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let buf = file_or_err(path.as_ref())?; let name = buf.to_string_lossy(); let file_ext = path .as_ref() .extension() .and_then(std::ffi::OsStr::to_str) .ok_or(eyre!(format!( "Expected file with extension '{}', got file '{}'", ext, name )))?; 
let ext_minus = match ext.chars().next() { Some(c) if c == '.' && !strict => &ext[1..], _ => ext, }; if ext_minus.to_lowercase() != file_ext.to_lowercase() { return Err(eyre!(format!( "Expected file extension '{}', got '{}'", ext_minus, file_ext ))); } Ok(buf) } pub fn file_with_name_or_ext<P>(path: P, name_or_ext: &str) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let buf = file_or_err(path.as_ref())?; let f_for_name = file_with_name(path.as_ref(), name_or_ext); let f_for_ext = file_with_ext(path.as_ref(), name_or_ext, false); match f_for_name { Ok(path) => Ok(path), Err(_) => match f_for_ext { Ok(path) => Ok(path), Err(_) => Err(eyre!(format!( "Expected file with name or extension '{}', got '{}'", name_or_ext, buf.to_string_lossy() ))), }, } } pub fn filename_or_exists<P>(path: P, root: Option<P>) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { if path.as_ref().is_absolute() && !path.as_ref().exists() { return Err(eyre::eyre!(format!( "'{}' does not exist", path.as_ref().to_string_lossy() ))); } let is_file = path .as_ref() .file_name() .and_then(|file_name| (path.as_ref().as_os_str() == file_name).then(|| true)) .is_some(); if is_file { return Ok(path.as_ref().to_path_buf()); } if path.as_ref().is_relative() { let full_path = match root { None => path.as_ref().to_path_buf(), Some(root) => { let mut full_path = root.as_ref().to_path_buf(); full_path.push(path.as_ref()); full_path } }; if !full_path.exists() { return Err(eyre::eyre!(format!( "'{}' does not exist", path.as_ref().to_string_lossy() ))); } return Ok(full_path); } Ok(path.as_ref().to_path_buf()) } pub fn filename_or_exists_with_ext<P>( path: P, root: Option<P>, ext: Option<&str>, ) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_buf = path.as_ref().to_path_buf(); let root_buf = root.map(|p| p.as_ref().to_path_buf()); let mut checks = vec![filename_or_exists(path_buf, root_buf.clone())]; if let Some(ext) = ext { let mut try_ext = 
path.as_ref().to_path_buf(); try_ext.set_extension(ext); checks.push(filename_or_exists(try_ext, root_buf)); } let has_path = checks.iter().find(|result| result.is_ok()); if let Some(cmd) = has_path { return Ok(cmd.as_ref().unwrap().as_path().to_path_buf()); } Err(checks.remove(0).unwrap_err()) } pub fn executable_or_exists<P>(path: P, root: Option<P>) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let ext = if cfg!(windows) { Some("exe") } else { None }; filename_or_exists_with_ext(path, root, ext) } #[cfg(test)] mod tests { use super::*; #[test] fn test_path() { let path = path::Path::new("some/path/to/.clang-format"); let file_name = path.file_name().and_then(std::ffi::OsStr::to_str).unwrap(); assert_eq!(".clang-format", file_name.to_lowercase()); } }
use std::{fs, path}; use color_eyre::{eyre::eyre, eyre::WrapErr}; pub fn path_or_err<P>(path: P) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_as_buf = path::PathBuf::from(path.as_ref()); if !path_as_buf.exists() { return Err(eyre!("Path not found or permission denied")) .wrap_err(format!("'{}' is not a path", path_as_buf.to_string_lossy())); } Ok(path_as_buf) } pub fn file_or_err<P>(path: P) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_as_buf = path::PathBuf::from(path.as_ref()); if !path_as_buf.is_file() { return Err(eyre!("File not found or permission denied")) .wrap_err(format!("'{}' is not a file", path_as_buf.to_string_lossy())); } Ok(path_as_buf) } pub fn dir_or_err<P>(path: P) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_as_buf = path::PathBuf::from(path.as_ref()); let meta = fs::metadata(path.as_ref()).wrap_err(format!( "'{}' is not a directory", path_as_buf.to_string_lossy() ))?; if !meta.is_dir() { return Err(eyre!("Directory not found")).wrap_err(format!( "'{}' is not a directory", path_as_buf.to_string_lossy() )); } Ok(path_as_buf) } pub fn file_with_name<P>(path: P, name: &str) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let buf = file_or_err(path.as_ref())?; let name_str = buf.to_string_lossy(); let file_name = path .as_ref() .file_name() .and_then(std::ffi::OsStr::to_str) .ok_or(eyre!(format!( "Expected file with name '{}', got '{}'", name, name_str )))?; if file_name.to_lowercase() != name.to_lowercase() { return Err(eyre!(format!( "Expected file with name '{}', got '{}'", name, name_str ))); } Ok(buf) } pub fn file_with_ext<P>(path: P, ext: &str, strict: bool) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let buf = file_or_err(path.as_ref())?; let name = buf.to_string_lossy(); let file_ext = path .as_ref() .extension() .and_then(std::ffi::OsStr::to_str) .ok_or(eyre!(format!( "Expected file with extension '{}', got file '{}'", ext, name )))?; 
let ext_minus = match ext.chars().next() { Some(c) if c == '.' && !strict => &ext[1..], _ => ext, }; if ext_minus.to_lowercase() != file_ext.to_lowercase() { return Err(eyre!(format!( "Expected file extension '{}', got '{}'", ext_minus, file_ext ))); } Ok(buf) } pub fn file_with_name_or_ext<P>(path: P, name_or_ext: &str) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let buf = file_or_err(path.as_ref())?; let f_for_name = file_with_name(path.as_ref(), name_or_ext); let f_for_ext = file_with_ext(path.as_ref(), name_or_ext, false); match f_for_name { Ok(path) => Ok(path), Err(_) => match f_for_ext { Ok(path) => Ok(path), Err(_) => Err(eyre!(format!( "Expected file with name or extension '{}', got '{}'", name_or_ext, buf.to_string_lossy() ))), }, } } pub fn filename_or_exists<P>(path: P, root: Option<P>) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { if path.as_ref().is_absolute() && !path.as_ref().exists() { return Err(eyre::eyre!(format!( "'{}' does not exist", path.as_ref().to_string_lossy() ))); } let is_file = path .as_ref() .file_name() .and_then(|file_name| (path.as_ref().as_os_str() == file_name).then(|| true)) .is_some(); if is_file { return Ok(path.as_ref().to_path_buf()); } if path.as_ref().is_relative() { let full_path = match root { None => path.as_ref().to_path_buf(), Some(root) => { let mut full_path = root.as_ref().to_path_buf(); full_path.push(path.as_ref()); full_path } }; if !full_path.exists() { return Err(eyre::eyre!(format!( "'{}' does not exist", path.as_ref().to_string_lossy() ))); } return Ok(full_path); } Ok(path.as_ref().to_path_buf()) } pub fn filename_or_exists_with_ext<P>( path: P, root: Option<P>, ext: Option<&str>, ) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let path_buf = path.as_ref().to_path_buf(); let root_buf = root.map(|p| p.as_ref().to_path_buf()); let mut checks = vec![filename_or_exists(path_buf, root_buf.clone())]; if let Some(ext) = ext { let mut try_ext = 
path.as_ref().to_path_buf(); try_ext.set_extension(ext); checks.push(filename_or_exists(try_ext, root_buf)); } let has_path = checks.iter().find(|result| result.is_ok()); if let Some(cmd) = has_path { return Ok(cmd.as_ref().unwrap().as_path().to_path_buf()); } Err(checks.remove(0).unwrap_err()) } pub fn executable_or_exists<P>(path: P, root: Option<P>) -> eyre::Result<path::PathBuf> where P: AsRef<path::Path>, { let ext = if cfg!(windows) { Some("exe") } else { None }; filename_or_exists_with_ext(path, root, ext) } #[cfg(test)] mod tests { use super::*; #[test]
rcase()); } }
fn test_path() { let path = path::Path::new("some/path/to/.clang-format"); let file_name = path.file_name().and_then(std::ffi::OsStr::to_str).unwrap(); assert_eq!(".clang-format", file_name.to_lowe
function_block-random_span
[ { "content": "fn crate_root_rel(path: &str) -> path::PathBuf {\n\n crate_root().join(path)\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 5, "score": 193585.52881078637 }, { "content": "pub fn match_paths<P>(\n\n candidates: Vec<globmatch::Matcher<P>>,\n\n filter: Option<Vec<globmatch::GlobSet>>,\n\n filter_post: Option<Vec<globmatch::GlobSet>>,\n\n) -> (Vec<path::PathBuf>, Vec<path::PathBuf>)\n\nwhere\n\n P: AsRef<path::Path>,\n\n{\n\n let (paths, filtered) = globmatch::wrappers::match_paths(candidates, filter, filter_post);\n\n\n\n let paths = paths\n\n .into_iter()\n\n .filter(|path| path.as_path().is_file())\n\n .collect(); // accept only files\n\n\n\n (paths, filtered)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/lib/globs.rs", "rank": 9, "score": 173935.57521825182 }, { "content": "fn crate_root() -> path::PathBuf {\n\n path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 11, "score": 161626.71985681972 }, { "content": "pub fn style_and_root(data: &cli::Data) -> eyre::Result<Option<(path::PathBuf, path::PathBuf)>> {\n\n let style_file = resolve_style_file(data)?;\n\n let style_root = match &data.json.style_root {\n\n None => None,\n\n Some(path) => {\n\n let path = if path.is_absolute() {\n\n path::PathBuf::from(path.as_path())\n\n } else {\n\n let mut full_path = path::PathBuf::from(data.json.root.as_path());\n\n full_path.push(path);\n\n full_path\n\n };\n\n Some(\n\n utils::dir_or_err(path.as_path())\n\n .wrap_err(\"Invalid configuration for 'styleRoot'\")\n\n .suggestion(\n\n \"Please make sure that 'styleRoot' is a valid \\\n\n directory and check the access permissions\",\n\n )?\n\n .canonicalize()\n", "file_path": "src/lib/resolve.rs", "rank": 12, "score": 143222.82529475744 }, { "content": "fn run_cmd_and_assert(cmd: &mut Command, should_pass: bool) {\n\n let output = cmd.output().unwrap();\n\n\n\n if output.status.success() != should_pass {\n\n println!(\"status: {}\", 
output.status);\n\n println!(\"{}\", String::from_utf8(output.stdout).unwrap());\n\n println!(\"{}\", String::from_utf8(output.stderr).unwrap());\n\n }\n\n\n\n if cfg!(windows) {\n\n // on windows deleting files (the temporary clang-format file) can take some time\n\n thread::sleep(time::Duration::from_millis(500));\n\n }\n\n\n\n assert_eq!(output.status.success(), should_pass);\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 13, "score": 128805.40151733179 }, { "content": "pub fn build_matchers_from<'a, P>(\n\n globs: &'a [String],\n\n root: P,\n\n field: &str,\n\n file: &str,\n\n) -> eyre::Result<Vec<globmatch::Matcher<'a, path::PathBuf>>>\n\nwhere\n\n P: AsRef<path::Path>,\n\n{\n\n // https://stackoverflow.com/a/33217302/7281683\n\n let globs: Vec<_> = globs.iter().map(|s| &**s).collect();\n\n\n\n wrap_result(\n\n globmatch::wrappers::build_matchers(&globs, root).map_err(|err| eyre!(err)),\n\n field,\n\n file,\n\n )\n\n}\n\n\n", "file_path": "src/lib/globs.rs", "rank": 14, "score": 120818.03524048417 }, { "content": "pub fn command(data: &cli::Data) -> eyre::Result<path::PathBuf> {\n\n let mut from_json = false;\n\n\n\n let cmd = match &data.json.command {\n\n None => match &data.command {\n\n // use default value if not specified in configuration file nor as parameter\n\n None => path::PathBuf::from(\"clang-format\"),\n\n // cmd defined as CLI parameter but not in the .json configuration file\n\n Some(cmd_cli) => path::PathBuf::from(cmd_cli.as_path()),\n\n },\n\n Some(cmd_cfg) => match &data.command {\n\n // cmd defined in the .json configuration file but not as CLI parameter\n\n None => {\n\n from_json = true;\n\n path::PathBuf::from(cmd_cfg.as_path())\n\n }\n\n // cmd defined in both, the .json configuration file and as CLI parameter\n\n Some(cmd_cli) => {\n\n log::debug!(\n\n \"Override detected:\\nCommand '{}' \\\n", "file_path": "src/lib/resolve.rs", "rank": 15, "score": 119953.42012891747 }, { "content": "fn log_pretty() -> bool {\n\n // fancy 
logging using indicatif is only done for log level \"info\". when debugging we\n\n // do not use a progress bar, if info is not enabled at all (\"quiet\") then the progress\n\n // is also not shown\n\n !log::log_enabled!(log::Level::Debug) && log::log_enabled!(log::Level::Info)\n\n}\n\n\n", "file_path": "src/lib/mod.rs", "rank": 16, "score": 112831.30852679753 }, { "content": "pub fn setup(matches: &clap::ArgMatches) {\n\n let lvl = log_level(matches);\n\n\n\n env_logger::Builder::new()\n\n .format(move |f, record| {\n\n // Color::White renders as gray on black background terminals\n\n let mut s = f.style();\n\n let (lvl_str, s) = match record.level() {\n\n log::Level::Error => (\"<e>\", s.set_bold(true).set_color(fmt::Color::Red)),\n\n log::Level::Warn => (\"<w>\", s.set_bold(true).set_color(fmt::Color::Yellow)),\n\n log::Level::Info => (\"<i>\", s.set_bold(false).set_color(fmt::Color::White)),\n\n log::Level::Debug => (\"<d>\", s.set_bold(false).set_color(fmt::Color::Blue)),\n\n log::Level::Trace => (\"<t>\", s.set_bold(false).set_color(fmt::Color::Magenta)),\n\n };\n\n\n\n let (target, tstamp) = match lvl {\n\n l if l >= log::Level::Debug => (record.module_path(), Some(f.timestamp_millis())),\n\n _ => (None, None), // f.timestamp_seconds()),\n\n };\n\n\n", "file_path": "src/cli/logging.rs", "rank": 17, "score": 104045.56087907772 }, { "content": "fn resolve_style_file(data: &cli::Data) -> eyre::Result<eyre::Result<path::PathBuf>> {\n\n let style_json = match &data.json.style_file {\n\n None => None,\n\n Some(path) => {\n\n let mut full_path = path::PathBuf::from(data.json.root.as_path());\n\n full_path.push(path);\n\n // do not perform the validation for the 'styleFile' yet since a valid override\n\n // might have been passed as parameter to the tool\n\n Some(full_path)\n\n }\n\n };\n\n\n\n let style = match style_json {\n\n None => match &data.style {\n\n None => Err(eyre::eyre!(\n\n \"Style file must either be specified as \\\n\n command-line parameter or 
within the configuration file\"\n\n )),\n\n // style defined as CLI parameter but not in the .json configuration file\n\n Some(s_cli) => Ok(path::PathBuf::from(s_cli.as_path()).canonicalize().unwrap()),\n", "file_path": "src/lib/resolve.rs", "rank": 18, "score": 99298.11181452393 }, { "content": "#[test]\n\nfn invoke_arg_check() {\n\n // given: configuration file where --check should fail\n\n let json = crate_root_rel(\"test-files/json/test-err-format.json\");\n\n run_cmd_and_assert(cmd_with_path().arg(json.as_os_str()).arg(\"--check\"), false);\n\n\n\n // given: configuration file where --check should fail\n\n let json = crate_root_rel(\"test-files/json/test-ok-format.json\");\n\n run_cmd_and_assert(cmd_with_path().arg(json.as_os_str()).arg(\"--check\"), true);\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 19, "score": 96005.49716664253 }, { "content": "fn wrap_result<T>(result: eyre::Result<T>, field: &str, file: &str) -> eyre::Result<T> {\n\n result\n\n .wrap_err(format!(\"Error while parsing '{}'\", field))\n\n .suggestion(format!(\n\n \"Check the format of the field '{}' in the provided file '{}'.\",\n\n field, file\n\n ))\n\n}\n\n\n", "file_path": "src/lib/globs.rs", "rank": 20, "score": 94342.53909603402 }, { "content": "fn cmd_with_path() -> Command {\n\n let mut cmd = cmd();\n\n cmd.env(\"PATH\", crate_root().join(\"artifacts/clang\"));\n\n cmd\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 21, "score": 93425.28402818268 }, { "content": "fn place_style_file(\n\n file_and_root: Option<(path::PathBuf, path::PathBuf)>,\n\n step: &mut LogStep,\n\n) -> eyre::Result<Option<path::PathBuf>> {\n\n if file_and_root.is_none() {\n\n // in case no style file has been specified there's nothing to do\n\n return Ok(None);\n\n }\n\n\n\n // the style file `src` should be copied to the destination directory `dst`\n\n let (src_file, dst_root) = file_and_root.unwrap();\n\n let mut dst_file = path::PathBuf::from(dst_root.as_path());\n\n // by adding the filename of 
the style file we get the final name of the destination file\n\n dst_file.push(\".clang-format\");\n\n\n\n // it may happen that there is already a .clang-format file at the destination folder, e.g.,\n\n // because the user placed it there while working with an editor supporting `clang-format`.\n\n // in such a case we provide feedback by comparing the file contents and abort with an error\n\n // if they do not match.\n\n if dst_file.exists() {\n", "file_path": "src/lib/mod.rs", "rank": 22, "score": 91609.88161426909 }, { "content": "pub fn run(data: cli::Data) -> eyre::Result<()> {\n\n let start = std::time::Instant::now();\n\n\n\n log::info!(\" \");\n\n let mut step = LogStep::new();\n\n\n\n let style_and_root = resolve::style_and_root(&data)?;\n\n if let Some((style_file, _)) = &style_and_root {\n\n log::info!(\n\n \"{} Found style file {}\",\n\n step.next(),\n\n console::style(style_file.to_string_lossy()).bold(),\n\n );\n\n } else {\n\n log::info!(\n\n \"{} No style file specified, assuming .clang-format exists in the project tree\",\n\n step.next()\n\n );\n\n }\n\n\n", "file_path": "src/lib/mod.rs", "rank": 23, "score": 87820.50565675819 }, { "content": "pub fn build_glob_set_from<'a>(\n\n filter: &'a Option<Vec<String>>,\n\n field: &str,\n\n file: &str,\n\n) -> eyre::Result<Option<Vec<globmatch::GlobSet<'a>>>> {\n\n let filter = filter\n\n .as_ref()\n\n .map(|filter| filter.iter().map(|s| &**s).collect());\n\n\n\n wrap_result(\n\n globmatch::wrappers::build_glob_set(&filter, !cfg!(windows)).map_err(|err| eyre!(err)),\n\n field,\n\n file,\n\n )\n\n}\n\n\n", "file_path": "src/lib/globs.rs", "rank": 24, "score": 81852.57449536031 }, { "content": "#[test]\n\nfn invoke_subs() {\n\n // an empty command fails since <JSON> is required\n\n cmd().assert().failure();\n\n\n\n // sub-commands need no parameters.\n\n let empty_ok = vec![\"help\", \"schema\", \"--version\"];\n\n for arg in empty_ok.into_iter() {\n\n cmd().arg(arg).assert().success();\n\n }\n\n}\n\n\n", 
"file_path": "tests/invoke.rs", "rank": 25, "score": 71569.97479557015 }, { "content": "#[test]\n\nfn invoke_quiet() {\n\n fn assert_quiet(cmd: &mut Command, expect_quiet: bool) {\n\n let output = cmd.output().unwrap();\n\n\n\n let stdout = String::from_utf8(output.stdout).unwrap();\n\n let stderr = String::from_utf8(output.stderr).unwrap();\n\n\n\n println!(\"status: {}\", output.status);\n\n println!(\"{}\", stdout);\n\n println!(\"{}\", stderr);\n\n\n\n if expect_quiet {\n\n assert_eq!(0, stdout.len());\n\n assert_eq!(0, stderr.len());\n\n } else {\n\n assert_ne!(0, stderr.len());\n\n }\n\n }\n\n\n\n assert_quiet(\n", "file_path": "tests/invoke.rs", "rank": 26, "score": 71569.97479557015 }, { "content": "fn cmd() -> Command {\n\n let mut cmd = Command::cargo_bin(crate_name!()).unwrap();\n\n cmd.env_clear();\n\n cmd.env_remove(\"PATH\");\n\n cmd\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 27, "score": 69564.25952462542 }, { "content": "#[test]\n\nfn invoke_json_glob() {\n\n // test that an invalid glob leads to an error\n\n let json = crate_root_rel(\"test-files/json/test-err-invalid-glob.json\");\n\n run_cmd_and_assert(cmd_with_path().arg(json.as_os_str()), false);\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 28, "score": 69342.52782141551 }, { "content": "#[test]\n\nfn invoke_json_and_bin() {\n\n // empty .json file is not accepted\n\n let json = crate_root_rel(\"test-files/json/test-err-empty.json\");\n\n cmd().arg(json.as_os_str()).assert().failure();\n\n\n\n let json = crate_root_rel(\"test-files/json/test-ok-empty-paths.json\");\n\n // .json file with empty paths is accepted, but clang-format is not in the $PATH\n\n if cfg!(linux) {\n\n // TODO: cmd() does not seem to properly clear the env/path in linux ?\n\n // this might be related since we're invoking a command within a command\n\n // so on linux the original PATH might apply for each invocation within this command\n\n cmd().arg(json.as_os_str()).assert().failure();\n\n }\n\n // as soon 
as we add the path to clang-format to $PATH the execution is successful\n\n cmd_with_path().arg(json.as_os_str()).assert().success();\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 29, "score": 69342.52782141551 }, { "content": "#[test]\n\nfn invoke_json_command() {\n\n let combinations = vec![\n\n // path to command does not exist\n\n (\"test-files/json/test-err-invalid-command.json\", false),\n\n // path to command exists, but it is not an executable\n\n (\"test-files/json/test-err-invalid-command-file.json\", false),\n\n // command is not a path and an invalid executable name\n\n (\"test-files/json/test-err-invalid-command-name.json\", false),\n\n // valid command has been provided as path\n\n (\"test-files/json/test-ok-style-and-command.json\", true),\n\n ];\n\n\n\n for test in combinations.into_iter() {\n\n println!(\"checking {}\", test.0);\n\n let json = crate_root_rel(test.0);\n\n // using command WITHOUT path\n\n run_cmd_and_assert(cmd().arg(json.as_os_str()), test.1);\n\n }\n\n\n\n // test that also a valid executable name can be provided as command field (requires $PATH)\n\n let json = crate_root_rel(\"test-files/json/test-ok-style-and-command-name.json\");\n\n run_cmd_and_assert(cmd_with_path().arg(json.as_os_str()), true);\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 30, "score": 69342.52782141551 }, { "content": "#[test]\n\nfn invoke_arg_command() {\n\n // given: a valid .json configuration file\n\n let json = crate_root_rel(\"test-files/json/test-ok-style-and-command.json\");\n\n\n\n // paired with an invalid --command parameter, leads to an error (overrides valid .json)\n\n run_cmd_and_assert(\n\n cmd().arg(json.as_os_str()).arg(\"--command=i/do/not/exist\"),\n\n false,\n\n );\n\n\n\n // paired with an valid path as --command parameter, success\n\n run_cmd_and_assert(\n\n cmd().arg(json.as_os_str()).arg(format!(\n\n \"--command={}\",\n\n crate_root_rel(\"artifacts/clang/clang-format\").to_string_lossy()\n\n )),\n\n true,\n\n );\n\n\n\n 
// paired with an valid COMMAND as --command parameter, success\n", "file_path": "tests/invoke.rs", "rank": 31, "score": 69342.52782141551 }, { "content": "#[test]\n\nfn invoke_json_style() {\n\n let combinations = vec![\n\n // path to styleFile does not exist\n\n (\"test-files/json/test-err-invalid-style-path.json\", false),\n\n // path to styleFile exists, but this is not a style file\n\n (\"test-files/json/test-err-invalid-style-file.json\", false),\n\n // path to styleFile exists, file has name \".clang-format\", but no 'styleRoot' exists\n\n (\"test-files/json/test-err-no-root.json\", false),\n\n // path to styleFile exists, file has name \".clang-format\", but 'styleRoot' is an invalid path\n\n (\"test-files/json/test-err-invalid-root.json\", false),\n\n // path to styleFile exists, file has name \".clang-format\", and 'styleRoot' exists\n\n (\"test-files/json/test-ok-style.json\", true),\n\n // path to styleFile exists, file has name \"named.clang-format\", and 'styleRoot' exists\n\n (\"test-files/json/test-ok-style-named.json\", true),\n\n ];\n\n\n\n for test in combinations.into_iter() {\n\n println!(\"checking {}\", test.0);\n\n let json = crate_root_rel(test.0);\n\n run_cmd_and_assert(cmd_with_path().arg(json.as_os_str()), test.1);\n\n }\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 32, "score": 69342.52782141551 }, { "content": "#[test]\n\nfn invoke_arg_style() {\n\n // given: a valid .json configuration file\n\n let json = crate_root_rel(\"test-files/json/test-ok-style.json\");\n\n\n\n // paired with an invalid --style parameter, leads to an error (overrides valid .json)\n\n run_cmd_and_assert(\n\n cmd_with_path()\n\n .arg(json.as_os_str())\n\n .arg(\"--style=i/do/not/exist.clang-format\"),\n\n false,\n\n );\n\n\n\n // paired with an valid --style parameter, success\n\n run_cmd_and_assert(\n\n cmd_with_path().arg(json.as_os_str()).arg(format!(\n\n \"--style={}\",\n\n 
crate_root_rel(\"test-files/clang-format/named.clang-format\").to_string_lossy()\n\n )),\n\n true,\n\n );\n", "file_path": "tests/invoke.rs", "rank": 33, "score": 69342.52782141551 }, { "content": "fn log_step(\n\n prefix: &str,\n\n path: &path::Path,\n\n strip_path: &Option<path::PathBuf>,\n\n progress: &indicatif::ProgressBar,\n\n style: console::Style,\n\n) {\n\n // let style = console::Style::new().green().bold();\n\n let print_path = match strip_path {\n\n None => path,\n\n Some(strip) => path.strip_prefix(strip).unwrap(),\n\n };\n\n\n\n if log_pretty() {\n\n progress.println(format!(\n\n \"{:>12} {}\",\n\n style.apply_to(prefix),\n\n print_path.to_string_lossy(),\n\n ));\n\n progress.inc(1);\n\n } else {\n\n log::info!(\" + {}\", path.to_string_lossy());\n\n }\n\n}\n", "file_path": "src/lib/mod.rs", "rank": 34, "score": 69285.06168270754 }, { "content": "fn log_level(matches: &clap::ArgMatches) -> log::Level {\n\n if matches.is_present(\"quiet\") {\n\n log::Level::Error\n\n } else {\n\n match matches.occurrences_of(\"verbose\") {\n\n // ArgMatches::occurrences_of which will return 0 if the argument was not used at\n\n // runtime. This demo always displays error or warning messages, so by default -v is\n\n // always used. 
The --quiet option must be used to silence all.\n\n // _ => log::Level::Error,\n\n // _ => log::Level::Warn,\n\n 0 | 1 => log::Level::Info,\n\n 2 => log::Level::Debug,\n\n _ => log::Level::Trace, // 3 | _\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/cli/logging.rs", "rank": 35, "score": 61933.55229423445 }, { "content": "fn setup_jobs(jobs: Option<u8>) -> eyre::Result<()> {\n\n // configure rayon to use the specified number of threads (globally)\n\n if let Some(jobs) = jobs {\n\n let jobs = if jobs == 0 { 1u8 } else { jobs };\n\n let pool = rayon::ThreadPoolBuilder::new()\n\n .num_threads(jobs.into())\n\n .build_global();\n\n\n\n if let Err(err) = pool {\n\n return Err(err)\n\n .wrap_err(format!(\"Failed to create thread pool of size {}\", jobs))\n\n .suggestion(\"Please try to decrease the number of jobs\");\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/mod.rs", "rank": 36, "score": 53410.10342244034 }, { "content": "fn get_command(data: &cli::Data) -> eyre::Result<cmd::Runner> {\n\n let cmd_path = resolve::command(data)?;\n\n let mut cmd = cmd::Runner::new(&cmd_path);\n\n\n\n cmd.validate()\n\n .wrap_err(format!(\n\n \"Failed to execute the specified command '{}'\",\n\n cmd_path.to_string_lossy()\n\n ))\n\n .suggestion(format!(\n\n \"Please make sure that the command '{}' exists or is in your search path\",\n\n cmd_path.to_string_lossy()\n\n ))?;\n\n\n\n Ok(cmd)\n\n}\n\n\n", "file_path": "src/lib/mod.rs", "rank": 37, "score": 49130.39279644789 }, { "content": "fn main() -> eyre::Result<()> {\n\n // println!(\n\n // \"Executing \\n{} from \\n{}\\n\",\n\n // std::env::current_exe().unwrap().to_string_lossy(),\n\n // std::env::current_dir().unwrap().to_string_lossy()\n\n // );\n\n\n\n let data = cli::Builder::build().parse()?;\n\n lib::run(data)\n\n}\n", "file_path": "src/main.rs", "rank": 38, "score": 40905.433232354844 }, { "content": "static volatile bool _changeme = false;\n", "file_path": "test-files/c-demo/pkg_a/module_a/module_a.c", "rank": 39, 
"score": 39943.25640710018 }, { "content": "int main (int argc, const char *argv[]) // NOLINT : unused argument argv\n\n{\n\n // uint8_t i = 0;\n\n // module_a_init ();\n\n\n\n module_b_init ();\n\n module_c_init ();\n\n\n\n _some_variable[0] = 123; // NOLINT: magic number\n\n _some_variable[0] = 2;\n\n\n\n // for (i = 0; i < _MAXLOOP; i++)\n\n // {\n\n // _some_variable[0] += 1;\n\n // }\n", "file_path": "test-files/c-demo/project/src/main.c", "rank": 40, "score": 39943.25640710018 }, { "content": "static volatile uint8_t _some_variable[] = {1, 2, 3};\n", "file_path": "test-files/c-demo/project/src/main.c", "rank": 41, "score": 39943.25640710018 }, { "content": "void module_c_init (void)\n\n{\n\n // nothing to see here\n", "file_path": "test-files/c-demo/pkg_b/module_c/module_c.c", "rank": 42, "score": 38626.71937220213 }, { "content": "void module_a_init (void)\n\n{\n\n uint32_t some_value = 1;\n\n // nothing to see here\n\n _changeme = !_changeme;\n\n\n\n CALC (_changeme, some_value);\n\n\n\n if (some_value < MODULE_A_SMTH)\n\n {\n\n some_value++;\n\n }\n", "file_path": "test-files/c-demo/pkg_a/module_a/module_a.c", "rank": 43, "score": 38626.71937220213 }, { "content": "void module_b_init (void)\n\n{\n\n module_a_init ();\n\n // nothing to see here\n", "file_path": "test-files/c-demo/pkg_b/module_b/module_b.c", "rank": 44, "score": 38626.71937220213 }, { "content": "void module_b_init (void);\n", "file_path": "test-files/c-demo/pkg_b/module_b/module_b.h", "rank": 45, "score": 38626.71937220213 }, { "content": "void module_a_init (void);\n", "file_path": "test-files/c-demo/pkg_a/module_a/module_a.h", "rank": 46, "score": 38626.71937220213 }, { "content": "void module_c_init (void);\n", "file_path": "test-files/c-demo/pkg_b/module_c/module_c.h", "rank": 47, "score": 38626.71937220213 }, { "content": "static bool _changeme = false;\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.c", "rank": 48, "score": 36237.903519802 }, { "content": 
"void module_unused_init (void);\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_unused/module_unused.h", "rank": 49, "score": 34127.34521764128 }, { "content": "uint32_t module_sub_a_some_function (uint32_t some_parameter);\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.h", "rank": 50, "score": 34127.34521764128 }, { "content": "void module_sub_a_init (void)\n\n{\n\n // nothing to see here\n\n _changeme = !_changeme;\n\n _module_sub_a_some_function (_changeme);\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.c", "rank": 51, "score": 34127.34521764128 }, { "content": "void module_sub_b_init (void)\n\n{\n\n module_sub_a_init ();\n\n // nothing to see here\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_b/module_sub_b.c", "rank": 52, "score": 34127.34521764128 }, { "content": "#define _MODULE_SUB_A_SMTH 1u\n\n\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.c", "rank": 53, "score": 34127.34521764128 }, { "content": "void module_nofmt_init (void );\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_nofmt/module_nofmt.h", "rank": 54, "score": 34127.34521764128 }, { "content": "void module_sub_b_init (void);\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_b/module_sub_b.h", "rank": 55, "score": 34127.34521764128 }, { "content": "uint32_t module_sub_a_some_function (uint32_t some_parameter)\n\n{\n\n some_parameter += 1; // MODULE_SUB_A_SMTH;\n\n if ((some_parameter > 2 && some_parameter <= 3) || some_parameter == 1)\n\n {\n\n return 0UL;\n\n }\n\n\n\n return 0UL;\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.c", "rank": 56, "score": 34127.34521764128 }, { "content": "void module_nofmt_init (void)\n\n {\n\n // nothing to see here\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_nofmt/module_nofmt.c", "rank": 57, "score": 34127.34521764128 }, { "content": "static void 
_module_sub_a_some_function (uint16_t unused);\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.c", "rank": 58, "score": 34127.34521764128 }, { "content": "void module_unused_init (void)\n\n{\n\n // nothing to see here\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_unused/module_unused.c", "rank": 59, "score": 34127.34521764128 }, { "content": "void module_sub_a_init (void);\n", "file_path": "test-files/c-demo/subfolder/pkg_c/module_sub_a/module_sub_a.h", "rank": 60, "score": 34127.34521764128 }, { "content": " cmd_with_path()\n\n .arg(crate_root_rel(\"test-files/json/test-ok-style.json\").as_os_str())\n\n .arg(\"-vvvv\")\n\n .arg(\"--quiet\"),\n\n true,\n\n );\n\n\n\n assert_quiet(\n\n cmd_with_path()\n\n .arg(crate_root_rel(\"test-files/json/test-err-empty.json\").as_os_str())\n\n .arg(\"-vvvv\")\n\n .arg(\"--quiet\"),\n\n false,\n\n );\n\n}\n", "file_path": "tests/invoke.rs", "rank": 61, "score": 28470.431570953537 }, { "content": "\n\n let json = crate_root_rel(\"test-files/json/test-err-invalid-style-file.json\");\n\n // a valid --style parameter even overrides an invalid json configuration file\n\n run_cmd_and_assert(\n\n cmd_with_path().arg(json.as_os_str()).arg(format!(\n\n \"--style={}\",\n\n crate_root_rel(\"test-files/clang-format/named.clang-format\").to_string_lossy()\n\n )),\n\n true,\n\n );\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 62, "score": 28469.791242354077 }, { "content": " run_cmd_and_assert(\n\n cmd_with_path()\n\n .arg(json.as_os_str())\n\n .arg(\"--command=clang-format\"),\n\n true,\n\n );\n\n\n\n let json = crate_root_rel(\"test-files/json/test-err-invalid-command.json\");\n\n // a valid --command parameter even overrides an invalid json configuration file\n\n run_cmd_and_assert(\n\n cmd_with_path()\n\n .arg(json.as_os_str())\n\n .arg(\"--command=clang-format\"),\n\n true,\n\n );\n\n}\n\n\n", "file_path": "tests/invoke.rs", "rank": 63, "score": 28469.058961248353 }, { "content": "// 
input\n\n// https://github.com/mattgathu/duma/blob/master/tests/\n\n// https://crates.io/crates/assert_cmd\n\n\n\nuse std::{path, thread, time};\n\n\n\nuse assert_cmd::Command;\n\nuse clap::crate_name;\n\n\n", "file_path": "tests/invoke.rs", "rank": 64, "score": 28467.368963807534 }, { "content": " /// Parent directory of the Json file, used to resolve paths specified within\n\n pub root: path::PathBuf,\n\n #[serde(skip)]\n\n /// Lossy Json filename\n\n pub name: String,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Command {\n\n Format,\n\n Check,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Data {\n\n /// Json input data\n\n pub json: JsonModel,\n\n /// Command-line override for the style file\n\n pub style: Option<path::PathBuf>,\n\n /// Command-line override for the clang-format executable\n", "file_path": "src/cli/mod.rs", "rank": 65, "score": 27081.11338701257 }, { "content": " necessary permissions to modify all files\",\n\n )?;\n\n Ok(())\n\n }),\n\n cli::Command::Check => {\n\n if let Err(err) = cmd.supports_check_or_err() {\n\n return Err(err).wrap_err(\"Check mode is not supported\").suggestion(\n\n \"Please use a version of clang-format that supports the --dry-run option\",\n\n );\n\n }\n\n\n\n let failures: Vec<_> = paths\n\n .into_par_iter()\n\n .map(|path| {\n\n let result = match cmd.run_check(&path) {\n\n Ok(_) => None,\n\n Err(err) => {\n\n let print_path = match &strip_root {\n\n None => path.clone(),\n\n Some(strip) => path.strip_prefix(strip).unwrap().to_path_buf(),\n", "file_path": "src/lib/mod.rs", "rank": 66, "score": 27080.7547513779 }, { "content": "\n\n Ok(Data {\n\n json,\n\n style,\n\n command,\n\n jobs,\n\n cmd,\n\n })\n\n }\n\n\n\n fn path_for_key(&self, key: &str, check_exists: bool) -> eyre::Result<path::PathBuf> {\n\n let path = self\n\n .matches\n\n .value_of_os(key)\n\n .map(std::path::PathBuf::from)\n\n .ok_or(eyre!(format!(\n\n \"Could not convert parameter '{}' to path\",\n\n key\n\n )))?;\n\n\n", "file_path": "src/cli/mod.rs", 
"rank": 67, "score": 27080.718744194957 }, { "content": " pub command: Option<path::PathBuf>,\n\n /// Command-line parameter for the number of jobs to use for executing clang-format\n\n /// If `None` then all available jobs should be used, else the specified number of jobs.\n\n pub jobs: Option<u8>,\n\n /// Command to execute.\n\n pub cmd: Command,\n\n}\n\n\n\npub struct Builder {\n\n pub matches: clap::ArgMatches,\n\n}\n\n\n\nimpl Builder {\n\n fn app() -> clap::Command<'static> {\n\n clap::Command::new(crate_name!())\n\n .arg_required_else_help(true)\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .arg(\n", "file_path": "src/cli/mod.rs", "rank": 68, "score": 27079.944561099368 }, { "content": " .arg(\"-fallback-style=none\")\n\n .arg(\"-style=file\")\n\n .arg(\"--Werror\")\n\n .arg(\"-i\");\n\n\n\n Runner::run(cmd)\n\n }\n\n\n\n pub fn run_check<P>(&self, file: P) -> Result<(), io::Error>\n\n where\n\n P: AsRef<path::Path>,\n\n {\n\n let _ = self.supports_check_or_err()?;\n\n let mut cmd = process::Command::new(self.cmd.as_path());\n\n\n\n cmd.arg(file.as_ref().as_os_str())\n\n .arg(\"-fallback-style=none\")\n\n .arg(\"-style=file\")\n\n .arg(\"--Werror\")\n\n .arg(\"--dry-run\");\n", "file_path": "src/cmd/mod.rs", "rank": 69, "score": 27079.713563445654 }, { "content": "\n\npub struct Runner {\n\n cmd: path::PathBuf,\n\n version: Option<Version>,\n\n}\n\n\n\nimpl Runner {\n\n pub fn new<P>(path: P) -> Runner\n\n where\n\n P: AsRef<path::Path>,\n\n {\n\n let cmd = path::PathBuf::from(path.as_ref());\n\n Runner { cmd, version: None }\n\n }\n\n\n\n fn eval_status(status: process::ExitStatus) -> Result<(), io::Error> {\n\n match status.code() {\n\n Some(code) if code == 0 => (),\n\n Some(code) => {\n\n return Err(io::Error::new(\n", "file_path": "src/cmd/mod.rs", "rank": 70, "score": 27079.36936166613 }, { "content": " filtered\n\n );\n\n\n\n let cmd = get_command(&data)?;\n\n let cmd_path = match 
cmd.get_path().canonicalize() {\n\n Ok(path) => path,\n\n Err(_) => cmd.get_path(),\n\n };\n\n log::info!(\n\n \"{} Found clang-format version {} using command {}\",\n\n step.next(),\n\n console::style(cmd.get_version().unwrap()).bold(),\n\n console::style(cmd_path.to_string_lossy()).bold(),\n\n );\n\n\n\n let strip_root = if let Some((_, style_root)) = &style_and_root {\n\n Some(path::PathBuf::from(style_root.as_path()))\n\n } else {\n\n None\n\n };\n", "file_path": "src/lib/mod.rs", "rank": 71, "score": 27078.914098158955 }, { "content": "use std::{fs, path};\n\n\n\n#[allow(unused_imports)]\n\nuse color_eyre::{eyre::eyre, eyre::WrapErr, Help};\n\nuse rayon::iter::{IntoParallelIterator, ParallelIterator};\n\nuse serde::Deserialize;\n\n\n\nuse crate::cli;\n\nuse crate::cmd;\n\n\n\nmod globs;\n\nmod resolve;\n\n\n\n// TODO: UTF-8 restriction?\n\n#[derive(Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\npub struct JsonModel {\n\n pub paths: Vec<String>,\n\n pub filter_post: Option<Vec<String>>,\n\n pub style: Option<path::PathBuf>,\n\n}\n\n\n", "file_path": "src/lib/mod.rs", "rank": 72, "score": 27078.589423177014 }, { "content": " println!(\"{}\", JsonModel::schema(),);\n\n process::exit(0);\n\n }\n\n\n\n let json_path = self.path_for_key(\"JSON\", true)?;\n\n let json = JsonModel::load(&json_path).wrap_err(\"Invalid parameter for <JSON>\")?;\n\n\n\n let style = match self.matches.is_present(\"style\") {\n\n false => None,\n\n true => {\n\n let style_path = self\n\n .path_for_key(\"style\", true)\n\n .wrap_err(\"Invalid parameter for option --style\")?;\n\n let path = utils::file_with_name_or_ext(&style_path, \".clang-format\")\n\n .wrap_err(\"Invalid parameter for option --style\")?;\n\n Some(path)\n\n }\n\n };\n\n\n\n let command = match self.matches.value_of_os(\"command\") {\n", "file_path": "src/cli/mod.rs", "rank": 73, "score": 27077.981999173026 }, { "content": " let mut json: JsonModel = 
serde_json::from_reader(std::io::BufReader::new(f))\n\n .wrap_err(format!(\"Validation failed for '{}'\", json_name))\n\n .suggestion(format!(\n\n \"Please make sure that '{}' is a valid .json file and the contents match the required schema.\",\n\n json_name))?;\n\n\n\n json.root = json_path\n\n .canonicalize()\n\n .unwrap()\n\n .parent()\n\n .unwrap()\n\n .to_path_buf();\n\n\n\n json.name = json_path.to_string_lossy().into();\n\n Ok(json)\n\n }\n\n}\n", "file_path": "src/cli/mod.rs", "rank": 74, "score": 27077.697307775747 }, { "content": " if check_exists {\n\n return utils::path_or_err(path);\n\n }\n\n Ok(path)\n\n }\n\n}\n\n\n\nimpl JsonModel {\n\n fn schema() -> String {\n\n let schema = schema_for!(JsonModel);\n\n serde_json::to_string_pretty(&schema).unwrap()\n\n }\n\n\n\n fn load(path: impl AsRef<path::Path>) -> eyre::Result<JsonModel> {\n\n let json_path = utils::file_with_ext(path.as_ref(), \"json\", true)?;\n\n let json_name = json_path.to_string_lossy();\n\n\n\n let f = std::fs::File::open(path.as_ref())\n\n .wrap_err(format!(\"Failed to open provided JSON file '{}'\", json_name))?;\n\n\n", "file_path": "src/cli/mod.rs", "rank": 75, "score": 27077.636212486057 }, { "content": "use std::{path, process};\n\n\n\nmod handlers;\n\nmod logging;\n\npub mod utils;\n\n\n\nuse clap::{arg, crate_authors, crate_description, crate_name, crate_version};\n\n#[allow(unused_imports)]\n\nuse color_eyre::{eyre::eyre, eyre::WrapErr, Help};\n\nuse schemars::{schema_for, JsonSchema};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Deserialize, Debug, JsonSchema)]\n\n#[serde(rename_all = \"camelCase\")] // removed: deny_unknown_fields\n\npub struct JsonModel {\n\n /// List of paths and/or globs.\n\n /// This list may contain paths or shell-style globs to define the files that should be\n\n /// filtered. Paths or globs that resolve to folders will be silently ignored. 
Any path\n\n /// contained in this list must be specified relative to the configuration file.\n\n pub paths: Vec<String>,\n", "file_path": "src/cli/mod.rs", "rank": 76, "score": 27077.449781221094 }, { "content": " /// Optional list of globs used for efficiently pre-filtering paths.\n\n /// In contrast to the post-filter, searching will completely skip all paths and its siblings\n\n /// for any match with any pattern. E.g., [\".git\"] will skip all \".git\" folders completely.\n\n /// By default, i.e., if this field is not present in the configuration, the tool will skip all\n\n /// hidden paths and files. Set this entry to an empty list to prevent any kind of\n\n /// pre-filtering.\n\n pub filter_pre: Option<Vec<String>>,\n\n /// Optional list of globs to use for post-filtering.\n\n /// This filter will be applied for all paths _after_ they have been resolved. In contrast to\n\n /// the pre-filter, siblings of paths will not be filtered without the corresponding glob. E.g.,\n\n /// \".git\" will not filter any files, only \".git/**\" would. 
Notice that only\n\n pub filter_post: Option<Vec<String>>,\n\n /// Optional path to a `.clang-format` style file (can be specified via --style)\n\n pub style_file: Option<path::PathBuf>,\n\n /// Optional path where the `.clang-format` file should be copied to while executing\n\n pub style_root: Option<path::PathBuf>,\n\n /// Optional path to the `clang-format` executable or command name\n\n pub command: Option<path::PathBuf>,\n\n\n\n #[serde(skip)]\n", "file_path": "src/cli/mod.rs", "rank": 77, "score": 27077.37249418242 }, { "content": "\n\n if stderr.len() != 0 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"{}\\n---\\n{}---\", err, stderr),\n\n ));\n\n }\n\n return Err(err);\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn run_format<P>(&self, file: P) -> Result<(), io::Error>\n\n where\n\n P: AsRef<path::Path>,\n\n {\n\n let mut cmd = process::Command::new(self.cmd.as_path());\n\n\n\n // execute clang-format to edit in place, using style file\n\n cmd.arg(file.as_ref().as_os_str())\n", "file_path": "src/cmd/mod.rs", "rank": 78, "score": 27077.19788074542 }, { "content": " None => None,\n\n Some(_) => Some(\n\n utils::executable_or_exists(self.path_for_key(\"command\", false)?, None)\n\n .wrap_err(\"Invalid parameter for option --command\")\n\n .suggestion(\n\n \"Please make sure that '--command' is either a valid absolute path, \\\n\n a valid path relative to the current working directory \\\n\n or a known application\",\n\n )?,\n\n ),\n\n };\n\n\n\n // cannot use \"and\" since it is not lazily evaluated, and cannot use \"and_then\" nicely\n\n // since the question mark operator does not work in closures\n\n // let command = self\n\n // .matches\n\n // .value_of_os(\"command\")\n\n // .and_then(|_| Some(self.path_for_key(\"command\", false)?));\n\n\n\n // unwrap is safe to call since jobs has a default value\n", "file_path": "src/cli/mod.rs", "rank": 79, "score": 27076.87129473541 }, { "content": "\n\n // no file found at destination, copy the 
provided style file\n\n let _ = fs::copy(&src_file, &dst_file)\n\n .wrap_err(format!(\n\n \"Failed to copy style file to {}\",\n\n dst_root.to_string_lossy(),\n\n ))\n\n .suggestion(format!(\n\n \"Please check the permissions for the folder {}\",\n\n dst_root.to_string_lossy()\n\n ))?;\n\n\n\n Ok(Some(dst_file))\n\n}\n\n\n", "file_path": "src/lib/mod.rs", "rank": 80, "score": 27076.626705213053 }, { "content": "\n\n let style = place_style_file(style_and_root, &mut step)?;\n\n // binding for scope guard is not used, but an action needed when the variable goes out of scope\n\n let _style = scopeguard::guard(style, |path| {\n\n // ensure we delete the temporary style file at return or panic\n\n if let Some(path) = path {\n\n let str = format!(\"Cleaning up temporary file {}\\n\", path.to_string_lossy());\n\n let str = console::style(str).dim().italic();\n\n\n\n log::info!(\"{}\", str);\n\n let _ = fs::remove_file(path);\n\n }\n\n });\n\n\n\n setup_jobs(data.jobs)?;\n\n log::info!(\"{} Executing clang-format ...\\n\", step.next(),);\n\n\n\n let pb = indicatif::ProgressBar::new(paths.len() as u64);\n\n pb.set_style(\n\n indicatif::ProgressStyle::default_bar()\n", "file_path": "src/lib/mod.rs", "rank": 81, "score": 27076.34703602473 }, { "content": " let src_name = src_file.to_string_lossy();\n\n let dst_name = dst_file.to_string_lossy();\n\n\n\n log::warn!(\"Encountered existing style file {}\", dst_name);\n\n\n\n let content_src =\n\n fs::read_to_string(&src_file).wrap_err(format!(\"Failed to read '{}'\", dst_name))?;\n\n let content_dst = fs::read_to_string(&dst_file.as_path())\n\n .wrap_err(format!(\"Failed to read '{}'\", dst_name))\n\n .wrap_err(\"Error while trying to compare existing style file\")\n\n .suggestion(format!(\n\n \"Please delete or fix the existing style file {}\",\n\n dst_name\n\n ))?;\n\n\n\n if content_src == content_dst {\n\n log::info!(\n\n \"{} Existing style file matches {}, skipping placement\",\n\n step.next(),\n\n src_name\n", "file_path": 
"src/lib/mod.rs", "rank": 82, "score": 27076.068322978936 }, { "content": " );\n\n return Ok(None);\n\n }\n\n\n\n return Err(eyre::eyre!(\n\n \"Existing style file {} does not match provided style file {}\",\n\n dst_name,\n\n src_name\n\n )\n\n .suggestion(format!(\n\n \"Please either delete the file {} or align the contents with {}\",\n\n dst_name, src_name\n\n )));\n\n }\n\n\n\n log::info!(\n\n \"{} Copying style file to {}\",\n\n step.next(),\n\n console::style(dst_file.to_string_lossy()).bold(),\n\n );\n", "file_path": "src/lib/mod.rs", "rank": 83, "score": 27075.42585380136 }, { "content": "use std::{io, path, process, str::FromStr};\n\n\n\n#[derive(Clone)]\n", "file_path": "src/cmd/mod.rs", "rank": 84, "score": 27074.394053444852 }, { "content": " let jobs = {\n\n let mut val = self.matches.values_of(\"jobs\").unwrap();\n\n if val.len() == 0 {\n\n None\n\n } else {\n\n let val: u8 = val\n\n .next()\n\n .unwrap()\n\n .parse()\n\n .map_err(|_| eyre!(\"Invalid parameter for option --jobs\"))\n\n .suggestion(\"Please provide a number in the range [0 .. 
255]\")?;\n\n Some(val)\n\n }\n\n };\n\n\n\n let cmd = if self.matches.is_present(\"check\") {\n\n Command::Check\n\n } else {\n\n Command::Format\n\n };\n", "file_path": "src/cli/mod.rs", "rank": 85, "score": 27074.3608691791 }, { "content": " let candidates =\n\n globs::build_matchers_from(&data.json.paths, &data.json.root, \"paths\", &data.json.name)?;\n\n let filter_pre =\n\n globs::build_glob_set_from(&data.json.filter_pre, \"preFilter\", &data.json.name)?;\n\n let filter_post =\n\n globs::build_glob_set_from(&data.json.filter_post, \"postFilter\", &data.json.name)?;\n\n\n\n let (paths, filtered) = globs::match_paths(candidates, filter_pre, filter_post);\n\n let paths = paths.into_iter().map(|p| p.canonicalize().unwrap());\n\n\n\n let filtered = if filtered.is_empty() {\n\n \"\".to_string()\n\n } else {\n\n format!(\" (filtered {} paths)\", filtered.len())\n\n };\n\n\n\n log::info!(\n\n \"{} Found {} files for the provided path patterns{}\",\n\n step.next(),\n\n console::style(paths.len()).bold(),\n", "file_path": "src/lib/mod.rs", "rank": 86, "score": 27074.27580634522 }, { "content": " )\n\n .arg(arg!(-q --quiet \"Suppress all output except for errors; overrides -v\"))\n\n .subcommand_negates_reqs(true)\n\n .subcommand(\n\n clap::Command::new(\"schema\")\n\n .about(\"Print the schema used for the <JSON> configuration file\"),\n\n )\n\n }\n\n\n\n pub fn build() -> Builder {\n\n let cmd = Builder::app();\n\n let builder = Builder {\n\n matches: cmd.get_matches(),\n\n };\n\n logging::setup(&builder.matches);\n\n builder\n\n }\n\n\n\n pub fn parse(self) -> eyre::Result<Data> {\n\n if self.matches.subcommand_matches(\"schema\").is_some() {\n", "file_path": "src/cli/mod.rs", "rank": 87, "score": 27074.234914747994 }, { "content": " };\n\n Some((print_path, format!(\"{}\", err)))\n\n }\n\n };\n\n let (prefix, style) = match result {\n\n Some(_) => (\"Error\", console::Style::new().red().bold()),\n\n None => (\"Match\", console::Style::new().green().bold()),\n\n 
};\n\n log_step(prefix, path.as_path(), &strip_root, &pb, style);\n\n if let Some(err) = &result {\n\n if !log_pretty() {\n\n log::error!(\"{}\", err.1);\n\n }\n\n }\n\n result\n\n })\n\n .flatten()\n\n .collect();\n\n\n\n if !failures.is_empty() {\n", "file_path": "src/lib/mod.rs", "rank": 88, "score": 27073.981192650546 }, { "content": " pub fn get_path(&self) -> path::PathBuf {\n\n self.cmd.clone()\n\n }\n\n\n\n pub fn validate(&mut self) -> Result<(), io::Error> {\n\n let cmd = process::Command::new(self.cmd.as_path())\n\n .arg(\"--version\")\n\n .output()?;\n\n\n\n if let Err(err) = Runner::eval_status(cmd.status) {\n\n log::error!(\n\n \"Execution failed:\\n{}\",\n\n String::from_utf8_lossy(&cmd.stderr)\n\n );\n\n return Err(err);\n\n }\n\n\n\n // example output of clang-format:\n\n // clang-format version 4.0.0 (tags/checker/checker-279)\n\n let stdout = String::from_utf8_lossy(&cmd.stdout);\n", "file_path": "src/cmd/mod.rs", "rank": 89, "score": 27073.374665486124 }, { "content": " )\n\n .arg(\n\n arg!(-j --jobs ... \"Optional parameter to define the number of jobs to use. \\\n\n If provided without value (e.g., '-j') all available logical \\\n\n cores are used. Maximum value is 255\")\n\n .default_value(\"1\")\n\n .takes_value(true)\n\n .min_values(0)\n\n .max_values(1)\n\n .required(false),\n\n )\n\n .arg(\n\n arg!(-v --verbose ... \"Verbosity, use -vv... for verbose output.\")\n\n .global(true)\n\n .multiple_values(false),\n\n )\n\n .arg(\n\n arg!(--check \"Run in check mode instead of formatting. Use -vv to \\\n\n log the output of clang-format for each mismatch. 
\\\n\n Requires clang-format 10 or higher.\"),\n", "file_path": "src/cli/mod.rs", "rank": 90, "score": 27072.617288688656 }, { "content": " pb.set_prefix(\"Running\");\n\n }\n\n let paths: Vec<_> = paths.collect();\n\n\n\n let result: eyre::Result<()> = match data.cmd {\n\n cli::Command::Format => paths.into_par_iter().try_for_each(|path| {\n\n log_step(\n\n \"Formatting\",\n\n path.as_path(),\n\n &strip_root,\n\n &pb,\n\n console::Style::new().green().bold(),\n\n );\n\n\n\n let _ = cmd\n\n .run_format(&path)\n\n .wrap_err(format!(\"Failed to format {}\", path.to_string_lossy()))\n\n .suggestion(\n\n \"Please make sure that your style file matches \\\n\n the version of clang-format and that you have the \\\n", "file_path": "src/lib/mod.rs", "rank": 91, "score": 27072.426017648075 }, { "content": " arg!(<JSON>)\n\n .help(\"Path/configuration as .json\")\n\n // invalid UTF-8 characters must be allowed since we'll be using value_of_os\n\n // and paths do not necessarily only contain valid UTF-8 characters.\n\n .allow_invalid_utf8(true),\n\n )\n\n .arg(\n\n arg!(-s --style ... \"Optional path to .clang-format style file. \\\n\n Overrides <JSON> configuration\")\n\n .allow_invalid_utf8(true)\n\n .takes_value(true)\n\n .required(false),\n\n )\n\n .arg(\n\n arg!(-c --command ... \"Optional path to executable or clang-format command. 
\\\n\n Overrides <JSON> configuration, defaults to `clang-format`\")\n\n // .default_value(\"clang-format\")\n\n .allow_invalid_utf8(true)\n\n .takes_value(true)\n\n .required(false),\n", "file_path": "src/cli/mod.rs", "rank": 92, "score": 27072.161376149 }, { "content": "\n\n Runner::run(cmd)\n\n }\n\n}\n\n\n\nimpl Clone for Runner {\n\n fn clone(&self) -> Runner {\n\n Runner {\n\n cmd: path::PathBuf::from(self.cmd.as_path()),\n\n version: self.version.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/cmd/mod.rs", "rank": 93, "score": 27071.184871345173 }, { "content": "\n\n self.version = Some(stdout.parse::<Version>().map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Failed to parse --version output {}: {}\", stdout, err),\n\n )\n\n })?);\n\n Ok(())\n\n }\n\n\n\n pub fn supports_check_or_err(&self) -> Result<(), io::Error> {\n\n if self.version.is_none() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Unknown version, --dry-run requires \\\n\n clang-format version 10.0.1 or higher\",\n\n ));\n\n }\n\n\n\n // https://github.com/llvm/llvm-project/commit/6a1f7d6c9ff8228328d0e65b8678a9c6dff49837\n", "file_path": "src/cmd/mod.rs", "rank": 94, "score": 27070.135332698173 }, { "content": " io::ErrorKind::Other,\n\n format!(\"Process terminated with code {}\", code),\n\n ));\n\n }\n\n None => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Interrupted,\n\n \"Process terminated by signal\",\n\n ))\n\n }\n\n };\n\n Ok(())\n\n }\n\n\n\n pub fn get_version(&self) -> Option<String> {\n\n self.version\n\n .as_ref()\n\n .map(|v| format!(\"{}.{}.{}\", v.major, v.minor, v.patch))\n\n }\n\n\n", "file_path": "src/cmd/mod.rs", "rank": 95, "score": 27069.531433280412 }, { "content": " let version = self.version.as_ref().unwrap();\n\n if version.major < 9u8 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\n\n \"Invalid version {}, --dry-run check requires \\\n\n clang-format version 10.0.1 or higher\",\n\n 
self.get_version().unwrap()\n\n ),\n\n ));\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn run(mut cmd: process::Command) -> Result<(), io::Error> {\n\n let output = cmd.output()?;\n\n\n\n if let Err(err) = Runner::eval_status(output.status) {\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n", "file_path": "src/cmd/mod.rs", "rank": 96, "score": 27068.717792390547 }, { "content": " Err(eyre::eyre!(format!(\n\n \"Format check failed for the following files:\\n{}\",\n\n failures\n\n .into_iter()\n\n .map(|result| format!(\"{}\", result.0.to_string_lossy()))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n };\n\n result?;\n\n\n\n let duration = start.elapsed();\n\n if log_pretty() {\n\n pb.finish();\n\n\n\n println!(\n", "file_path": "src/lib/mod.rs", "rank": 97, "score": 27067.684775234735 }, { "content": " .template(if console::Term::stdout().size().1 > 80 {\n\n \"{prefix:>12.cyan.bold} [{bar:26}] {pos}/{len} {wide_msg}\"\n\n } else {\n\n \"{prefix:>12.cyan.bold} [{bar:26}] {pos}/{len}\"\n\n })\n\n .progress_chars(\"=> \"),\n\n );\n\n\n\n // preparation for indicatif 0.17\n\n // pb.set_style(\n\n // indicatif::ProgressStyle::with_template(if console::Term::stdout().size().1 > 80 {\n\n // \"{prefix:>12.cyan.bold} [{bar:26}] {pos}/{len} {wide_msg}\"\n\n // } else {\n\n // \"{prefix:>12.cyan.bold} [{bar:26}] {pos}/{len}\"\n\n // })\n\n // .unwrap()\n\n // .progress_chars(\"=> \"),\n\n // );\n\n\n\n if log_pretty() {\n", "file_path": "src/lib/mod.rs", "rank": 98, "score": 27065.736098228797 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n\n\n\n #[test]\n\n #[cfg(not(windows))]\n\n fn test_command_path() {\n\n #[derive(Debug)]\n\n struct TestPaths {\n\n path: PathBuf,\n\n is_absolute: bool,\n\n is_relative: bool,\n\n is_file: bool,\n\n }\n\n\n\n let tests = vec![\n\n TestPaths {\n\n path: \"some/path/to/clang-format\".into(),\n", "file_path": "src/lib/resolve.rs", "rank": 99, "score": 
18.146664558286012 } ]
Rust
base_layer/core/src/covenants/output_set.rs
AaronFeickert/tari
5e55bf22110ac40ffc0dea88d88ba836982591eb
use std::{ cmp::Ordering, collections::BTreeSet, iter::FromIterator, ops::{Deref, DerefMut}, }; use crate::{covenants::error::CovenantError, transactions::transaction_components::TransactionOutput}; #[derive(Debug, Clone)] pub struct OutputSet<'a>(BTreeSet<Indexed<&'a TransactionOutput>>); impl<'a> OutputSet<'a> { pub fn new(outputs: &'a [TransactionOutput]) -> Self { outputs.iter().enumerate().collect() } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn set(&mut self, new_set: Self) { *self = new_set; } pub fn retain<F>(&mut self, mut f: F) -> Result<(), CovenantError> where F: FnMut(&'a TransactionOutput) -> Result<bool, CovenantError> { let mut err = None; self.0.retain(|output| match f(**output) { Ok(b) => b, Err(e) => { err = Some(e); false }, }); match err { Some(err) => Err(err), None => Ok(()), } } pub fn union(&self, other: &Self) -> Self { self.0.union(&other.0).copied().collect() } pub fn difference(&self, other: &Self) -> Self { self.0.difference(&other.0).copied().collect() } pub fn symmetric_difference(&self, other: &Self) -> Self { self.0.symmetric_difference(&other.0).copied().collect() } pub fn find_inplace<F>(&mut self, mut pred: F) where F: FnMut(&TransactionOutput) -> bool { match self.0.iter().find(|indexed| pred(&**indexed)) { Some(output) => { let output = *output; self.clear(); self.0.insert(output); }, None => { self.clear(); }, } } pub fn clear(&mut self) { self.0.clear(); } #[cfg(test)] pub(super) fn get(&self, index: usize) -> Option<&TransactionOutput> { self.0 .iter() .find(|output| output.index == index) .map(|output| **output) } #[cfg(test)] pub(super) fn get_selected_indexes(&self) -> Vec<usize> { self.0.iter().map(|idx| idx.index).collect() } } impl<'a> FromIterator<(usize, &'a TransactionOutput)> for OutputSet<'a> { fn from_iter<T: IntoIterator<Item = (usize, &'a TransactionOutput)>>(iter: T) -> Self { iter.into_iter().map(|(i, output)| Indexed::new(i, output)).collect() } } 
impl<'a> FromIterator<Indexed<&'a TransactionOutput>> for OutputSet<'a> { fn from_iter<T: IntoIterator<Item = Indexed<&'a TransactionOutput>>>(iter: T) -> Self { Self(iter.into_iter().collect()) } } #[derive(Debug, Clone, Copy)] struct Indexed<T> { index: usize, value: T, } impl<T> Indexed<T> { pub fn new(index: usize, value: T) -> Self { Self { index, value } } } impl<T> PartialEq for Indexed<T> { fn eq(&self, other: &Self) -> bool { self.index == other.index } } impl<T> Eq for Indexed<T> {} impl<T> PartialOrd for Indexed<T> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.index.partial_cmp(&other.index) } } impl<T> Ord for Indexed<T> { fn cmp(&self, other: &Self) -> Ordering { self.index.cmp(&other.index) } } impl<T> Deref for Indexed<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.value } } impl<T> DerefMut for Indexed<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.value } }
use std::{ cmp::Ordering, collections::BTreeSet, iter::FromIterator, ops::{Deref, DerefMut}, }; use crate::{covenants::error::CovenantError, transactions::transaction_components::TransactionOutput}; #[derive(Debug, Clone)] pub struct OutputSet<'a>(BTreeSet<Indexed<&'a TransactionOutput>>); impl<'a> OutputSet<'a> { pub fn new(outputs: &'a [TransactionOutput]) -> Self { outputs.iter().enumerate().collect() } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn set(&mut self, new_set: Self) { *self = new_set; } pub fn retain<F>(&mut self, mut f: F) -> Result<(), CovenantError> where F: FnMut(&'a TransactionOutput) -> Result<bool, CovenantError> { let mut err = None; self.0.retain(|output| match f(**output) { Ok(b) => b, Err(e) => { err = Some(e); false }, }); match err { Some(err) => Err(err), None => Ok(()), } } pub fn union(&self, other: &Self) -> Self { self.0.union(&other.0).copied().collect() } pub fn difference(&self, other: &Self) -> Self { self.0.difference(&other.0).copied().collect() } pub fn symmetric_difference(&self, other: &Self) -> Self { self.0.symmetric_difference(&other.0).copied().collect() }
pub fn clear(&mut self) { self.0.clear(); } #[cfg(test)] pub(super) fn get(&self, index: usize) -> Option<&TransactionOutput> { self.0 .iter() .find(|output| output.index == index) .map(|output| **output) } #[cfg(test)] pub(super) fn get_selected_indexes(&self) -> Vec<usize> { self.0.iter().map(|idx| idx.index).collect() } } impl<'a> FromIterator<(usize, &'a TransactionOutput)> for OutputSet<'a> { fn from_iter<T: IntoIterator<Item = (usize, &'a TransactionOutput)>>(iter: T) -> Self { iter.into_iter().map(|(i, output)| Indexed::new(i, output)).collect() } } impl<'a> FromIterator<Indexed<&'a TransactionOutput>> for OutputSet<'a> { fn from_iter<T: IntoIterator<Item = Indexed<&'a TransactionOutput>>>(iter: T) -> Self { Self(iter.into_iter().collect()) } } #[derive(Debug, Clone, Copy)] struct Indexed<T> { index: usize, value: T, } impl<T> Indexed<T> { pub fn new(index: usize, value: T) -> Self { Self { index, value } } } impl<T> PartialEq for Indexed<T> { fn eq(&self, other: &Self) -> bool { self.index == other.index } } impl<T> Eq for Indexed<T> {} impl<T> PartialOrd for Indexed<T> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.index.partial_cmp(&other.index) } } impl<T> Ord for Indexed<T> { fn cmp(&self, other: &Self) -> Ordering { self.index.cmp(&other.index) } } impl<T> Deref for Indexed<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.value } } impl<T> DerefMut for Indexed<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.value } }
pub fn find_inplace<F>(&mut self, mut pred: F) where F: FnMut(&TransactionOutput) -> bool { match self.0.iter().find(|indexed| pred(&**indexed)) { Some(output) => { let output = *output; self.clear(); self.0.insert(output); }, None => { self.clear(); }, } }
function_block-full_function
[ { "content": "/// Is this position a leaf in the MMR?\n\n/// We know the positions of all leaves based on the postorder height of an MMR of any size (somewhat unintuitively\n\n/// but this is how the PMMR is \"append only\").\n\npub fn is_leaf(pos: usize) -> bool {\n\n bintree_height(pos) == 0\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 0, "score": 324352.1353268485 }, { "content": "/// Is the node at this pos the \"left\" sibling of its parent?\n\npub fn is_left_sibling(pos: usize) -> bool {\n\n let (peak_map, height) = peak_map_height(pos);\n\n let peak = 1 << height;\n\n (peak_map & peak) == 0\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 1, "score": 319978.8310174921 }, { "content": "/// return (peak_map, pos_height) of given 0-based node pos prior to its addition\n\n/// Example: on input 4 returns (0b11, 0) as mmr state before adding 4 was\n\n/// 2\n\n/// / \\\n\n/// 0 1 3\n\n/// with 0b11 indicating presence of peaks of height 0 and 1.\n\n/// NOTE:\n\n/// the peak map also encodes the path taken from the root to the added node since the path turns left (resp. right)\n\n/// if-and-only-if a peak at that height is absent (resp. present)\n\npub fn peak_map_height(mut pos: usize) -> (usize, usize) {\n\n if pos == 0 {\n\n return (0, 0);\n\n }\n\n let mut peak_size = ALL_ONES >> pos.leading_zeros();\n\n let mut bitmap = 0;\n\n while peak_size != 0 {\n\n bitmap <<= 1;\n\n if pos >= peak_size {\n\n pos -= peak_size;\n\n bitmap |= 1;\n\n }\n\n peak_size >>= 1;\n\n }\n\n (bitmap, pos)\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 2, "score": 318152.5409838251 }, { "content": "pub fn check_not_bad_block<B: BlockchainBackend>(db: &B, hash: &[u8]) -> Result<(), ValidationError> {\n\n if db.bad_block_exists(hash.to_vec())? 
{\n\n return Err(ValidationError::BadBlockFound { hash: to_hex(hash) });\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 3, "score": 314262.4000054116 }, { "content": "/// This function checks that an input is a valid spendable UTXO\n\npub fn check_input_is_utxo<B: BlockchainBackend>(db: &B, input: &TransactionInput) -> Result<(), ValidationError> {\n\n let output_hash = input.output_hash();\n\n if let Some(utxo_hash) = db.fetch_unspent_output_hash_by_commitment(input.commitment()?)? {\n\n // We know that the commitment exists in the UTXO set. Check that the output hash matches (i.e. all fields\n\n // like output features match)\n\n if utxo_hash == output_hash {\n\n // Check that the input found by commitment, matches the input given here\n\n match db\n\n .fetch_output(&utxo_hash)?\n\n .and_then(|output| output.output.into_unpruned_output())\n\n {\n\n Some(output) => {\n\n let mut compact = input.to_compact();\n\n compact.add_output_data(\n\n output.version,\n\n output.features,\n\n output.commitment,\n\n output.script,\n\n output.sender_offset_public_key,\n\n output.covenant,\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 4, "score": 311275.8628883241 }, { "content": "/// This function checks that all inputs in the blocks are valid UTXO's to be spent\n\npub fn check_inputs_are_utxos<B: BlockchainBackend>(db: &B, body: &AggregateBody) -> Result<(), ValidationError> {\n\n let mut not_found_inputs = Vec::new();\n\n let mut output_hashes = None;\n\n let output_unique_ids = body\n\n .outputs()\n\n .iter()\n\n .filter_map(|output| {\n\n output\n\n .features\n\n .unique_id\n\n .as_ref()\n\n .map(|ui| (output.features.parent_public_key.as_ref(), ui))\n\n })\n\n .collect::<Vec<_>>();\n\n for input in body.inputs() {\n\n // If spending a unique_id, a new output must contain the unique id\n\n if let Some(ref unique_id) = input.features()?.unique_id {\n\n let exactly_one = output_unique_ids\n\n .iter()\n\n 
.filter_map(|(parent_public_key, output_unique_id)| match input.features() {\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 5, "score": 311275.8628883241 }, { "content": "/// Checks whether the last digit in the array matches the checksum for the array minus the last digit.\n\npub fn is_valid(arr: &[usize], dict_len: usize) -> bool {\n\n if arr.len() < 2 {\n\n return false;\n\n }\n\n let cs = checksum(&arr[..arr.len() - 1], dict_len);\n\n cs == arr[arr.len() - 1]\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::luhn::{checksum, is_valid};\n\n\n\n #[test]\n\n fn luhn_6() {\n\n assert_eq!(checksum(&[0, 1, 2, 3, 4, 5], 6), 4);\n\n for i in 0..6 {\n\n let valid = is_valid(&[0, 1, 2, 3, 4, 5, i], 6);\n\n match i {\n\n 4 => assert!(valid),\n\n _ => assert!(!valid),\n", "file_path": "base_layer/common_types/src/luhn.rs", "rank": 6, "score": 310750.3356316084 }, { "content": "/// Calculates the positions of the (parent, sibling) of the node at the provided position.\n\n/// Returns an error if the pos provided would result in an underflow or overflow.\n\npub fn family(pos: usize) -> Result<(usize, usize), MerkleMountainRangeError> {\n\n let (peak_map, height) = peak_map_height(pos);\n\n let peak = 1 << height;\n\n\n\n // Convert to i128 so that we don't over/underflow, and then we will cast back to usize after\n\n let pos = pos as i128;\n\n let peak = i128::from(peak);\n\n let peak_map = peak_map as i128;\n\n\n\n let res = if (peak_map & peak) == 0 {\n\n (pos + 2 * peak, pos + 2 * peak - 1)\n\n } else {\n\n (pos + 1, pos + 1 - 2 * peak)\n\n };\n\n\n\n Ok((\n\n res.0.try_into().map_err(|_| MerkleMountainRangeError::OutOfRange)?,\n\n res.1.try_into().map_err(|_| MerkleMountainRangeError::OutOfRange)?,\n\n ))\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 7, "score": 305891.8792017283 }, { "content": "/// Create a runtime and report if it panics. 
If there are tasks still running after the panic, this\n\n/// will carry on running forever.\n\n// #[deprecated(note = \"use tokio::test instead\")]\n\npub fn test_async<F>(f: F)\n\nwhere F: FnOnce(&mut TestRuntime) {\n\n let mut rt = TestRuntime::from(create_runtime());\n\n f(&mut rt);\n\n let handles = rt.handles.drain(..).collect::<Vec<_>>();\n\n for h in handles {\n\n rt.block_on(h).unwrap();\n\n }\n\n}\n\n\n", "file_path": "infrastructure/test_utils/src/runtime.rs", "rank": 8, "score": 303160.6295226152 }, { "content": "/// Create the default data directory (`~/.tari` on OSx and Linux, for example) if it doesn't already exist\n\npub fn create_data_directory(base_dir: Option<&PathBuf>) -> Result<(), std::io::Error> {\n\n let home = default_path(\"\", base_dir);\n\n\n\n if home.exists() {\n\n Ok(())\n\n } else {\n\n println!(\"Creating {:?}\", home);\n\n std::fs::create_dir_all(home)\n\n }\n\n}\n\n\n", "file_path": "common/src/dir_utils.rs", "rank": 9, "score": 300547.5342133292 }, { "content": "/// Create a pruned Merkle Mountain Range from the provided MMR. Pruning entails throwing all the hashes of the\n\n/// pruned MMR away, except for the current peaks. A new MMR instance is returned that allows you to continue\n\n/// adding onto the MMR as before. Most functions of the pruned MMR will work as expected, but obviously, any\n\n/// leaf hashes prior to the base point won't be available. 
`get_leaf_hash` will return `None` for those nodes, and\n\n/// `validate` will throw an error.\n\npub fn prune_mmr<D, B>(mmr: &MerkleMountainRange<D, B>) -> Result<PrunedMmr<D>, MerkleMountainRangeError>\n\nwhere\n\n D: Digest,\n\n B: ArrayLike<Value = Hash>,\n\n{\n\n let backend = PrunedHashSet::try_from(mmr)?;\n\n Ok(MerkleMountainRange {\n\n hashes: backend,\n\n _hasher: PhantomData,\n\n })\n\n}\n\n\n", "file_path": "base_layer/mmr/src/functions.rs", "rank": 10, "score": 293049.86442960886 }, { "content": "pub fn mine_to_difficulty(mut block: Block, difficulty: Difficulty) -> Result<Block, String> {\n\n // When starting from the same nonce, in tests it becomes common to mine the same block more than once without the\n\n // hash changing. This introduces the required entropy\n\n block.header.nonce = rand::thread_rng().gen();\n\n for _i in 0..20000 {\n\n if sha3_difficulty(&block.header) == difficulty {\n\n return Ok(block);\n\n }\n\n block.header.nonce += 1;\n\n }\n\n Err(\"Could not mine to difficulty in 20000 iterations\".to_string())\n\n}\n\n\n", "file_path": "base_layer/core/src/test_helpers/mod.rs", "rank": 11, "score": 291658.1544401328 }, { "content": "/// A convenience function in the same vein as [prune_mmr], but applied to `MutableMmr` instances.\n\npub fn prune_mutable_mmr<D, B>(mmr: &MutableMmr<D, B>) -> Result<PrunedMutableMmr<D>, MerkleMountainRangeError>\n\nwhere\n\n D: Digest,\n\n B: ArrayLike<Value = Hash>,\n\n{\n\n let backend = PrunedHashSet::try_from(&mmr.mmr)?;\n\n Ok(MutableMmr {\n\n mmr: MerkleMountainRange::new(backend),\n\n deleted: mmr.deleted.clone(),\n\n size: mmr.size,\n\n })\n\n}\n\n\n", "file_path": "base_layer/mmr/src/functions.rs", "rank": 12, "score": 290387.9540727298 }, { "content": "/// Installs a new configuration file template, copied from the application type's preset and written to the given path.\n\n/// Also includes the common configuration defined in `config/presets/common.toml`.\n\npub fn write_default_config_to<P: 
AsRef<Path>>(path: P) -> Result<(), std::io::Error> {\n\n // Use the same config file so that all the settings are easier to find, and easier to\n\n // support users over chat channels\n\n let common = include_str!(\"../../config/presets/common.toml\");\n\n let source = [\n\n common,\n\n include_str!(\"../../config/presets/peer_seeds.toml\"),\n\n include_str!(\"../../config/presets/base_node.toml\"),\n\n include_str!(\"../../config/presets/console_wallet.toml\"),\n\n include_str!(\"../../config/presets/miner.toml\"),\n\n include_str!(\"../../config/presets/merge_mining_proxy.toml\"),\n\n include_str!(\"../../config/presets/validator_node.toml\"),\n\n include_str!(\"../../config/presets/collectibles.toml\"),\n\n ]\n\n .join(\"\\n\");\n\n\n\n if let Some(d) = path.as_ref().parent() {\n\n fs::create_dir_all(d)?\n\n };\n\n let mut file = File::create(path)?;\n\n file.write_all(source.as_ref())\n\n}\n\n\n", "file_path": "common/src/configuration/utils.rs", "rank": 13, "score": 290163.8134770296 }, { "content": "pub fn with_temp_dir<F, R>(f: F) -> R\n\nwhere F: FnOnce(&Path) -> R {\n\n let tmp = Builder::new().prefix(\"tari-test\").tempdir().unwrap();\n\n let r = f(tmp.path());\n\n drop(tmp);\n\n r\n\n}\n\n\n", "file_path": "infrastructure/test_utils/src/paths.rs", "rank": 14, "score": 284925.19575383 }, { "content": "pub fn spawn_until_shutdown<F>(fut: F) -> Shutdown\n\nwhere F: Future<Output = ()> + Send + 'static {\n\n let shutdown = Shutdown::new();\n\n let signal = shutdown.to_signal();\n\n task::spawn(async move {\n\n futures::pin_mut!(fut);\n\n future::select(signal, fut).await;\n\n });\n\n shutdown\n\n}\n\n\n", "file_path": "infrastructure/test_utils/src/runtime.rs", "rank": 15, "score": 283234.9432237451 }, { "content": "pub fn serialize_string<S, T>(source: &T, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n T: Display,\n\n{\n\n ser.serialize_str(source.to_string().as_str())\n\n}\n\n\n", "file_path": "common/src/configuration/utils.rs", 
"rank": 16, "score": 278323.9186753086 }, { "content": "pub fn draw_dialog<B>(\n\n f: &mut Frame<B>,\n\n full_area: Rect,\n\n title: String,\n\n message: String,\n\n color: Color,\n\n width: u16,\n\n height: u16,\n\n) where\n\n B: Backend,\n\n{\n\n let popup_area = centered_rect_absolute(width.min(full_area.width), height.min(full_area.height), full_area);\n\n\n\n f.render_widget(Clear, popup_area);\n\n\n\n let block = Block::default().borders(Borders::ALL).title(Span::styled(\n\n title.as_str(),\n\n Style::default().fg(color).add_modifier(Modifier::BOLD),\n\n ));\n\n f.render_widget(block, popup_area);\n", "file_path": "applications/tari_console_wallet/src/ui/widgets/utilities.rs", "rank": 17, "score": 278197.753482885 }, { "content": "pub fn calculate_mmr_root<D, B>(\n\n src: &MerkleMountainRange<D, B>,\n\n additions: Vec<Hash>,\n\n) -> Result<Hash, MerkleMountainRangeError>\n\nwhere\n\n D: Digest,\n\n B: ArrayLike<Value = Hash>,\n\n{\n\n let mut mmr = prune_mmr(src)?;\n\n for hash in additions {\n\n mmr.push(hash)?;\n\n }\n\n mmr.get_merkle_root()\n\n}\n", "file_path": "base_layer/mmr/src/functions.rs", "rank": 18, "score": 272250.0195941984 }, { "content": "pub fn prompt(question: &str) -> bool {\n\n println!(\"{}\", question);\n\n let mut input = \"\".to_string();\n\n io::stdin().read_line(&mut input).unwrap();\n\n let input = input.trim().to_lowercase();\n\n input == \"y\" || input.is_empty()\n\n}\n\n\n", "file_path": "common/src/configuration/bootstrap.rs", "rank": 19, "score": 271168.850716534 }, { "content": "pub fn setup_filter_test<'a, F>(\n\n covenant: &Covenant,\n\n input: &'a TransactionInput,\n\n block_height: u64,\n\n output_mod: F,\n\n) -> (CovenantContext<'a>, Vec<TransactionOutput>)\n\nwhere\n\n F: FnOnce(&mut Vec<TransactionOutput>),\n\n{\n\n let mut context = create_context(covenant, input, block_height);\n\n // Consume root token (i.e the filter we're testing), args for filter presumably come next\n\n context.next_filter().unwrap();\n\n let 
mut outputs = create_outputs(10, Default::default());\n\n output_mod(&mut outputs);\n\n (context, outputs)\n\n}\n", "file_path": "base_layer/core/src/covenants/filters/test.rs", "rank": 20, "score": 269136.2754447125 }, { "content": "/// `calculate_mmr_root`` takes an MMR instance and efficiently calculates the new MMR root by applying the given\n\n/// additions to calculate a new MMR root without changing the original MMR.\n\n///\n\n/// This is done by creating a memory-backed sparse (pruned) copy of the original MMR, applying the changes and then\n\n/// calculating a new root.\n\n///\n\n/// # Parameters\n\n/// * `src`: A reference to the original MMR\n\n/// * `additions`: A vector of leaf node hashes to append to the MMR\n\n/// * `deletions`: A vector of leaf node _indices_ that will be marked as deleted.\n\n///\n\n/// # Returns\n\n/// The new MMR root as a result of applying the given changes\n\npub fn calculate_pruned_mmr_root<D, B>(\n\n src: &MutableMmr<D, B>,\n\n additions: Vec<Hash>,\n\n deletions: Vec<u32>,\n\n) -> Result<Hash, MerkleMountainRangeError>\n\nwhere\n\n D: Digest,\n\n B: ArrayLike<Value = Hash>,\n\n{\n\n let mut pruned_mmr = prune_mutable_mmr(src)?;\n\n for hash in additions {\n\n pruned_mmr.push(hash)?;\n\n }\n\n for index in deletions {\n\n pruned_mmr.delete(index);\n\n }\n\n pruned_mmr.compress();\n\n pruned_mmr.get_merkle_root()\n\n}\n\n\n", "file_path": "base_layer/mmr/src/functions.rs", "rank": 21, "score": 269102.1428048082 }, { "content": "/// This function checks:\n\n/// 1. the byte size of TariScript does not exceed the maximum\n\n/// 2. 
that the outputs do not already exist in the UTxO set.\n\npub fn check_outputs<B: BlockchainBackend>(\n\n db: &B,\n\n constants: &ConsensusConstants,\n\n body: &AggregateBody,\n\n) -> Result<(), ValidationError> {\n\n let mut unique_ids = Vec::new();\n\n let max_script_size = constants.get_max_script_byte_size();\n\n for output in body.outputs() {\n\n check_tari_script_byte_size(&output.script, max_script_size)?;\n\n // Check outputs for duplicate asset ids\n\n if output.features.is_non_fungible_mint() || output.features.is_non_fungible_burn() {\n\n if let Some(unique_id) = output.features.unique_asset_id() {\n\n let parent_pk = output.features.parent_public_key.as_ref();\n\n\n\n let asset_tuple = (parent_pk, unique_id);\n\n if unique_ids.contains(&asset_tuple) {\n\n return Err(ValidationError::ContainsDuplicateUtxoUniqueID);\n\n }\n\n unique_ids.push(asset_tuple)\n\n }\n\n }\n\n check_not_duplicate_txo(db, output)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 22, "score": 269096.98313577997 }, { "content": "#[allow(dead_code)]\n\npub fn check_kernel_malleability(block_mod_fn: impl Fn(&mut Block)) {\n\n check_block_changes_are_detected(MerkleMountainRangeField::Kernel, block_mod_fn);\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_malleability.rs", "rank": 23, "score": 268544.57683831063 }, { "content": "#[allow(dead_code)]\n\npub fn check_output_malleability(block_mod_fn: impl Fn(&mut Block)) {\n\n check_block_changes_are_detected(MerkleMountainRangeField::Output, block_mod_fn);\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_malleability.rs", "rank": 24, "score": 268544.57683831063 }, { "content": "#[allow(dead_code)]\n\npub fn check_witness_malleability(block_mod_fn: impl Fn(&mut Block)) {\n\n check_block_changes_are_detected(MerkleMountainRangeField::Witness, block_mod_fn);\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_malleability.rs", "rank": 25, "score": 
268544.57683831063 }, { "content": "#[allow(dead_code)]\n\npub fn check_input_malleability(block_mod_fn: impl Fn(&mut Block)) {\n\n check_block_changes_are_detected(MerkleMountainRangeField::Input, block_mod_fn);\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_malleability.rs", "rank": 26, "score": 268544.57683831063 }, { "content": "/// Prints a pretty banner on the console as well as the list of available commands\n\npub fn print_banner(commands: Vec<String>, chunk_size: usize, resize_terminal: bool) {\n\n let terminal_title = format!(\"Tari Base Node - Version {}\", consts::APP_VERSION);\n\n if let Err(e) = execute!(stdout(), SetTitle(terminal_title.as_str())) {\n\n println!(\"Error setting terminal title. {}\", e)\n\n }\n\n\n\n let chunks: Vec<Vec<String>> = commands.chunks(chunk_size).map(|x| x.to_vec()).collect();\n\n let mut cell_sizes = Vec::new();\n\n\n\n let mut row_cell_count: usize = 0;\n\n let mut command_data: Vec<Vec<String>> = Vec::new();\n\n for chunk in chunks {\n\n let mut cells: Vec<String> = Vec::new();\n\n for item in chunk {\n\n cells.push(item.clone());\n\n cell_sizes.push(item.chars().count());\n\n row_cell_count += 1;\n\n }\n\n if row_cell_count < chunk_size {\n\n while row_cell_count < chunk_size {\n", "file_path": "applications/tari_base_node/src/commands/cli.rs", "rank": 27, "score": 267983.1039471988 }, { "content": "pub fn deserialize_string_or_struct<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: Deserialize<'de> + FromStr<Err = anyhow::Error>,\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrStruct<T>(PhantomData<fn() -> T>);\n\n\n\n impl<'de, T> Visitor<'de> for StringOrStruct<T>\n\n where T: Deserialize<'de> + FromStr<Err = anyhow::Error>\n\n {\n\n type Value = T;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string or map\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<T, E>\n\n where E: de::Error {\n\n 
Ok(FromStr::from_str(value).unwrap())\n\n }\n\n\n\n fn visit_map<M>(self, map: M) -> Result<T, M::Error>\n\n where M: MapAccess<'de> {\n\n Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))\n\n }\n\n }\n\n\n\n deserializer.deserialize_any(StringOrStruct(PhantomData))\n\n}\n", "file_path": "common/src/configuration/utils.rs", "rank": 28, "score": 266637.3445778843 }, { "content": "/// Create a new block with the provided transactions. The new MMR roots are calculated, and then the new block is\n\n/// added to the database. The newly created block is returned as the result.\n\npub fn append_block<B: BlockchainBackend>(\n\n db: &BlockchainDatabase<B>,\n\n prev_block: &ChainBlock,\n\n txns: Vec<Transaction>,\n\n consensus: &ConsensusManager,\n\n achieved_difficulty: Difficulty,\n\n) -> Result<ChainBlock, ChainStorageError> {\n\n append_block_with_coinbase(\n\n &CryptoFactories::default(),\n\n db,\n\n prev_block,\n\n txns,\n\n consensus,\n\n achieved_difficulty,\n\n )\n\n .map(|(b, _)| b)\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 29, "score": 266067.919694065 }, { "content": "/// Check the PoW data in the BlockHeader. 
This currently only applies to blocks merged mined with Monero.\n\npub fn check_pow_data<B: BlockchainBackend>(\n\n block_header: &BlockHeader,\n\n rules: &ConsensusManager,\n\n db: &B,\n\n) -> Result<(), ValidationError> {\n\n use PowAlgorithm::{Monero, Sha3};\n\n match block_header.pow.pow_algo {\n\n Monero => {\n\n let monero_data =\n\n MoneroPowData::from_header(block_header).map_err(|e| ValidationError::CustomError(e.to_string()))?;\n\n let seed_height = db.fetch_monero_seed_first_seen_height(&monero_data.randomx_key)?;\n\n if seed_height != 0 {\n\n // Saturating sub: subtraction can underflow in reorgs / rewind-blockchain command\n\n let seed_used_height = block_header.height.saturating_sub(seed_height);\n\n if seed_used_height > rules.consensus_constants(block_header.height).max_randomx_seed_height() {\n\n return Err(ValidationError::BlockHeaderError(\n\n BlockHeaderValidationError::OldSeedHash,\n\n ));\n\n }\n\n }\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 30, "score": 266061.4971929586 }, { "content": "/// This function checks that the outputs do not already exist in the TxO set.\n\npub fn check_not_duplicate_txo<B: BlockchainBackend>(\n\n db: &B,\n\n output: &TransactionOutput,\n\n) -> Result<(), ValidationError> {\n\n if let Some(index) = db.fetch_mmr_leaf_index(MmrTree::Utxo, &output.hash())? 
{\n\n warn!(\n\n target: LOG_TARGET,\n\n \"Validation failed due to previously spent output: {} (MMR index = {})\", output, index\n\n );\n\n return Err(ValidationError::ContainsTxO);\n\n }\n\n if db\n\n .fetch_unspent_output_hash_by_commitment(&output.commitment)?\n\n .is_some()\n\n {\n\n warn!(\n\n target: LOG_TARGET,\n\n \"Duplicate UTXO set commitment found for output: {}\", output\n\n );\n\n return Err(ValidationError::ContainsDuplicateUtxoCommitment);\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 31, "score": 266061.4971929586 }, { "content": "#[allow(dead_code)]\n\npub fn generate_block<B: BlockchainBackend>(\n\n db: &BlockchainDatabase<B>,\n\n blocks: &mut Vec<ChainBlock>,\n\n transactions: Vec<Transaction>,\n\n consensus: &ConsensusManager,\n\n) -> Result<BlockAddResult, ChainStorageError> {\n\n let prev_block = blocks.last().unwrap();\n\n let template = chain_block_with_new_coinbase(prev_block, transactions, consensus, &CryptoFactories::default()).0;\n\n let new_block = db.prepare_new_block(template)?;\n\n let result = db.add_block(new_block.into());\n\n if let Ok(BlockAddResult::Ok(ref b)) = result {\n\n blocks.push(b.as_ref().clone());\n\n }\n\n result\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 32, "score": 266061.4971929586 }, { "content": "pub fn is_onion_address(addr: &Multiaddr) -> bool {\n\n let protocol = addr.iter().next();\n\n matches!(protocol, Some(Protocol::Onion(_, _)) | Some(Protocol::Onion3(_)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn is_onion_address_test() {\n\n let expect_true = [\n\n \"/onion/aaimaq4ygg2iegci:1234\",\n\n \"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234\",\n\n ];\n\n\n\n let expect_false = [\"/dns4/mikes-node-nook.com:80\", \"/ip4/1.2.3.4/tcp/1234\"];\n\n\n\n expect_true.iter().for_each(|addr| {\n\n let addr = addr.parse().unwrap();\n\n assert!(is_onion_address(&addr));\n\n });\n\n\n\n 
expect_false.iter().for_each(|addr| {\n\n let addr = addr.parse().unwrap();\n\n assert!(!is_onion_address(&addr));\n\n });\n\n }\n\n}\n", "file_path": "comms/core/src/transports/predicate.rs", "rank": 33, "score": 264481.7879645612 }, { "content": "/// Generate a block and add it to the database using the provided transactions and coinbase. The header will be updated\n\n/// with the correct MMR roots.\n\npub fn generate_block_with_coinbase<B: BlockchainBackend>(\n\n db: &mut BlockchainDatabase<B>,\n\n blocks: &mut Vec<ChainBlock>,\n\n transactions: Vec<Transaction>,\n\n coinbase_utxo: TransactionOutput,\n\n coinbase_kernel: TransactionKernel,\n\n consensus: &ConsensusManager,\n\n) -> Result<BlockAddResult, ChainStorageError> {\n\n let template = chain_block_with_coinbase(\n\n blocks.last().unwrap(),\n\n transactions,\n\n coinbase_utxo,\n\n coinbase_kernel,\n\n consensus,\n\n );\n\n let new_block = db.prepare_new_block(template)?;\n\n let result = db.add_block(new_block.into())?;\n\n if let BlockAddResult::Ok(ref b) = result {\n\n blocks.push(b.as_ref().clone());\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 34, "score": 263143.6220153789 }, { "content": "/// Generate a new block using the given transaction schema and add it to the provided database.\n\n/// The blocks and UTXO vectors are also updated with the info from the new block.\n\npub fn generate_new_block<B: BlockchainBackend>(\n\n db: &mut BlockchainDatabase<B>,\n\n blocks: &mut Vec<ChainBlock>,\n\n outputs: &mut Vec<Vec<UnblindedOutput>>,\n\n schemas: Vec<TransactionSchema>,\n\n consensus: &ConsensusManager,\n\n) -> Result<BlockAddResult, ChainStorageError> {\n\n let coinbase_value = consensus.emission_schedule().block_reward(db.get_height().unwrap() + 1);\n\n generate_new_block_with_coinbase(\n\n db,\n\n &CryptoFactories::default(),\n\n blocks,\n\n outputs,\n\n schemas,\n\n coinbase_value,\n\n consensus,\n\n )\n\n}\n\n\n", "file_path": 
"base_layer/core/tests/helpers/block_builders.rs", "rank": 35, "score": 263143.48338705476 }, { "content": "/// Create a new block with the provided transactions and add a coinbase output. The new MMR roots are calculated, and\n\n/// then the new block is added to the database. The newly created block is returned as the result.\n\npub fn append_block_with_coinbase<B: BlockchainBackend>(\n\n factories: &CryptoFactories,\n\n db: &BlockchainDatabase<B>,\n\n prev_block: &ChainBlock,\n\n txns: Vec<Transaction>,\n\n consensus_manager: &ConsensusManager,\n\n achieved_difficulty: Difficulty,\n\n) -> Result<(ChainBlock, UnblindedOutput), ChainStorageError> {\n\n let height = prev_block.height() + 1;\n\n let mut coinbase_value = consensus_manager.emission_schedule().block_reward(height);\n\n coinbase_value += txns.iter().fold(MicroTari(0), |acc, x| acc + x.body.get_total_fee());\n\n let (coinbase_utxo, coinbase_kernel, coinbase_output) = create_coinbase(\n\n factories,\n\n coinbase_value,\n\n height + consensus_manager.consensus_constants(0).coinbase_lock_height(),\n\n );\n\n let template = chain_block_with_coinbase(prev_block, txns, coinbase_utxo, coinbase_kernel, consensus_manager);\n\n let mut block = db.prepare_new_block(template)?;\n\n block.header.nonce = OsRng.next_u64();\n\n find_header_with_achieved_difficulty(&mut block.header, achieved_difficulty);\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 36, "score": 263143.4299366964 }, { "content": "#[allow(dead_code)]\n\npub fn construct_chained_blocks<B: BlockchainBackend>(\n\n db: &BlockchainDatabase<B>,\n\n block0: ChainBlock,\n\n consensus: &ConsensusManager,\n\n n: usize,\n\n) -> Vec<ChainBlock> {\n\n let mut prev_block = block0;\n\n\n\n repeat_with(|| {\n\n let block = append_block(db, &prev_block, vec![], consensus, 1.into()).unwrap();\n\n prev_block = block.clone();\n\n block\n\n })\n\n .take(n)\n\n .collect()\n\n}\n\n\n", "file_path": 
"base_layer/core/tests/helpers/block_builders.rs", "rank": 37, "score": 263137.1083408741 }, { "content": "/// Checks the byte size of TariScript is less than or equal to the given size, otherwise returns an error.\n\npub fn check_tari_script_byte_size(script: &TariScript, max_script_size: usize) -> Result<(), ValidationError> {\n\n let script_size = script.consensus_encode_exact_size();\n\n if script_size > max_script_size {\n\n return Err(ValidationError::TariScriptExceedsMaxSize {\n\n max_script_size,\n\n actual_script_size: script_size,\n\n });\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 38, "score": 263087.2231464859 }, { "content": "pub fn is_dns4_addr(addr: &Multiaddr) -> bool {\n\n let proto = addr.iter().next();\n\n matches!(proto, Some(Protocol::Dns4(_))) || matches!(proto, Some(Protocol::Dns(_)))\n\n}\n\n\n", "file_path": "comms/core/src/transports/dns/common.rs", "rank": 39, "score": 261328.786966432 }, { "content": "/// Generate a new block using the given transaction schema and coinbase value and add it to the provided database.\n\n/// The blocks and UTXO vectors are also updated with the info from the new block.\n\npub fn generate_new_block_with_coinbase<B: BlockchainBackend>(\n\n db: &mut BlockchainDatabase<B>,\n\n factories: &CryptoFactories,\n\n blocks: &mut Vec<ChainBlock>,\n\n outputs: &mut Vec<Vec<UnblindedOutput>>,\n\n schemas: Vec<TransactionSchema>,\n\n coinbase_value: MicroTari,\n\n consensus: &ConsensusManager,\n\n) -> Result<BlockAddResult, ChainStorageError> {\n\n let mut txns = Vec::new();\n\n let mut block_utxos = Vec::new();\n\n let mut fees = MicroTari(0);\n\n for schema in schemas {\n\n let (tx, mut utxos) = spend_utxos(schema);\n\n fees += tx.body.get_total_fee();\n\n txns.push(tx);\n\n block_utxos.append(&mut utxos);\n\n }\n\n\n\n let (coinbase_utxo, coinbase_kernel, coinbase_output) = create_coinbase(factories, coinbase_value + fees, 100);\n\n 
block_utxos.push(coinbase_output);\n\n\n\n outputs.push(block_utxos);\n\n generate_block_with_coinbase(db, blocks, txns, coinbase_utxo, coinbase_kernel, consensus)\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 40, "score": 260324.13497052685 }, { "content": "#[allow(dead_code)]\n\npub fn generate_block_with_achieved_difficulty<B: BlockchainBackend>(\n\n db: &mut BlockchainDatabase<B>,\n\n blocks: &mut Vec<ChainBlock>,\n\n transactions: Vec<Transaction>,\n\n achieved_difficulty: Difficulty,\n\n consensus: &ConsensusManager,\n\n) -> Result<BlockAddResult, ChainStorageError> {\n\n let template = chain_block_with_new_coinbase(\n\n blocks.last().unwrap(),\n\n transactions,\n\n consensus,\n\n &CryptoFactories::default(),\n\n )\n\n .0;\n\n let mut new_block = db.prepare_new_block(template)?;\n\n new_block.header.nonce = OsRng.next_u64();\n\n find_header_with_achieved_difficulty(&mut new_block.header, achieved_difficulty);\n\n let result = db.add_block(new_block.into());\n\n if let Ok(BlockAddResult::Ok(ref b)) = result {\n\n blocks.push(b.as_ref().clone());\n\n }\n\n result\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 41, "score": 260317.82704902263 }, { "content": "pub fn is_max_datetime(datetime: &NaiveDateTime) -> bool {\n\n chrono::MAX_DATETIME.naive_utc() == *datetime\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn formats_duration() {\n\n let s = format_duration(Duration::from_secs(5));\n\n assert_eq!(s, \"5s\");\n\n let s = format_duration(Duration::from_secs(23 * 60 + 10));\n\n assert_eq!(s, \"23m 10s\");\n\n let s = format_duration(Duration::from_secs(9 * 60 * 60 + 35 * 60 + 45));\n\n assert_eq!(s, \"9h 35m 45s\");\n\n }\n\n}\n", "file_path": "comms/core/src/utils/datetime.rs", "rank": 42, "score": 258293.33516188047 }, { "content": "pub fn setup_runtime() -> Result<Runtime, ExitError> {\n\n let mut builder = runtime::Builder::new_multi_thread();\n\n 
builder.enable_all().build().map_err(|e| {\n\n let msg = format!(\"There was an error while building the node runtime. {}\", e);\n\n ExitError::new(ExitCode::UnknownError, msg)\n\n })\n\n}\n\n\n", "file_path": "applications/tari_app_utilities/src/utilities.rs", "rank": 43, "score": 257952.94306446207 }, { "content": "#[allow(dead_code)]\n\npub fn generate_new_block_with_achieved_difficulty<B: BlockchainBackend>(\n\n db: &mut BlockchainDatabase<B>,\n\n blocks: &mut Vec<ChainBlock>,\n\n outputs: &mut Vec<Vec<UnblindedOutput>>,\n\n schemas: Vec<TransactionSchema>,\n\n achieved_difficulty: Difficulty,\n\n consensus: &ConsensusManager,\n\n) -> Result<BlockAddResult, ChainStorageError> {\n\n let mut txns = Vec::new();\n\n let mut block_utxos = Vec::new();\n\n for schema in schemas {\n\n let (tx, mut utxos) = spend_utxos(schema);\n\n txns.push(tx);\n\n block_utxos.append(&mut utxos);\n\n }\n\n outputs.push(block_utxos);\n\n generate_block_with_achieved_difficulty(db, blocks, txns, achieved_difficulty, consensus)\n\n}\n\n\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 44, "score": 257598.08673420973 }, { "content": "pub fn lmdb_exists<K>(txn: &ConstTransaction<'_>, db: &Database, key: &K) -> Result<bool, ChainStorageError>\n\nwhere K: AsLmdbBytes + ?Sized {\n\n let access = txn.access();\n\n match access.get::<K, [u8]>(db, key).to_opt() {\n\n Ok(None) => Ok(false),\n\n Err(e) => {\n\n error!(target: LOG_TARGET, \"Could not read from lmdb: {:?}\", e);\n\n Err(ChainStorageError::AccessError(e.to_string()))\n\n },\n\n Ok(Some(_)) => Ok(true),\n\n }\n\n}\n\n\n", "file_path": "base_layer/core/src/chain_storage/lmdb_db/lmdb.rs", "rank": 45, "score": 257493.99605870777 }, { "content": "/// Convert a slice into a vector of Public Keys.\n\npub fn slice_to_vec_pubkeys(slice: &[u8], num: usize) -> Result<Vec<RistrettoPublicKey>, ScriptError> {\n\n if slice.len() < num * PUBLIC_KEY_LENGTH {\n\n return Err(ScriptError::InvalidData);\n\n }\n\n\n\n let 
public_keys = slice\n\n .chunks_exact(PUBLIC_KEY_LENGTH)\n\n .take(num)\n\n .map(RistrettoPublicKey::from_bytes)\n\n .collect::<Result<Vec<RistrettoPublicKey>, ByteArrayError>>()?;\n\n\n\n Ok(public_keys)\n\n}\n\n\n", "file_path": "infrastructure/tari_script/src/op_codes.rs", "rank": 46, "score": 257422.819988831 }, { "content": "pub fn install_configuration<F>(application_type: ApplicationType, path: &Path, installer: F)\n\nwhere F: Fn(ApplicationType, &Path) -> Result<(), std::io::Error> {\n\n if let Err(e) = installer(application_type, path) {\n\n println!(\n\n \"Failed to install a new configuration file in {}: {}\",\n\n path.to_str().unwrap_or(\"?\"),\n\n e\n\n )\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ApplicationType {\n\n BaseNode,\n\n ConsoleWallet,\n\n MergeMiningProxy,\n\n Miner,\n\n StratumTranscoder,\n\n ValidatorNode,\n\n Collectibles,\n", "file_path": "common/src/configuration/bootstrap.rs", "rank": 47, "score": 256406.36592805205 }, { "content": "pub fn channel<T: Clone>() -> OneshotTrigger<T> {\n\n OneshotTrigger::new()\n\n}\n\n\n\npub struct OneshotTrigger<T> {\n\n sender: Option<oneshot::Sender<T>>,\n\n signal: OneshotSignal<T>,\n\n}\n\n\n\nimpl<T: Clone> OneshotTrigger<T> {\n\n pub fn new() -> Self {\n\n let (tx, rx) = oneshot::channel();\n\n Self {\n\n sender: Some(tx),\n\n signal: rx.shared().into(),\n\n }\n\n }\n\n\n\n pub fn to_signal(&self) -> OneshotSignal<T> {\n\n self.signal.clone()\n", "file_path": "infrastructure/shutdown/src/oneshot_trigger.rs", "rank": 48, "score": 254632.8804514961 }, { "content": "pub fn lmdb_len(txn: &ConstTransaction<'_>, db: &Database) -> Result<usize, ChainStorageError> {\n\n let stats = txn.db_stat(db).map_err(|e| {\n\n error!(target: LOG_TARGET, \"Could not read length from lmdb: {:?}\", e);\n\n ChainStorageError::AccessError(e.to_string())\n\n })?;\n\n Ok(stats.entries)\n\n}\n\n\n", "file_path": "base_layer/core/src/chain_storage/lmdb_db/lmdb.rs", "rank": 49, "score": 
254461.64453934575 }, { "content": "pub fn lmdb_clear(txn: &WriteTransaction<'_>, db: &Database) -> Result<usize, ChainStorageError> {\n\n let mut cursor = txn.cursor(db)?;\n\n let mut access = txn.access();\n\n let mut num_deleted = 0;\n\n while cursor.next::<[u8], [u8]>(&access).to_opt()?.is_some() {\n\n cursor.del(&mut access, del::Flags::empty())?;\n\n num_deleted += 1;\n\n }\n\n Ok(num_deleted)\n\n}\n", "file_path": "base_layer/core/src/chain_storage/lmdb_db/lmdb.rs", "rank": 50, "score": 254461.64453934575 }, { "content": "pub fn lmdb_delete_each_where<K, V, F>(\n\n txn: &WriteTransaction<'_>,\n\n db: &Database,\n\n mut predicate: F,\n\n) -> Result<usize, ChainStorageError>\n\nwhere\n\n K: FromLmdbBytes + ?Sized,\n\n V: DeserializeOwned,\n\n F: FnMut(&K, V) -> Option<bool>,\n\n{\n\n let mut cursor = txn.cursor(db)?;\n\n let mut access = txn.access();\n\n let mut num_deleted = 0;\n\n while let Some((k, v)) = cursor.next::<K, [u8]>(&access).to_opt()? {\n\n match deserialize(v) {\n\n Ok(v) => match predicate(k, v) {\n\n Some(true) => {\n\n cursor.del(&mut access, del::Flags::empty())?;\n\n num_deleted += 1;\n\n },\n", "file_path": "base_layer/core/src/chain_storage/lmdb_db/lmdb.rs", "rank": 51, "score": 252584.54459304071 }, { "content": "/// Appends merge mining hash to a Monero block\n\npub fn append_merge_mining_tag<T: AsRef<[u8]>>(block: &mut monero::Block, hash: T) -> Result<(), MergeMineError> {\n\n if hash.as_ref().len() != monero::Hash::len_bytes() {\n\n return Err(MergeMineError::HashingError(format!(\n\n \"Expected source to be {} bytes, but it was {} bytes\",\n\n monero::Hash::len_bytes(),\n\n hash.as_ref().len()\n\n )));\n\n }\n\n let hash = monero::Hash::from_slice(hash.as_ref());\n\n let mm_tag = SubField::MergeMining(VarInt(0), hash);\n\n block.miner_tx.prefix.extra.0.push(mm_tag);\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/proof_of_work/monero_rx/helpers.rs", "rank": 52, "score": 252336.19717106156 }, { "content": "// Perform a 
comprehensive search to remove all the minimum height orphans to maintain the configured orphan pool\n\n// storage limit. If the node is configured to run in pruned mode then orphan blocks with heights lower than the horizon\n\n// block height will also be discarded.\n\nfn cleanup_orphans<T: BlockchainBackend>(db: &mut T, orphan_storage_capacity: usize) -> Result<(), ChainStorageError> {\n\n let metadata = db.fetch_chain_metadata()?;\n\n let horizon_height = metadata.horizon_block(metadata.height_of_longest_chain());\n\n\n\n db.delete_oldest_orphans(horizon_height, orphan_storage_capacity)\n\n}\n\n\n", "file_path": "base_layer/core/src/chain_storage/blockchain_database.rs", "rank": 53, "score": 249793.76837100537 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tonic_build::configure()\n\n .build_client(true)\n\n .build_server(true)\n\n .format(false)\n\n .compile(\n\n &[\n\n \"proto/base_node.proto\",\n\n \"proto/wallet.proto\",\n\n \"proto/validator_node.proto\",\n\n ],\n\n &[\"proto\"],\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "applications/tari_app_grpc/build.rs", "rank": 54, "score": 249672.90019899036 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tari_common::build::ProtobufCompiler::new()\n\n .proto_paths(&[\"proto/tips\"])\n\n .include_paths(&[\"proto/tips\"])\n\n .emit_rerun_if_changed_directives()\n\n .compile()\n\n .unwrap();\n\n Ok(())\n\n}\n", "file_path": "dan_layer/common_types/build.rs", "rank": 55, "score": 249672.90019899036 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tari_common::build::ProtobufCompiler::new()\n\n .proto_paths(&[\"proto/dan\"])\n\n .include_paths(&[\"proto/dan\"])\n\n .emit_rerun_if_changed_directives()\n\n .compile()\n\n .unwrap();\n\n Ok(())\n\n}\n", "file_path": "applications/tari_validator_node/build.rs", "rank": 56, "score": 249672.90019899036 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let gen = 
StaticApplicationInfo::initialize()?;\n\n gen.write_consts_to_outdir(\"consts.rs\")?;\n\n Ok(())\n\n}\n", "file_path": "applications/tari_app_utilities/build.rs", "rank": 57, "score": 249672.90019899036 }, { "content": "/// Interpret a string as either a socket address (first) or a multiaddr format string.\n\n/// If the former, it gets converted into a MultiAddr before being returned.\n\npub fn socket_or_multi(addr: &str) -> Result<Multiaddr, Error> {\n\n addr.parse::<SocketAddr>()\n\n .map(|socket| match socket {\n\n SocketAddr::V4(ip4) => Multiaddr::from_iter([Protocol::Ip4(*ip4.ip()), Protocol::Tcp(ip4.port())]),\n\n SocketAddr::V6(ip6) => Multiaddr::from_iter([Protocol::Ip6(*ip6.ip()), Protocol::Tcp(ip6.port())]),\n\n })\n\n .or_else(|_| addr.parse::<Multiaddr>())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::net::{Ipv4Addr, Ipv6Addr};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn socket_or_multi_test() {\n\n let v4_addr = \"127.0.0.1:8080\";\n\n let multi_v4_addr = socket_or_multi(v4_addr).unwrap();\n\n // ipv4 testing\n", "file_path": "common/src/configuration/mod.rs", "rank": 58, "score": 248456.52928614535 }, { "content": "pub fn validate_covenants(block: &Block) -> Result<(), ValidationError> {\n\n for input in block.body.inputs() {\n\n let output_set_size = input\n\n .covenant()?\n\n .execute(block.header.height, input, block.body.outputs())?;\n\n trace!(target: LOG_TARGET, \"{} output(s) passed covenant\", output_set_size);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 59, "score": 248337.47828283353 }, { "content": "pub fn migrate(database: &LMDBDatabase) -> Result<(), LMDBError> {\n\n // Add migrations here in version order\n\n let migrations = vec![v5::Migration.boxed(), v6::Migration.boxed()];\n\n if migrations.is_empty() {\n\n return Ok(());\n\n }\n\n let latest_version = migrations.last().unwrap().get_version();\n\n\n\n // If the database is empty there is nothing to migrate, so set it to the 
latest version\n\n if database.len()? == 0 {\n\n debug!(target: LOG_TARGET, \"New database does not require migration\");\n\n if let Err(err) = database.insert(&MIGRATION_VERSION_KEY, &latest_version) {\n\n error!(\n\n target: LOG_TARGET,\n\n \"Failed to update migration counter: {}. ** Database may be corrupt **\", err\n\n );\n\n }\n\n return Ok(());\n\n }\n\n\n", "file_path": "comms/core/src/peer_manager/migrations.rs", "rank": 60, "score": 248337.47828283353 }, { "content": "pub fn lmdb_filter_map_values<F, V, R>(\n\n txn: &ConstTransaction<'_>,\n\n db: &Database,\n\n f: F,\n\n) -> Result<Vec<R>, ChainStorageError>\n\nwhere\n\n F: Fn(V) -> Option<R>,\n\n V: DeserializeOwned,\n\n{\n\n let access = txn.access();\n\n let mut cursor = txn.cursor(db).map_err(|e| {\n\n error!(target: LOG_TARGET, \"Could not get read cursor from lmdb: {:?}\", e);\n\n ChainStorageError::AccessError(e.to_string())\n\n })?;\n\n let iter = CursorIter::new(\n\n MaybeOwned::Borrowed(&mut cursor),\n\n &access,\n\n |c, a| c.first(a),\n\n Cursor::next::<[u8], [u8]>,\n\n )?;\n", "file_path": "base_layer/core/src/chain_storage/lmdb_db/lmdb.rs", "rank": 61, "score": 247239.23211487345 }, { "content": "/// Prompt the user to input their seed words in a single line.\n\npub fn prompt_private_key_from_seed_words() -> Result<CipherSeed, ExitError> {\n\n debug!(target: LOG_TARGET, \"Prompting for seed words.\");\n\n let mut rl = Editor::<()>::new();\n\n\n\n loop {\n\n println!(\"Recovery Mode\");\n\n println!();\n\n println!(\"Type or paste all of your seed words on one line, only separated by spaces.\");\n\n let input = rl.readline(\">> \").map_err(|e| ExitError::new(ExitCode::IOError, e))?;\n\n let seed_words: Vec<String> = input.split_whitespace().map(str::to_string).collect();\n\n\n\n match CipherSeed::from_mnemonic(&seed_words, None) {\n\n Ok(seed) => break Ok(seed),\n\n Err(e) => {\n\n debug!(target: LOG_TARGET, \"MnemonicError parsing seed words: {}\", e);\n\n println!(\"Failed to parse seed 
words! Did you type them correctly?\");\n\n continue;\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "applications/tari_console_wallet/src/recovery.rs", "rank": 62, "score": 246868.6404454818 }, { "content": "/// The height of a node in a full binary tree from its index.\n\npub fn bintree_height(num: usize) -> usize {\n\n if num == 0 {\n\n return 0;\n\n }\n\n peak_map_height(num).1\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 63, "score": 245628.02868684148 }, { "content": "fn parse_duration(arg: &str) -> Result<std::time::Duration, std::num::ParseIntError> {\n\n let seconds = arg.parse()?;\n\n Ok(std::time::Duration::from_secs(seconds))\n\n}\n\n\n\n#[derive(Debug, Args, Clone)]\n\npub struct CoinSplitArgs {\n\n pub amount_per_split: MicroTari,\n\n pub num_splits: usize,\n\n #[clap(short, long, default_value = \"1\")]\n\n pub fee_per_gram: MicroTari,\n\n #[clap(short, long, default_value = \"Coin split\")]\n\n pub message: String,\n\n}\n\n\n\n#[derive(Debug, Args, Clone)]\n\npub struct WhoisArgs {\n\n pub public_key: UniPublicKey,\n\n}\n\n\n", "file_path": "applications/tari_console_wallet/src/cli.rs", "rank": 64, "score": 245579.43601382838 }, { "content": "// This function checks for duplicate inputs and outputs. 
There should be no duplicate inputs or outputs in a block\n\npub fn check_sorting_and_duplicates(body: &AggregateBody) -> Result<(), ValidationError> {\n\n if !is_all_unique_and_sorted(body.inputs()) {\n\n return Err(ValidationError::UnsortedOrDuplicateInput);\n\n }\n\n\n\n if !is_all_unique_and_sorted(body.outputs()) {\n\n return Err(ValidationError::UnsortedOrDuplicateOutput);\n\n }\n\n\n\n if !is_all_unique_and_sorted(body.kernels()) {\n\n return Err(ValidationError::UnsortedOrDuplicateKernel);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 65, "score": 242800.6982508658 }, { "content": "/// Returns the MMR node index derived from the leaf index.\n\npub fn node_index(leaf_index: usize) -> usize {\n\n if leaf_index == 0 {\n\n return 0;\n\n }\n\n 2 * leaf_index - leaf_index.count_ones() as usize\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 66, "score": 242674.2240889636 }, { "content": "/// Convert a multiaddr to a socket address required for `TcpStream`\n\n/// This function resolves DNS4 addresses to an ip address.\n\npub fn multiaddr_to_socketaddr(addr: &Multiaddr) -> io::Result<SocketAddr> {\n\n let mut addr_iter = addr.iter();\n\n let network_proto = addr_iter\n\n .next()\n\n .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, format!(\"Invalid address '{}'\", addr)))?;\n\n let transport_proto = addr_iter\n\n .next()\n\n .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, format!(\"Invalid address '{}'\", addr)))?;\n\n\n\n if addr_iter.next().is_some() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\"Invalid address '{}'\", addr),\n\n ));\n\n }\n\n\n\n match (network_proto, transport_proto) {\n\n (Protocol::Dns4(domain), Protocol::Tcp(port)) => {\n\n let addr = format!(\"{}:{}\", domain, port);\n\n addr.to_socket_addrs()\n", "file_path": "comms/core/src/utils/multiaddr.rs", "rank": 67, "score": 242499.06638010443 }, { "content": "#[cfg(feature 
= \"base_node\")]\n\npub fn create_base_node_wallet_rpc_service<B: BlockchainBackend + 'static>(\n\n db: AsyncBlockchainDb<B>,\n\n mempool: MempoolHandle,\n\n state_machine: StateMachineHandle,\n\n) -> BaseNodeWalletRpcServer<BaseNodeWalletRpcService<B>> {\n\n BaseNodeWalletRpcServer::new(BaseNodeWalletRpcService::new(db, mempool, state_machine))\n\n}\n", "file_path": "base_layer/core/src/base_node/rpc/mod.rs", "rank": 68, "score": 242216.23122657667 }, { "content": "pub fn initiate_recover_db(config: &BaseNodeConfig) -> Result<(), ExitError> {\n\n // create recovery db\n\n match &config.db_type {\n\n DatabaseType::Lmdb => {\n\n create_recovery_lmdb_database(config.lmdb_path.as_path()).map_err(|err| {\n\n error!(target: LOG_TARGET, \"{}\", err);\n\n ExitError::new(ExitCode::UnknownError, err)\n\n })?;\n\n },\n\n };\n\n Ok(())\n\n}\n\n\n\npub async fn run_recovery(node_config: &BaseNodeConfig) -> Result<(), anyhow::Error> {\n\n println!(\"Starting recovery mode\");\n\n let (temp_db, main_db, temp_path) = match &node_config.db_type {\n\n DatabaseType::Lmdb => {\n\n let backend = create_lmdb_database(&node_config.lmdb_path, node_config.lmdb.clone()).map_err(|e| {\n\n error!(target: LOG_TARGET, \"Error opening db: {}\", e);\n\n anyhow!(\"Could not open DB: {}\", e)\n", "file_path": "applications/tari_base_node/src/recovery.rs", "rank": 69, "score": 240176.38105132623 }, { "content": "#[cfg(feature = \"base_node\")]\n\npub fn create_base_node_sync_rpc_service<B: BlockchainBackend + 'static>(\n\n db: AsyncBlockchainDb<B>,\n\n base_node_service: LocalNodeCommsInterface,\n\n) -> BaseNodeSyncRpcServer<BaseNodeSyncRpcService<B>> {\n\n BaseNodeSyncRpcServer::new(BaseNodeSyncRpcService::new(db, base_node_service))\n\n}\n", "file_path": "base_layer/core/src/base_node/sync/rpc/mod.rs", "rank": 70, "score": 239846.23974802904 }, { "content": "pub fn register<C: Collector + 'static>(c: C) -> prometheus::Result<()> {\n\n get_default_registry().register(Box::new(c))\n\n}\n\n\n", 
"file_path": "infrastructure/metrics/src/lib.rs", "rank": 71, "score": 238761.56716088066 }, { "content": "/// The number of leaves in a MMR of the provided size.\n\n/// Example: on input 5 returns (2 + 1 + 1) as mmr state before adding 5 was\n\n/// 2\n\n/// / \\\n\n/// 0 1 3 4\n\n/// None is returned if the number of leaves exceeds the maximum value of a usize\n\npub fn checked_n_leaves(size: usize) -> Option<usize> {\n\n if size == 0 {\n\n return Some(0);\n\n }\n\n if size == usize::MAX {\n\n return None;\n\n }\n\n\n\n let mut peak_size = ALL_ONES >> size.leading_zeros();\n\n let mut nleaves = 0usize;\n\n let mut size_left = size;\n\n while peak_size != 0 {\n\n if size_left >= peak_size {\n\n nleaves += (peak_size + 1) >> 1;\n\n size_left -= peak_size;\n\n }\n\n peak_size >>= 1;\n\n }\n\n\n\n if size_left == 0 {\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 72, "score": 238278.5544803258 }, { "content": "/// Gets the postorder traversal index of all peaks in a MMR given its size.\n\n/// Starts with the top peak, which is always on the left side of the range, and navigates toward lower siblings\n\n/// toward the right of the range.\n\npub fn find_peaks(size: usize) -> Vec<usize> {\n\n if size == 0 {\n\n return vec![];\n\n }\n\n let mut peak_size = ALL_ONES >> size.leading_zeros();\n\n let mut num_left = size;\n\n let mut sum_prev_peaks = 0;\n\n let mut peaks = vec![];\n\n while peak_size != 0 {\n\n if num_left >= peak_size {\n\n peaks.push(sum_prev_peaks + peak_size - 1);\n\n sum_prev_peaks += peak_size;\n\n num_left -= peak_size;\n\n }\n\n peak_size >>= 1;\n\n }\n\n if num_left > 0 {\n\n return vec![];\n\n }\n\n peaks\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 73, "score": 238265.51336090208 }, { "content": "/// Set up application-level logging using the Log4rs configuration file specified in\n\npub fn initialize_logging(config_file: &Path, default: &str) -> Result<(), ConfigError> {\n\n println!(\n\n \"Initializing logging 
according to {:?}\",\n\n config_file.to_str().unwrap_or(\"[??]\")\n\n );\n\n\n\n if !config_file.exists() {\n\n if let Some(d) = config_file.parent() {\n\n fs::create_dir_all(d)\n\n .map_err(|e| ConfigError::new(\"Could not create parent directory for log file\", Some(e.to_string())))?\n\n };\n\n let mut file = File::create(config_file)\n\n .map_err(|e| ConfigError::new(\"Could not create default log file\", Some(e.to_string())))?;\n\n file.write_all(default.as_ref())\n\n .map_err(|e| ConfigError::new(\"Could not create default log file\", Some(e.to_string())))?;\n\n }\n\n\n\n log4rs::init_file(config_file, Default::default())\n\n .map_err(|e| ConfigError::new(\"Could not initialize logging\", Some(e.to_string())))\n\n}\n", "file_path": "common/src/logging.rs", "rank": 74, "score": 237367.58685938065 }, { "content": "/// Calculates a checksum using the [Luhn mod n algorithm](https://en.wikipedia.org/wiki/Luhn_mod_N_algorithm). The\n\n/// input to the function is an array of indices, each of which is strictly less than `dict_len`, and the size of the\n\n/// dictionary (`dict_len`). 
The result is the checksum character, also strictly less than `dict_len`.\n\npub fn checksum(arr: &[usize], dict_len: usize) -> usize {\n\n // Starting from the right and working leftwards is easier since\n\n let (sum, _) = arr.iter().rev().fold((0usize, 2usize), |(sum, factor), digit| {\n\n let mut addend = factor * *digit;\n\n let factor = factor ^ 3; // Toggles between 1 and 2\n\n addend = (addend / dict_len) + addend % dict_len;\n\n (sum + addend, factor)\n\n });\n\n (dict_len - (sum % dict_len)) % dict_len\n\n}\n\n\n", "file_path": "base_layer/common_types/src/luhn.rs", "rank": 75, "score": 236233.39072806638 }, { "content": "pub fn copy_into_fixed_array<T: Default + Copy, const SZ: usize>(elems: &[T]) -> Result<[T; SZ], ByteArrayError> {\n\n if elems.len() != SZ {\n\n return Err(ByteArrayError::IncorrectLength);\n\n }\n\n let mut buf = [T::default(); SZ];\n\n buf.copy_from_slice(&elems[0..SZ]);\n\n Ok(buf)\n\n}\n\n\n", "file_path": "base_layer/common_types/src/array.rs", "rank": 76, "score": 236127.67027119547 }, { "content": "pub fn median(mut list: Vec<u64>) -> Option<f64> {\n\n if list.is_empty() {\n\n return None;\n\n }\n\n list.sort_unstable();\n\n let mid_index = list.len() / 2;\n\n let median = if list.len() % 2 == 0 {\n\n (list[mid_index - 1] + list[mid_index]) as f64 / 2.0\n\n } else {\n\n list[mid_index] as f64\n\n };\n\n Some(median)\n\n}\n\n\n", "file_path": "applications/tari_base_node/src/grpc/helpers.rs", "rank": 77, "score": 234821.77066052333 }, { "content": "/// Generate a new random request key to uniquely identify a request and its corresponding responses.\n\npub fn generate_request_key<R>(rng: &mut R) -> RequestKey\n\nwhere R: RngCore {\n\n rng.next_u64()\n\n}\n\n\n\n/// WaitingRequests is used to keep track of a set of WaitingRequests.\n\n#[allow(clippy::type_complexity)]\n\npub struct WaitingRequests<T> {\n\n requests: Arc<RwLock<HashMap<RequestKey, Option<(OneshotSender<T>, Instant)>>>>,\n\n}\n\n\n\nimpl<T> WaitingRequests<T> {\n\n /// 
Create a new set of waiting requests.\n\n pub fn new() -> Self {\n\n Self {\n\n requests: Arc::new(RwLock::new(HashMap::new())),\n\n }\n\n }\n\n\n\n /// Insert a new waiting request.\n", "file_path": "base_layer/common_types/src/waiting_requests.rs", "rank": 78, "score": 234628.88715883569 }, { "content": "pub fn response_line(line: &str) -> Result<ResponseLine, ParseError> {\n\n let parser = map_res(digit1, |code: &str| code.parse::<u16>());\n\n let (rest, code) = parser(line)?;\n\n let (rest, ch) = anychar(rest)?;\n\n if ![' ', '-', '+'].contains(&ch) {\n\n return Err(ParseError(format!(\n\n \"Unexpected response character '{}'. Expected ' ', '-' or '+'.\",\n\n ch\n\n )));\n\n }\n\n\n\n Ok(ResponseLine {\n\n has_more: ['-', '+'].contains(&ch),\n\n is_multiline: ch == '+',\n\n code,\n\n value: rest.to_owned(),\n\n })\n\n}\n\n\n", "file_path": "comms/core/src/tor/control_client/parsers.rs", "rank": 79, "score": 234337.96914859716 }, { "content": "fn validate_address(addr: &Multiaddr, allow_test_addrs: bool) -> Result<(), ConnectionManagerError> {\n\n let mut addr_iter = addr.iter();\n\n let proto = addr_iter\n\n .next()\n\n .ok_or_else(|| ConnectionManagerError::InvalidMultiaddr(\"Multiaddr was empty\".to_string()))?;\n\n\n\n let expect_end_of_address = |mut iter: multiaddr::Iter<'_>| match iter.next() {\n\n Some(p) => Err(ConnectionManagerError::InvalidMultiaddr(format!(\n\n \"Unexpected multiaddress component '{}'\",\n\n p\n\n ))),\n\n None => Ok(()),\n\n };\n\n\n\n /// Returns [true] if the address is a unicast link-local address (fe80::/10).\n\n #[inline]\n\n const fn is_unicast_link_local(addr: &Ipv6Addr) -> bool {\n\n (addr.segments()[0] & 0xffc0) == 0xfe80\n\n }\n\n\n", "file_path": "comms/core/src/connection_manager/common.rs", "rank": 80, "score": 232964.70193680658 }, { "content": "/// Add mmproxy extensions object to JSON RPC success response\n\npub fn add_aux_data(mut response: json::Value, mut ext: json::Value) -> json::Value {\n\n if 
response[\"result\"].is_null() {\n\n return response;\n\n }\n\n match response[\"result\"][MMPROXY_AUX_KEY_NAME].as_object_mut() {\n\n Some(obj_mut) => {\n\n let ext_mut = ext\n\n .as_object_mut()\n\n .expect(\"invalid parameter: expected `ext: json::Value` to be an object but it was not\");\n\n obj_mut.append(ext_mut);\n\n },\n\n None => {\n\n response[\"result\"][MMPROXY_AUX_KEY_NAME] = ext;\n\n },\n\n }\n\n response\n\n}\n\n\n", "file_path": "applications/tari_merge_mining_proxy/src/proxy.rs", "rank": 81, "score": 232863.7805785489 }, { "content": "pub fn register_gauge(name: &str, help: &str) -> prometheus::Result<Gauge> {\n\n let gauge = prometheus::Gauge::new(name, help)?;\n\n register(gauge.clone())?;\n\n Ok(gauge)\n\n}\n\n\n", "file_path": "infrastructure/metrics/src/lib.rs", "rank": 82, "score": 232804.1042548397 }, { "content": "pub fn register_histogram(name: &str, help: &str) -> prometheus::Result<Histogram> {\n\n let gauge = prometheus::Histogram::with_opts(HistogramOpts::new(name, help))?;\n\n register(gauge.clone())?;\n\n Ok(gauge)\n\n}\n\n\n", "file_path": "infrastructure/metrics/src/lib.rs", "rank": 83, "score": 232804.1042548397 }, { "content": "pub fn lmdb_fetch_matching_after<V>(\n\n txn: &ConstTransaction<'_>,\n\n db: &Database,\n\n key_prefix: &[u8],\n\n) -> Result<Vec<V>, ChainStorageError>\n\nwhere\n\n V: DeserializeOwned,\n\n{\n\n let mut cursor = lmdb_get_prefix_cursor(txn, db, key_prefix)?;\n\n let mut result = vec![];\n\n while let Some((_, val)) = cursor.next()? {\n\n result.push(val);\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "base_layer/core/src/chain_storage/lmdb_db/lmdb.rs", "rank": 84, "score": 232671.7505589233 }, { "content": "pub fn report_error(report: bool, status: Status) -> Status {\n\n if report {\n\n status\n\n } else {\n\n Status::new(status.code(), \"Error has occurred. 
Details are obscured.\")\n\n }\n\n}\n\n\n\npub async fn get_heights(\n\n request: &tari_rpc::HeightRequest,\n\n handler: LocalNodeCommsInterface,\n\n) -> Result<(u64, u64), Status> {\n\n block_heights(handler, request.start_height, request.end_height, request.from_tip).await\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl tari_rpc::base_node_server::BaseNode for BaseNodeGrpcServer {\n\n type FetchMatchingUtxosStream = mpsc::Receiver<Result<tari_rpc::FetchMatchingUtxosResponse, Status>>;\n\n type GetBlocksStream = mpsc::Receiver<Result<tari_rpc::HistoricalBlock, Status>>;\n\n type GetConstitutionsStream = mpsc::Receiver<Result<tari_rpc::TransactionOutput, Status>>;\n", "file_path": "applications/tari_base_node/src/grpc/base_node_grpc_server.rs", "rank": 85, "score": 232102.77318368916 }, { "content": "/// Generates a vector of bytes that represent the provided mnemonic sequence of words, the language of the mnemonic\n\n/// sequence is detected\n\npub fn to_bytes(mnemonic_seq: &[String]) -> Result<Vec<u8>, MnemonicError> {\n\n let language = MnemonicLanguage::detect_language(mnemonic_seq)?;\n\n to_bytes_with_language(mnemonic_seq, &language)\n\n}\n\n\n", "file_path": "base_layer/key_manager/src/mnemonic.rs", "rank": 86, "score": 231824.11115222058 }, { "content": "pub fn create_com_sig_from_bytes(_bytes: &[u8]) -> Result<ComSig, ByteArrayError> {\n\n Ok(ComSig::default())\n\n // Ok(ComSig::new(\n\n // HomomorphicCommitment::from_bytes(&bytes[0..32])?,\n\n // RistrettoSecretKey::from_bytes(&bytes[33..64])?,\n\n // RistrettoSecretKey::from_bytes(&bytes[64..96])?,\n\n // ))\n\n}\n\n\n", "file_path": "dan_layer/core/src/types.rs", "rank": 87, "score": 231803.1493812533 }, { "content": "/// Generate a random alphanumeric string of the given size using the default `ThreadRng`.\n\npub fn string(len: usize) -> String {\n\n let mut rng = thread_rng();\n\n iter::repeat(())\n\n .map(|_| rng.sample(Alphanumeric) as char)\n\n .take(len)\n\n .collect()\n\n}\n\n\n", "file_path": 
"infrastructure/test_utils/src/random/mod.rs", "rank": 88, "score": 230686.2915514306 }, { "content": "// TODO: Logging the error and returning a general error to the requester is a common requirement. Figure out a clean\n\n// way to provide this functionality.\n\nfn to_internal_error<T: std::error::Error>(err: T) -> RpcStatus {\n\n error!(target: LOG_TARGET, \"Internal error: {}\", err);\n\n RpcStatus::general(&err.to_string())\n\n}\n\n\n\n#[tari_comms::async_trait]\n\nimpl MempoolService for MempoolRpcService {\n\n async fn get_stats(&self, _: Request<()>) -> Result<Response<proto::mempool::StatsResponse>, RpcStatus> {\n\n let stats = self.mempool().get_stats().await.map_err(to_internal_error)?;\n\n Ok(Response::new(stats.into()))\n\n }\n\n\n\n async fn get_state(&self, _: Request<()>) -> Result<Response<proto::mempool::StateResponse>, RpcStatus> {\n\n let state = self.mempool().get_state().await.map_err(to_internal_error)?;\n\n Ok(Response::new(state.try_into().map_err(|e: String| {\n\n error!(target: LOG_TARGET, \"Internal error: {}\", e);\n\n RpcStatus::general(&e)\n\n })?))\n\n }\n\n\n", "file_path": "base_layer/core/src/mempool/rpc/service.rs", "rank": 89, "score": 230650.65848549048 }, { "content": "pub fn find_header_with_achieved_difficulty(header: &mut BlockHeader, achieved_difficulty: Difficulty) {\n\n let mut num_tries = 0;\n\n\n\n while sha3_difficulty(header) != achieved_difficulty {\n\n header.nonce += 1;\n\n num_tries += 1;\n\n if num_tries > 10_000_000 {\n\n // Just in case we burn a hole in the CI server\n\n panic!(\"Could not find a nonce for achieved difficulty in time\");\n\n }\n\n }\n\n}\n\n\n\n/// Generate a block and add it to the database using the transactions provided. 
The header will be updated with the\n\n/// correct MMR roots.\n\n/// This function is not able to determine the unblinded outputs of a transaction, so if you are mixing using this\n\n/// with [generate_new_block], you must update the unblinded UTXO vector yourself.\n", "file_path": "base_layer/core/tests/helpers/block_builders.rs", "rank": 90, "score": 229642.30407471786 }, { "content": "/// For a given starting position calculate the parent and sibling positions\n\n/// for the branch/path from that position to the peak of the tree.\n\n/// We will use the sibling positions to generate the \"path\" of a Merkle proof.\n\npub fn family_branch(pos: usize, last_pos: usize) -> Vec<(usize, usize)> {\n\n // loop going up the tree, from node to parent, as long as we stay inside\n\n // the tree (as defined by last_pos).\n\n let (peak_map, height) = peak_map_height(pos);\n\n let mut peak = 1 << height;\n\n let mut branch = vec![];\n\n let mut current = pos;\n\n let mut sibling;\n\n while current < last_pos {\n\n if (peak_map & peak) == 0 {\n\n current += 2 * peak;\n\n sibling = current - 1;\n\n } else {\n\n current += 1;\n\n sibling = current - 2 * peak;\n\n };\n\n if current > last_pos {\n\n break;\n\n }\n\n branch.push((current, sibling));\n\n peak <<= 1;\n\n }\n\n branch\n\n}\n\n\n", "file_path": "base_layer/mmr/src/common.rs", "rank": 91, "score": 229413.25102358568 }, { "content": "pub fn convert_tcpip_multiaddr_to_socketaddr(addr: &Multiaddr) -> Result<SocketAddr, DnsResolverError> {\n\n match extract_protocols(addr)? 
{\n\n (Protocol::Ip4(host), Protocol::Tcp(port)) => Ok((host, port).into()),\n\n (Protocol::Ip6(host), Protocol::Tcp(port)) => Ok((host, port).into()),\n\n _ => Err(DnsResolverError::ExpectedTcpIpAddress(addr.clone())),\n\n }\n\n}\n\n\n", "file_path": "comms/core/src/transports/dns/common.rs", "rank": 92, "score": 229353.3255890658 }, { "content": "pub fn user_prompt(node_identity: &Arc<NodeIdentity>) -> anyhow::Result<PropagationOpts> {\n\n fn read_line<T: FromStr>(default: T) -> Option<T> {\n\n let mut buf = String::new();\n\n stdin().read_line(&mut buf).unwrap();\n\n let trimmed = buf.trim();\n\n if trimmed.is_empty() {\n\n Some(default)\n\n } else {\n\n trimmed.parse().ok()\n\n }\n\n }\n\n\n\n loop {\n\n println!(\"{}::{}\", node_identity.public_key(), node_identity.public_address());\n\n println!(\"{}\", node_identity);\n\n prompt!(\"Enter the peer:\");\n\n let peer = or_continue!(read_line(String::new()));\n\n let peer = or_continue!(parse_from_short_str(&peer, Default::default()));\n\n prompt!(\"Enter the number of messages to send (default: 1,000)\");\n\n let num_msgs = or_continue!(read_line::<usize>(1_000));\n", "file_path": "comms/dht/examples/propagation/prompt.rs", "rank": 93, "score": 229199.79395268095 }, { "content": "/// Checks that all inputs have matured at the given height\n\npub fn check_maturity(height: u64, inputs: &[TransactionInput]) -> Result<(), TransactionError> {\n\n if let Err(e) = inputs\n\n .iter()\n\n .map(|input| match input.is_mature_at(height) {\n\n Ok(mature) => {\n\n if mature {\n\n Ok(0)\n\n } else {\n\n warn!(\n\n target: LOG_TARGET,\n\n \"Input found that has not yet matured to spending height: {}\", input\n\n );\n\n Err(TransactionError::InputMaturity)\n\n }\n\n },\n\n Err(e) => Err(e),\n\n })\n\n .sum::<Result<usize, TransactionError>>()\n\n {\n\n return Err(e);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "base_layer/core/src/validation/helpers.rs", "rank": 94, "score": 229199.79395268095 }, { "content": "pub fn run(app: 
App<CrosstermBackend<Stdout>>) -> Result<(), ExitError> {\n\n let mut app = app;\n\n Handle::current()\n\n .block_on(async {\n\n trace!(target: LOG_TARGET, \"Refreshing transaction state\");\n\n app.app_state.refresh_transaction_state().await?;\n\n trace!(target: LOG_TARGET, \"Refreshing contacts state\");\n\n app.app_state.refresh_contacts_state().await?;\n\n trace!(target: LOG_TARGET, \"Refreshing connected peers state\");\n\n app.app_state.refresh_connected_peers_state().await?;\n\n trace!(target: LOG_TARGET, \"Checking connectivity\");\n\n app.app_state.check_connectivity().await;\n\n trace!(target: LOG_TARGET, \"Starting balance enquiry debouncer\");\n\n app.app_state.start_balance_enquiry_debouncer().await?;\n\n trace!(target: LOG_TARGET, \"Refreshing assets\");\n\n app.app_state.refresh_assets_state().await?;\n\n trace!(target: LOG_TARGET, \"Refreshing tokens\");\n\n app.app_state.refresh_tokens_state().await?;\n\n trace!(target: LOG_TARGET, \"Starting app state event monitor\");\n\n app.app_state.start_event_monitor(app.notifier.clone()).await;\n\n Result::<_, UiError>::Ok(())\n\n })\n\n .map_err(|e| ExitError::new(ExitCode::WalletError, e))?;\n\n crossterm_loop(app)\n\n}\n", "file_path": "applications/tari_console_wallet/src/ui/mod.rs", "rank": 95, "score": 229199.79395268095 }, { "content": "/// This is the main loop of the application UI using Crossterm based events\n\nfn crossterm_loop(mut app: App<CrosstermBackend<Stdout>>) -> Result<(), ExitError> {\n\n let events = CrosstermEvents::new();\n\n enable_raw_mode().map_err(|e| {\n\n error!(target: LOG_TARGET, \"Error enabling Raw Mode {}\", e);\n\n ExitCode::InterfaceError\n\n })?;\n\n let mut stdout = stdout();\n\n execute!(stdout, EnterAlternateScreen).map_err(|e| {\n\n error!(target: LOG_TARGET, \"Error creating stdout context. 
{}\", e);\n\n ExitCode::InterfaceError\n\n })?;\n\n let terminal_title = format!(\"Tari Console Wallet - Version {}\", consts::APP_VERSION);\n\n if let Err(e) = execute!(stdout, SetTitle(terminal_title.as_str())) {\n\n println!(\"Error setting terminal title. {}\", e)\n\n }\n\n\n\n let backend = CrosstermBackend::new(stdout);\n\n\n\n let mut terminal = Terminal::new(backend).map_err(|e| {\n\n error!(target: LOG_TARGET, \"Error creating Terminal context. {}\", e);\n", "file_path": "applications/tari_console_wallet/src/ui/mod.rs", "rank": 96, "score": 227765.81855640886 }, { "content": "pub fn err_empty(name: &str) -> MinerError {\n\n MinerError::EmptyObject(name.to_string())\n\n}\n", "file_path": "applications/tari_miner/src/errors.rs", "rank": 97, "score": 227545.55817647872 }, { "content": "pub fn random_string(len: usize) -> String {\n\n iter::repeat(())\n\n .map(|_| OsRng.sample(Alphanumeric) as char)\n\n .take(len)\n\n .collect()\n\n}\n\n\n", "file_path": "base_layer/wallet/src/test_utils.rs", "rank": 98, "score": 227526.86632398068 }, { "content": "/// Create a cryptographically secure password on length `len`\n\npub fn create_password(len: usize) -> String {\n\n let mut rng = rand::thread_rng();\n\n Alphanumeric.sample_iter(&mut rng).take(len).map(char::from).collect()\n\n}\n", "file_path": "applications/launchpad/backend/src/docker/helpers.rs", "rank": 99, "score": 227526.86632398068 } ]
Rust
polars/polars-lazy/src/physical_plan/expressions/aggregation.rs
qiemem/polars
48ea1ed035a44d188d17f2d01ee07f671df27360
use crate::physical_plan::state::ExecutionState; use crate::physical_plan::PhysicalAggregation; use crate::prelude::*; use polars_arrow::export::arrow::{array::*, compute::concatenate::concatenate}; use polars_arrow::prelude::QuantileInterpolOptions; use polars_core::frame::groupby::{fmt_groupby_column, GroupByMethod, GroupsProxy}; use polars_core::{prelude::*, POOL}; use std::borrow::Cow; use std::sync::Arc; pub(crate) struct AggregationExpr { pub(crate) expr: Arc<dyn PhysicalExpr>, pub(crate) agg_type: GroupByMethod, } impl AggregationExpr { pub fn new(expr: Arc<dyn PhysicalExpr>, agg_type: GroupByMethod) -> Self { Self { expr, agg_type } } } impl PhysicalExpr for AggregationExpr { fn as_expression(&self) -> &Expr { unimplemented!() } fn evaluate(&self, _df: &DataFrame, _state: &ExecutionState) -> Result<Series> { unimplemented!() } #[allow(clippy::ptr_arg)] fn evaluate_on_groups<'a>( &self, df: &DataFrame, groups: &'a GroupsProxy, state: &ExecutionState, ) -> Result<AggregationContext<'a>> { let out = self.aggregate(df, groups, state)?.ok_or_else(|| { PolarsError::ComputeError("Aggregation did not return a Series".into()) })?; Ok(AggregationContext::new(out, Cow::Borrowed(groups), true)) } fn to_field(&self, input_schema: &Schema) -> Result<Field> { let field = self.expr.to_field(input_schema)?; let new_name = fmt_groupby_column(field.name(), self.agg_type); Ok(Field::new(&new_name, field.data_type().clone())) } fn as_agg_expr(&self) -> Result<&dyn PhysicalAggregation> { Ok(self) } } fn rename_option_series(opt: Option<Series>, name: &str) -> Option<Series> { opt.map(|mut s| { s.rename(name); s }) } impl PhysicalAggregation for AggregationExpr { fn aggregate( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { let mut ac = self.expr.evaluate_on_groups(df, groups, state)?; let new_name = fmt_groupby_column(ac.series().name(), self.agg_type); match self.agg_type { GroupByMethod::Min => { let agg_s = 
ac.flat_naive().into_owned().agg_min(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Max => { let agg_s = ac.flat_naive().into_owned().agg_max(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Median => { let agg_s = ac.flat_naive().into_owned().agg_median(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Mean => { let agg_s = ac.flat_naive().into_owned().agg_mean(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Sum => { let agg_s = ac.flat_naive().into_owned().agg_sum(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Count => { let mut ca = ac.groups.group_count(); ca.rename(&new_name); Ok(Some(ca.into_series())) } GroupByMethod::First => { let mut agg_s = ac.flat_naive().into_owned().agg_first(ac.groups()); agg_s.rename(&new_name); Ok(Some(agg_s)) } GroupByMethod::Last => { let mut agg_s = ac.flat_naive().into_owned().agg_last(ac.groups()); agg_s.rename(&new_name); Ok(Some(agg_s)) } GroupByMethod::NUnique => { let opt_agg = ac.flat_naive().into_owned().agg_n_unique(ac.groups()); let opt_agg = opt_agg.map(|mut agg| { agg.rename(&new_name); agg.into_series() }); Ok(opt_agg) } GroupByMethod::List => { let agg = ac.aggregated(); Ok(rename_option_series(Some(agg), &new_name)) } GroupByMethod::Groups => { let mut column: ListChunked = ac.groups().as_list_chunked(); column.rename(&new_name); Ok(Some(column.into_series())) } GroupByMethod::Std => { let agg_s = ac.flat_naive().into_owned().agg_std(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Var => { let agg_s = ac.flat_naive().into_owned().agg_var(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Quantile(_, _) => { unimplemented!() } } } fn evaluate_partitioned( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Vec<Series>>> { match self.agg_type { GroupByMethod::Mean => { let series = self.expr.evaluate(df, 
state)?; let mut new_name = fmt_groupby_column(series.name(), self.agg_type); let agg_s = series.agg_sum(groups); if let Some(agg_s) = agg_s { let mut agg_s = agg_s.cast(&DataType::Float64)?; agg_s.rename(&new_name); new_name.push_str("__POLARS_MEAN_COUNT"); let mut count_s = series.agg_valid_count(groups).unwrap(); count_s.rename(&new_name); Ok(Some(vec![agg_s, count_s])) } else { Ok(None) } } GroupByMethod::List => { let series = self.expr.evaluate(df, state)?; let new_name = fmt_groupby_column(series.name(), self.agg_type); let opt_agg = series.agg_list(groups); Ok(opt_agg.map(|mut s| { s.rename(&new_name); vec![s] })) } _ => PhysicalAggregation::aggregate(self, df, groups, state) .map(|opt| opt.map(|s| vec![s])), } } fn evaluate_partitioned_final( &self, final_df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { match self.agg_type { GroupByMethod::Mean => { let series = self.expr.evaluate(final_df, state)?; let count_name = format!("{}__POLARS_MEAN_COUNT", series.name()); let new_name = fmt_groupby_column(series.name(), self.agg_type); let count = final_df.column(&count_name).unwrap(); let (agg_count, agg_s) = POOL.join(|| count.agg_sum(groups), || series.agg_sum(groups)); let agg_s = agg_s.map(|agg_s| &agg_s / &agg_count.unwrap()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::List => { let series = self.expr.evaluate(final_df, state)?; let ca = series.list().unwrap(); let new_name = fmt_groupby_column(ca.name(), self.agg_type); let mut values = Vec::with_capacity(groups.len()); let mut can_fast_explode = true; let mut offsets = Vec::<i64>::with_capacity(groups.len() + 1); let mut length_so_far = 0i64; offsets.push(length_so_far); for (_, idx) in groups.idx_ref() { let ca = unsafe { ca.take_unchecked(idx.iter().map(|i| *i as usize).into()) }; let s = ca.explode()?; length_so_far += s.len() as i64; offsets.push(length_so_far); values.push(s.chunks()[0].clone()); if s.len() == 0 { can_fast_explode = false; } 
} let vals = values.iter().map(|arr| &**arr).collect::<Vec<_>>(); let values: ArrayRef = concatenate(&vals).unwrap().into(); let data_type = ListArray::<i64>::default_datatype(values.data_type().clone()); let arr = Arc::new(ListArray::<i64>::from_data( data_type, offsets.into(), values, None, )) as ArrayRef; let mut ca = ListChunked::new_from_chunks(&new_name, vec![arr]); if can_fast_explode { ca.set_fast_explode() } Ok(Some(ca.into_series())) } _ => PhysicalAggregation::aggregate(self, final_df, groups, state), } } } impl PhysicalAggregation for AggQuantileExpr { fn aggregate( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { let series = self.expr.evaluate(df, state)?; let new_name = fmt_groupby_column( series.name(), GroupByMethod::Quantile(self.quantile, self.interpol), ); let opt_agg = series.agg_quantile(groups, self.quantile, self.interpol); let opt_agg = opt_agg.map(|mut agg| { agg.rename(&new_name); agg.into_series() }); Ok(opt_agg) } } impl PhysicalAggregation for CastExpr { fn aggregate( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { let agg_expr = self.input.as_agg_expr()?; let opt_agg = agg_expr.aggregate(df, groups, state)?; opt_agg.map(|agg| agg.cast(&self.data_type)).transpose() } } pub struct AggQuantileExpr { pub(crate) expr: Arc<dyn PhysicalExpr>, pub(crate) quantile: f64, pub(crate) interpol: QuantileInterpolOptions, } impl AggQuantileExpr { pub fn new( expr: Arc<dyn PhysicalExpr>, quantile: f64, interpol: QuantileInterpolOptions, ) -> Self { Self { expr, quantile, interpol, } } } impl PhysicalExpr for AggQuantileExpr { fn as_expression(&self) -> &Expr { unimplemented!() } fn evaluate(&self, _df: &DataFrame, _state: &ExecutionState) -> Result<Series> { unimplemented!() } #[allow(clippy::ptr_arg)] fn evaluate_on_groups<'a>( &self, _df: &DataFrame, _groups: &'a GroupsProxy, _state: &ExecutionState, ) -> Result<AggregationContext<'a>> { 
unimplemented!() } fn to_field(&self, input_schema: &Schema) -> Result<Field> { let field = self.expr.to_field(input_schema)?; let new_name = fmt_groupby_column( field.name(), GroupByMethod::Quantile(self.quantile, self.interpol), ); Ok(Field::new(&new_name, field.data_type().clone())) } fn as_agg_expr(&self) -> Result<&dyn PhysicalAggregation> { Ok(self) } }
use crate::physical_plan::state::ExecutionState; use crate::physical_plan::PhysicalAggregation; use crate::prelude::*; use polars_arrow::export::arrow::{array::*, compute::concatenate::concatenate}; use polars_arrow::prelude::QuantileInterpolOptions; use polars_core::frame::groupby::{fmt_groupby_column, GroupByMethod, GroupsProxy}; use polars_core::{prelude::*, POOL}; use std::borrow::Cow; use std::sync::Arc; pub(crate) struct AggregationExpr { pub(crate) expr: Arc<dyn PhysicalExpr>, pub(crate) agg_type: GroupByMethod, } impl AggregationExpr { pub fn new(expr: Arc<dyn PhysicalExpr>, agg_type: GroupByMethod) -> Self { Self { expr, agg_type } } } impl PhysicalExpr for AggregationExpr { fn as_expression(&self) -> &Expr { unimplemented!() } fn evaluate(&self, _df: &DataFrame, _state: &ExecutionState) -> Result<Series> { unimplemented!() } #[allow(clippy::ptr_arg)] fn evaluate_on_groups<'a>( &self, df: &DataFrame, groups: &'a GroupsProxy, state: &ExecutionState, ) -> Result<AggregationContext<'a>> { let out = self.aggregate(df, groups, state)?.ok_or_else(|| { PolarsError::ComputeError("Aggregation did not return a Series".into()) })?; Ok(AggregationContext::new(out, Cow::Borrowed(groups), true)) } fn to_field(&self, input_schema: &Schema) -> Result<Field> { let field = self.expr.to_field(input_schema)?; let new_name = fmt_groupby_column(field.name(), self.agg_type); Ok(Field::new(&new_name, field.data_type().clone())) } fn as_agg_expr(&self) -> Result<&dyn PhysicalAggregation> { Ok(self) } } fn rename_option_series(opt: Option<Series>, name: &str) -> Option<Series> { opt.map(|mut s| { s.rename(name); s }) } impl PhysicalAggregation for AggregationExpr { fn aggregate( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { let mut ac = self.expr.evaluate_on_groups(df, groups, state)?; let new_name = fmt_groupby_column(ac.series().name(), self.agg_type); match self.agg_type { GroupByMethod::Min => { let agg_s = 
ac.flat_naive().into_owned().agg_min(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Max => { let agg_s = ac.flat_naive().into_owned().agg_max(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Median => { let agg_s = ac.flat_naive().into_owned().agg_median(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Mean => { let agg_s = ac.flat_naive().into_owned().agg_mean(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Sum => { let agg_s = ac.flat_naive().into_owned().agg_sum(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Count => { let mut ca = ac.groups.group_count(); ca.rename(&new_name); Ok(Some(ca.into_series())) } GroupByMethod::First => { let mut agg_s = ac.flat_naive().into_owned().agg_first(ac.groups()); agg_s.rename(&new_name); Ok(Some(agg_s)) } GroupByMethod::Last => { let mut agg_s = ac.flat_naive().into_owned().agg_last(ac.groups()); agg_s.rename(&new_name); Ok(Some(agg_s)) } GroupByMethod::NUnique => { let opt_agg = ac.flat_naive().into_owned().agg_n_unique(ac.groups()); let opt_agg = opt_agg.map(|mut agg| { agg.rename(&new_name); agg.
GroupByMethod::Mean => { let series = self.expr.evaluate(final_df, state)?; let count_name = format!("{}__POLARS_MEAN_COUNT", series.name()); let new_name = fmt_groupby_column(series.name(), self.agg_type); let count = final_df.column(&count_name).unwrap(); let (agg_count, agg_s) = POOL.join(|| count.agg_sum(groups), || series.agg_sum(groups)); let agg_s = agg_s.map(|agg_s| &agg_s / &agg_count.unwrap()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::List => { let series = self.expr.evaluate(final_df, state)?; let ca = series.list().unwrap(); let new_name = fmt_groupby_column(ca.name(), self.agg_type); let mut values = Vec::with_capacity(groups.len()); let mut can_fast_explode = true; let mut offsets = Vec::<i64>::with_capacity(groups.len() + 1); let mut length_so_far = 0i64; offsets.push(length_so_far); for (_, idx) in groups.idx_ref() { let ca = unsafe { ca.take_unchecked(idx.iter().map(|i| *i as usize).into()) }; let s = ca.explode()?; length_so_far += s.len() as i64; offsets.push(length_so_far); values.push(s.chunks()[0].clone()); if s.len() == 0 { can_fast_explode = false; } } let vals = values.iter().map(|arr| &**arr).collect::<Vec<_>>(); let values: ArrayRef = concatenate(&vals).unwrap().into(); let data_type = ListArray::<i64>::default_datatype(values.data_type().clone()); let arr = Arc::new(ListArray::<i64>::from_data( data_type, offsets.into(), values, None, )) as ArrayRef; let mut ca = ListChunked::new_from_chunks(&new_name, vec![arr]); if can_fast_explode { ca.set_fast_explode() } Ok(Some(ca.into_series())) } _ => PhysicalAggregation::aggregate(self, final_df, groups, state), } } } impl PhysicalAggregation for AggQuantileExpr { fn aggregate( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { let series = self.expr.evaluate(df, state)?; let new_name = fmt_groupby_column( series.name(), GroupByMethod::Quantile(self.quantile, self.interpol), ); let opt_agg = series.agg_quantile(groups, 
self.quantile, self.interpol); let opt_agg = opt_agg.map(|mut agg| { agg.rename(&new_name); agg.into_series() }); Ok(opt_agg) } } impl PhysicalAggregation for CastExpr { fn aggregate( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { let agg_expr = self.input.as_agg_expr()?; let opt_agg = agg_expr.aggregate(df, groups, state)?; opt_agg.map(|agg| agg.cast(&self.data_type)).transpose() } } pub struct AggQuantileExpr { pub(crate) expr: Arc<dyn PhysicalExpr>, pub(crate) quantile: f64, pub(crate) interpol: QuantileInterpolOptions, } impl AggQuantileExpr { pub fn new( expr: Arc<dyn PhysicalExpr>, quantile: f64, interpol: QuantileInterpolOptions, ) -> Self { Self { expr, quantile, interpol, } } } impl PhysicalExpr for AggQuantileExpr { fn as_expression(&self) -> &Expr { unimplemented!() } fn evaluate(&self, _df: &DataFrame, _state: &ExecutionState) -> Result<Series> { unimplemented!() } #[allow(clippy::ptr_arg)] fn evaluate_on_groups<'a>( &self, _df: &DataFrame, _groups: &'a GroupsProxy, _state: &ExecutionState, ) -> Result<AggregationContext<'a>> { unimplemented!() } fn to_field(&self, input_schema: &Schema) -> Result<Field> { let field = self.expr.to_field(input_schema)?; let new_name = fmt_groupby_column( field.name(), GroupByMethod::Quantile(self.quantile, self.interpol), ); Ok(Field::new(&new_name, field.data_type().clone())) } fn as_agg_expr(&self) -> Result<&dyn PhysicalAggregation> { Ok(self) } }
into_series() }); Ok(opt_agg) } GroupByMethod::List => { let agg = ac.aggregated(); Ok(rename_option_series(Some(agg), &new_name)) } GroupByMethod::Groups => { let mut column: ListChunked = ac.groups().as_list_chunked(); column.rename(&new_name); Ok(Some(column.into_series())) } GroupByMethod::Std => { let agg_s = ac.flat_naive().into_owned().agg_std(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Var => { let agg_s = ac.flat_naive().into_owned().agg_var(ac.groups()); Ok(rename_option_series(agg_s, &new_name)) } GroupByMethod::Quantile(_, _) => { unimplemented!() } } } fn evaluate_partitioned( &self, df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Vec<Series>>> { match self.agg_type { GroupByMethod::Mean => { let series = self.expr.evaluate(df, state)?; let mut new_name = fmt_groupby_column(series.name(), self.agg_type); let agg_s = series.agg_sum(groups); if let Some(agg_s) = agg_s { let mut agg_s = agg_s.cast(&DataType::Float64)?; agg_s.rename(&new_name); new_name.push_str("__POLARS_MEAN_COUNT"); let mut count_s = series.agg_valid_count(groups).unwrap(); count_s.rename(&new_name); Ok(Some(vec![agg_s, count_s])) } else { Ok(None) } } GroupByMethod::List => { let series = self.expr.evaluate(df, state)?; let new_name = fmt_groupby_column(series.name(), self.agg_type); let opt_agg = series.agg_list(groups); Ok(opt_agg.map(|mut s| { s.rename(&new_name); vec![s] })) } _ => PhysicalAggregation::aggregate(self, df, groups, state) .map(|opt| opt.map(|s| vec![s])), } } fn evaluate_partitioned_final( &self, final_df: &DataFrame, groups: &GroupsProxy, state: &ExecutionState, ) -> Result<Option<Series>> { match self.agg_type {
random
[ { "content": "/// Create a Column Expression based on a column name.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `name` - A string slice that holds the name of the column\n\n///\n\n/// # Examples\n\n///\n\n/// ```ignore\n\n/// // select a column name\n\n/// col(\"foo\")\n\n/// ```\n\n///\n\n/// ```ignore\n\n/// // select all columns by using a wildcard\n\n/// col(\"*\")\n\n/// ```\n\n///\n\n/// ```ignore\n\n/// // select specific column by writing a regular expression that starts with `^` and ends with `$`\n\n/// // only if regex features is activated\n\n/// col(\"^foo.*$\")\n\n/// ```\n\npub fn col(name: &str) -> Expr {\n\n match name {\n\n \"*\" => Expr::Wildcard,\n\n _ => Expr::Column(Arc::from(name)),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 0, "score": 368290.67475362506 }, { "content": "/// Find the median of all the values in this Expression.\n\npub fn median(name: &str) -> Expr {\n\n col(name).median()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 1, "score": 368279.3382557603 }, { "content": "/// Find the minimum of all the values in this Expression.\n\npub fn min(name: &str) -> Expr {\n\n col(name).min()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 2, "score": 368279.3382557603 }, { "content": "/// Find the mean of all the values in this Expression.\n\npub fn mean(name: &str) -> Expr {\n\n col(name).mean()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 3, "score": 368279.3382557603 }, { "content": "/// Sum all the values in this Expression.\n\npub fn sum(name: &str) -> Expr {\n\n col(name).sum()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 4, "score": 368279.3382557604 }, { "content": "/// Find the maximum of all the values in this Expression.\n\npub fn max(name: &str) -> Expr {\n\n col(name).max()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 5, "score": 368279.3382557603 }, { 
"content": "/// Find the mean of all the values in this Expression.\n\npub fn avg(name: &str) -> Expr {\n\n col(name).mean()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 6, "score": 368279.3382557604 }, { "content": "/// Count the number of values in this Expression.\n\npub fn count(name: &str) -> Expr {\n\n match name {\n\n \"\" => col(name).count().alias(\"count\"),\n\n _ => col(name).count(),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 7, "score": 368279.3382557603 }, { "content": "/// This function takes an expression containing a regex in `col(\"..\")` and expands the columns\n\n/// that are selected by that regex in `result`.\n\nfn expand_regex(expr: &Expr, result: &mut Vec<Expr>, schema: &Schema, pattern: &str) {\n\n let re = regex::Regex::new(pattern)\n\n .unwrap_or_else(|_| panic!(\"invalid regular expression in column: {}\", pattern));\n\n for field in schema.fields() {\n\n let name = field.name();\n\n if re.is_match(name) {\n\n let mut new_expr = expr.clone();\n\n\n\n new_expr.mutate().apply(|e| match &e {\n\n Expr::Column(_) => {\n\n *e = Expr::Column(Arc::from(name.as_str()));\n\n false\n\n }\n\n _ => true,\n\n });\n\n\n\n let new_expr = rewrite_keep_name_and_sufprefix(new_expr);\n\n result.push(new_expr)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"regex\")]\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 8, "score": 345939.9991963162 }, { "content": "/// Take an expression with a root: col(\"*\") and copies that expression for all columns in the schema,\n\n/// with the exclusion of the `names` in the exclude expression.\n\n/// The resulting expressions are written to result.\n\nfn replace_wilcard(expr: &Expr, result: &mut Vec<Expr>, exclude: &[Arc<str>], schema: &Schema) {\n\n for field in schema.fields() {\n\n let name = field.name();\n\n if !exclude.iter().any(|exluded| &**exluded == name) {\n\n let new_expr = replace_wildcard_with_column(expr.clone(), 
Arc::from(name.as_str()));\n\n let new_expr = rewrite_keep_name_and_sufprefix(new_expr);\n\n result.push(new_expr)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"regex\")]\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 9, "score": 336576.7100818606 }, { "content": "pub fn col(name: &str) -> PyExpr {\n\n dsl::col(name).into()\n\n}\n\n\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 10, "score": 336091.4116359639 }, { "content": "// Formatting functions used in eager and lazy code for renaming grouped columns\n\npub fn fmt_groupby_column(name: &str, method: GroupByMethod) -> String {\n\n use GroupByMethod::*;\n\n match method {\n\n Min => format!(\"{}_min\", name),\n\n Max => format!(\"{}_max\", name),\n\n Median => format!(\"{}_median\", name),\n\n Mean => format!(\"{}_mean\", name),\n\n First => format!(\"{}_first\", name),\n\n Last => format!(\"{}_last\", name),\n\n Sum => format!(\"{}_sum\", name),\n\n Groups => \"groups\".to_string(),\n\n NUnique => format!(\"{}_n_unique\", name),\n\n Count => format!(\"{}_count\", name),\n\n List => format!(\"{}_agg_list\", name),\n\n Quantile(quantile, _interpol) => format!(\"{}_quantile_{:.2}\", name, quantile),\n\n Std => format!(\"{}_agg_std\", name),\n\n Var => format!(\"{}_agg_var\", name),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/mod.rs", "rank": 11, "score": 312200.9939741146 }, { "content": "/// This function searches for a regex expression in `col(\"..\")` and expands the columns\n\n/// that are selected by that regex in `result`. 
The regex should start with `^` and end with `$`.\n\nfn replace_regex(expr: &Expr, result: &mut Vec<Expr>, schema: &Schema) {\n\n let roots = expr_to_root_column_names(expr);\n\n // only in simple expression (no binary expression)\n\n // we pattern match regex columns\n\n if roots.len() == 1 {\n\n let name = &*roots[0];\n\n if name.starts_with('^') && name.ends_with('$') {\n\n expand_regex(expr, result, schema, name)\n\n } else {\n\n let expr = rewrite_keep_name_and_sufprefix(expr.clone());\n\n result.push(expr)\n\n }\n\n } else {\n\n let expr = rewrite_keep_name_and_sufprefix(expr.clone());\n\n result.push(expr)\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 12, "score": 307320.678418032 }, { "content": "/// Find a specific quantile of all the values in this Expression.\n\npub fn quantile(name: &str, quantile: f64, interpol: QuantileInterpolOptions) -> Expr {\n\n col(name).quantile(quantile, interpol)\n\n}\n\n\n\nmacro_rules! prepare_binary_function {\n\n ($f:ident) => {\n\n move |s: &mut [Series]| {\n\n let s0 = std::mem::take(&mut s[0]);\n\n let s1 = std::mem::take(&mut s[1]);\n\n\n\n $f(s0, s1)\n\n }\n\n };\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 13, "score": 301282.41358517617 }, { "content": "/// Horizontally concat string columns in linear time\n\npub fn concat_str(s: Vec<Expr>, sep: &str) -> Expr {\n\n let sep = sep.to_string();\n\n let function = NoEq::new(Arc::new(move |s: &mut [Series]| {\n\n polars_core::functions::concat_str(s, &sep).map(|ca| ca.into_series())\n\n }) as Arc<dyn SeriesUdf>);\n\n Expr::Function {\n\n input: s,\n\n function,\n\n output_type: GetOutput::from_type(DataType::Utf8),\n\n options: FunctionOptions {\n\n collect_groups: ApplyOptions::ApplyGroups,\n\n input_wildcard_expansion: true,\n\n auto_explode: true,\n\n fmt_str: \"concat_by\",\n\n },\n\n }\n\n}\n\n\n\n/// Concat lists entries.\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 14, 
"score": 299415.1260497139 }, { "content": "#[allow(unused_variables)]\n\nfn prepare_excluded(expr: &Expr, schema: &Schema, keys: &[Expr]) -> Vec<Arc<str>> {\n\n let mut exclude = vec![];\n\n expr.into_iter().for_each(|e| {\n\n if let Expr::Exclude(_, to_exclude) = e {\n\n #[cfg(feature = \"regex\")]\n\n {\n\n // instead of matching the names for regex patterns\n\n // and expanding the matches in the schema we\n\n // reuse the `replace_regex` function. This is a bit\n\n // slower but DRY.\n\n let mut buf = vec![];\n\n for to_exclude_single in to_exclude {\n\n match to_exclude_single {\n\n Excluded::Name(name) => {\n\n let e = Expr::Column(name.clone());\n\n replace_regex(&e, &mut buf, schema);\n\n for col in buf.drain(..) {\n\n if let Expr::Column(name) = col {\n\n exclude.push(name)\n\n }\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 15, "score": 298351.55365795444 }, { "content": "fn str_concat_impl<I, T>(mut iter: I, delimiter: &str, name: &str) -> Utf8Chunked\n\nwhere\n\n I: Iterator<Item = Option<T>>,\n\n T: Display,\n\n{\n\n let mut buf = String::with_capacity(iter.size_hint().0 * 5);\n\n\n\n if let Some(first) = iter.next() {\n\n fmt_and_write(first, &mut buf);\n\n\n\n for val in iter {\n\n buf.push_str(delimiter);\n\n fmt_and_write(val, &mut buf);\n\n }\n\n }\n\n buf.shrink_to_fit();\n\n let buf = buf.into_bytes();\n\n let offsets = vec![0, buf.len() as i64];\n\n let arr = unsafe { Utf8Array::from_data_unchecked_default(offsets.into(), buf.into(), None) };\n\n Utf8Chunked::new_from_chunks(name, vec![Arc::new(arr)])\n", "file_path": "polars/polars-core/src/chunked_array/ops/concat_str.rs", "rank": 16, "score": 293319.9032603654 }, { "content": "fn max_exprs_impl(mut exprs: Vec<Expr>) -> Expr {\n\n if exprs.len() == 1 {\n\n return std::mem::take(&mut exprs[0]);\n\n }\n\n\n\n let first = std::mem::take(&mut exprs[0]);\n\n first\n\n .map_many(\n\n |s| {\n\n let s = s.to_vec();\n\n let df = DataFrame::new_no_checks(s);\n\n 
df.hmax().map(|s| s.unwrap())\n\n },\n\n &exprs[1..],\n\n GetOutput::super_type(),\n\n )\n\n .alias(\"max\")\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 17, "score": 288022.21029196016 }, { "content": "fn min_exprs_impl(mut exprs: Vec<Expr>) -> Expr {\n\n if exprs.len() == 1 {\n\n return std::mem::take(&mut exprs[0]);\n\n }\n\n\n\n let first = std::mem::take(&mut exprs[0]);\n\n first\n\n .map_many(\n\n |s| {\n\n let s = s.to_vec();\n\n let df = DataFrame::new_no_checks(s);\n\n df.hmin().map(|s| s.unwrap())\n\n },\n\n &exprs[1..],\n\n GetOutput::super_type(),\n\n )\n\n .alias(\"min\")\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 18, "score": 288022.21029196016 }, { "content": "/// replace `DtypeColumn` with `col(\"foo\")..col(\"bar\")`\n\nfn expand_dtypes(expr: &Expr, result: &mut Vec<Expr>, schema: &Schema, dtypes: &[DataType]) {\n\n for dtype in dtypes {\n\n for field in schema.fields().iter().filter(|f| f.data_type() == dtype) {\n\n let name = field.name();\n\n\n\n let mut new_expr = expr.clone();\n\n new_expr.mutate().apply(|e| {\n\n if let Expr::DtypeColumn(_) = &e {\n\n *e = Expr::Column(Arc::from(name.as_str()));\n\n }\n\n // always keep iterating all inputs\n\n true\n\n });\n\n\n\n let new_expr = rewrite_keep_name_and_sufprefix(new_expr);\n\n result.push(new_expr)\n\n }\n\n }\n\n}\n\n\n\n// schema is not used if regex not activated\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 19, "score": 287133.985701003 }, { "content": "fn finish_groupby(gb: GroupBy, agg: &str) -> Result<DataFrame> {\n\n match agg {\n\n \"min\" => gb.min(),\n\n \"max\" => gb.max(),\n\n \"mean\" => gb.mean(),\n\n \"first\" => gb.first(),\n\n \"last\" => gb.last(),\n\n \"sum\" => gb.sum(),\n\n \"count\" => gb.count(),\n\n \"n_unique\" => gb.n_unique(),\n\n \"median\" => gb.median(),\n\n \"agg_list\" => gb.agg_list(),\n\n \"groups\" => gb.groups(),\n\n \"std\" => gb.std(),\n\n \"var\" => gb.var(),\n\n a 
=> Err(PolarsError::ComputeError(\n\n format!(\"agg fn {} does not exists\", a).into(),\n\n )),\n\n }\n\n}\n", "file_path": "nodejs-polars/src/dataframe/frame.rs", "rank": 20, "score": 282398.6330206316 }, { "content": "fn finish_groupby(gb: GroupBy, agg: &str) -> PyResult<PyDataFrame> {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n\n\n\n let df = py.allow_threads(|| match agg {\n\n \"min\" => gb.min(),\n\n \"max\" => gb.max(),\n\n \"mean\" => gb.mean(),\n\n \"first\" => gb.first(),\n\n \"last\" => gb.last(),\n\n \"sum\" => gb.sum(),\n\n \"count\" => gb.count(),\n\n \"n_unique\" => gb.n_unique(),\n\n \"median\" => gb.median(),\n\n \"agg_list\" => gb.agg_list(),\n\n \"groups\" => gb.groups(),\n\n \"std\" => gb.std(),\n\n \"var\" => gb.var(),\n\n a => Err(PolarsError::ComputeError(\n\n format!(\"agg fn {} does not exists\", a).into(),\n\n )),\n\n });\n\n\n\n let df = df.map_err(PyPolarsEr::from)?;\n\n Ok(PyDataFrame::new(df))\n\n}\n", "file_path": "py-polars/src/dataframe.rs", "rank": 21, "score": 277557.5693207347 }, { "content": "#[pyfunction]\n\nfn col(name: &str) -> dsl::PyExpr {\n\n dsl::col(name)\n\n}\n\n\n", "file_path": "py-polars/src/lib.rs", "rank": 22, "score": 268671.7103531122 }, { "content": "/// [Not](Expr::Not) expression.\n\npub fn not(expr: Expr) -> Expr {\n\n Expr::Not(Box::new(expr))\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 23, "score": 265532.1291983353 }, { "content": "/// [IsNotNull](Expr::IsNotNull) expression.\n\npub fn is_not_null(expr: Expr) -> Expr {\n\n Expr::IsNotNull(Box::new(expr))\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 24, "score": 262242.0661095804 }, { "content": "/// [IsNull](Expr::IsNotNull) expression\n\npub fn is_null(expr: Expr) -> Expr {\n\n Expr::IsNull(Box::new(expr))\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 25, "score": 262242.0661095804 }, { "content": "/// replace `columns([\"A\", \"B\"])..` with 
`col(\"A\")..`, `col(\"B\")..`\n\nfn expand_columns(expr: &Expr, result: &mut Vec<Expr>, names: &[String]) {\n\n for name in names {\n\n let mut new_expr = expr.clone();\n\n new_expr.mutate().apply(|e| {\n\n if let Expr::Columns(_) = &e {\n\n *e = Expr::Column(Arc::from(name.as_str()));\n\n }\n\n // always keep iterating all inputs\n\n true\n\n });\n\n\n\n let new_expr = rewrite_keep_name_and_sufprefix(new_expr);\n\n result.push(new_expr)\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 26, "score": 261246.05159265758 }, { "content": "#[cfg(not(feature = \"dtype-u8\"))]\n\nfn dummies_helper(mut groups: Vec<u32>, len: usize, name: &str) -> Int32Chunked {\n\n groups.sort_unstable();\n\n\n\n // let mut group_member_iter = groups.into_iter();\n\n let mut av: Vec<_> = (0..len).map(|_| 0i32).collect();\n\n\n\n for idx in groups {\n\n let elem = unsafe { av.get_unchecked_mut(idx as usize) };\n\n *elem = 1;\n\n }\n\n\n\n ChunkedArray::from_vec(name, av)\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/unique/mod.rs", "rank": 27, "score": 257863.28933172242 }, { "content": "/// Select multiple columns by name\n\npub fn cols(names: Vec<String>) -> Expr {\n\n Expr::Columns(names)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 28, "score": 256331.07514839643 }, { "content": "#[cfg(feature = \"dtype-u8\")]\n\nfn dummies_helper(mut groups: Vec<u32>, len: usize, name: &str) -> UInt8Chunked {\n\n groups.sort_unstable();\n\n\n\n let mut av: Vec<_> = (0..len).map(|_| 0u8).collect();\n\n\n\n for idx in groups {\n\n let elem = unsafe { av.get_unchecked_mut(idx as usize) };\n\n *elem = 1;\n\n }\n\n\n\n ChunkedArray::from_vec(name, av)\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/unique/mod.rs", "rank": 29, "score": 254251.06623439584 }, { "content": "/// Accumulate over multiple columns horizontally / row wise.\n\npub fn fold_exprs<F: 'static, E: AsRef<[Expr]>>(mut acc: Expr, 
f: F, exprs: E) -> Expr\n\nwhere\n\n F: Fn(Series, Series) -> Result<Series> + Send + Sync + Clone,\n\n{\n\n let mut exprs = exprs.as_ref().to_vec();\n\n if exprs.iter().any(has_wildcard) {\n\n exprs.push(acc);\n\n\n\n let function = NoEq::new(Arc::new(move |series: &mut [Series]| {\n\n let mut series = series.to_vec();\n\n let mut acc = series.pop().unwrap();\n\n\n\n for s in series {\n\n acc = f(acc, s)?;\n\n }\n\n Ok(acc)\n\n }) as Arc<dyn SeriesUdf>);\n\n\n\n // Todo! make sure that output type is correct\n\n Expr::Function {\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 30, "score": 254022.70302926254 }, { "content": "/// Selects all columns\n\npub fn all() -> Expr {\n\n Expr::Wildcard\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 31, "score": 249553.01118631073 }, { "content": "/// Compute the covariance between two columns.\n\npub fn cov(a: Expr, b: Expr) -> Expr {\n\n let name = \"cov\";\n\n let function = move |a: Series, b: Series| {\n\n let s = match a.dtype() {\n\n DataType::Float32 => {\n\n let ca_a = a.f32().unwrap();\n\n let ca_b = b.f32().unwrap();\n\n Series::new(name, &[polars_core::functions::cov(ca_a, ca_b)])\n\n }\n\n DataType::Float64 => {\n\n let ca_a = a.f64().unwrap();\n\n let ca_b = b.f64().unwrap();\n\n Series::new(name, &[polars_core::functions::cov(ca_a, ca_b)])\n\n }\n\n _ => {\n\n let a = a.cast(&DataType::Float64)?;\n\n let b = b.cast(&DataType::Float64)?;\n\n let ca_a = a.f64().unwrap();\n\n let ca_b = b.f64().unwrap();\n\n Series::new(name, &[polars_core::functions::cov(ca_a, ca_b)])\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 32, "score": 249235.98217192554 }, { "content": "/// Compute the pearson correlation between two columns.\n\npub fn pearson_corr(a: Expr, b: Expr) -> Expr {\n\n let name = \"pearson_corr\";\n\n let function = move |a: Series, b: Series| {\n\n let s = match a.dtype() {\n\n DataType::Float32 => {\n\n let ca_a = a.f32().unwrap();\n\n let ca_b = 
b.f32().unwrap();\n\n Series::new(name, &[polars_core::functions::pearson_corr(ca_a, ca_b)])\n\n }\n\n DataType::Float64 => {\n\n let ca_a = a.f64().unwrap();\n\n let ca_b = b.f64().unwrap();\n\n Series::new(name, &[polars_core::functions::pearson_corr(ca_a, ca_b)])\n\n }\n\n _ => {\n\n let a = a.cast(&DataType::Float64)?;\n\n let b = b.cast(&DataType::Float64)?;\n\n let ca_a = a.f64().unwrap();\n\n let ca_b = b.f64().unwrap();\n\n Series::new(name, &[polars_core::functions::pearson_corr(ca_a, ca_b)])\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 33, "score": 246086.2405248719 }, { "content": "#[js_function(1)]\n\npub fn schema(cx: CallContext) -> JsResult<JsObject> {\n\n let params = get_params(&cx)?;\n\n let df = params.get_external::<DataFrame>(&cx, \"_df\")?;\n\n let mut obj = cx.env.create_object()?;\n\n\n\n for field in df.schema().fields() {\n\n let field_name = format!(\"{}\", field.name()).try_into_js(&cx)?;\n\n let dtype: JsDataType = field.data_type().clone().into();\n\n let js_string = dtype.to_string().try_into_js(&cx)?;\n\n obj.set_property(field_name, js_string).unwrap();\n\n }\n\n Ok(obj)\n\n}\n\n\n", "file_path": "nodejs-polars/src/dataframe/frame.rs", "rank": 34, "score": 243757.2559546063 }, { "content": "#[cfg(feature = \"rank\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"rank\")))]\n\npub fn spearman_rank_corr(a: Expr, b: Expr) -> Expr {\n\n pearson_corr(\n\n a.rank(RankOptions {\n\n method: RankMethod::Min,\n\n ..Default::default()\n\n }),\n\n b.rank(RankOptions {\n\n method: RankMethod::Min,\n\n ..Default::default()\n\n }),\n\n )\n\n .with_fmt(\"spearman_rank_correlation\")\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 35, "score": 243067.34200450307 }, { "content": "pub fn to_datafusion_expr(expr: Expr) -> Result<DExpr> {\n\n use Expr::*;\n\n\n\n let expr = match expr {\n\n Alias(e, name) => DExpr::Alias(Box::new(to_datafusion_expr(*e)?), name.to_string()),\n\n Column(name) => 
DExpr::Column(name.to_string()),\n\n Literal(lv) => DExpr::Literal(to_datafusion_lit(lv)?),\n\n BinaryExpr { left, op, right } => DExpr::BinaryExpr {\n\n left: Box::new(to_datafusion_expr(*left)?),\n\n op: to_datafusion_op(op),\n\n right: Box::new(to_datafusion_expr(*right)?),\n\n },\n\n Not(e) => DExpr::Not(Box::new(to_datafusion_expr(*e)?)),\n\n IsNull(e) => DExpr::IsNull(Box::new(to_datafusion_expr(*e)?)),\n\n IsNotNull(e) => DExpr::IsNotNull(Box::new(to_datafusion_expr(*e)?)),\n\n Cast { expr, data_type } => DExpr::Cast {\n\n expr: Box::new(to_datafusion_expr(*expr)?),\n\n data_type: data_type.to_arrow(),\n\n },\n\n Sort { expr, reverse } => DExpr::Sort {\n", "file_path": "polars/polars-lazy/src/datafusion/conversion.rs", "rank": 36, "score": 239936.3669695731 }, { "content": "/// [Cast](Expr::Cast) expression.\n\npub fn cast(expr: Expr, data_type: DataType) -> Expr {\n\n Expr::Cast {\n\n expr: Box::new(expr),\n\n data_type,\n\n strict: false,\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 37, "score": 237395.13252288222 }, { "content": "#[cfg(feature = \"list\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"list\")))]\n\npub fn concat_lst(s: Vec<Expr>) -> Expr {\n\n let function = NoEq::new(Arc::new(move |s: &mut [Series]| {\n\n let mut first = std::mem::take(&mut s[0]);\n\n let other = &s[1..];\n\n\n\n let first_ca = match first.list().ok() {\n\n Some(ca) => ca,\n\n None => {\n\n first = first.reshape(&[-1, 1]).unwrap();\n\n first.list().unwrap()\n\n }\n\n };\n\n first_ca.lst_concat(other).map(|ca| ca.into_series())\n\n }) as Arc<dyn SeriesUdf>);\n\n Expr::Function {\n\n input: s,\n\n function,\n\n output_type: GetOutput::from_type(DataType::Utf8),\n\n options: FunctionOptions {\n\n collect_groups: ApplyOptions::ApplyFlat,\n\n input_wildcard_expansion: true,\n\n auto_explode: false,\n\n fmt_str: \"concat_list\",\n\n },\n\n }\n\n}\n\n\n\n/// Create list entries that are range arrays\n\n/// - if `low` and `high` are a column, every 
element will expand into an array in a list column.\n\n/// - if `low` and `high` are literals the output will be of `Int64`.\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 38, "score": 237005.74823591276 }, { "content": "pub fn ternary_expr(predicate: Expr, truthy: Expr, falsy: Expr) -> Expr {\n\n Expr::Ternary {\n\n predicate: Box::new(predicate),\n\n truthy: Box::new(truthy),\n\n falsy: Box::new(falsy),\n\n }\n\n}\n\n\n\nimpl Expr {\n\n /// Modify the Options passed to the `Function` node.\n\n pub(crate) fn with_function_options<F>(self, func: F) -> Expr\n\n where\n\n F: Fn(FunctionOptions) -> FunctionOptions,\n\n {\n\n if let Self::Function {\n\n input,\n\n function,\n\n output_type,\n\n mut options,\n\n } = self\n", "file_path": "polars/polars-lazy/src/dsl/mod.rs", "rank": 39, "score": 235928.4562649331 }, { "content": "pub fn str_to_polarstype(s: &str) -> DataType {\n\n match s {\n\n \"<class 'polars.datatypes.UInt8'>\" => DataType::UInt8,\n\n \"<class 'polars.datatypes.UInt16'>\" => DataType::UInt16,\n\n \"<class 'polars.datatypes.UInt32'>\" => DataType::UInt32,\n\n \"<class 'polars.datatypes.UInt64'>\" => DataType::UInt64,\n\n \"<class 'polars.datatypes.Int8'>\" => DataType::Int8,\n\n \"<class 'polars.datatypes.Int16'>\" => DataType::Int16,\n\n \"<class 'polars.datatypes.Int32'>\" => DataType::Int32,\n\n \"<class 'polars.datatypes.Int64'>\" => DataType::Int64,\n\n \"<class 'polars.datatypes.Float32'>\" => DataType::Float32,\n\n \"<class 'polars.datatypes.Float64'>\" => DataType::Float64,\n\n \"<class 'polars.datatypes.Boolean'>\" => DataType::Boolean,\n\n \"<class 'polars.datatypes.Utf8'>\" => DataType::Utf8,\n\n \"<class 'polars.datatypes.Date'>\" => DataType::Date,\n\n \"<class 'polars.datatypes.Datetime'>\" => DataType::Datetime(TimeUnit::Milliseconds, None),\n\n \"<class 'polars.datatypes.Duration'>\" => DataType::Duration(TimeUnit::Milliseconds),\n\n \"<class 'polars.datatypes.Time'>\" => DataType::Time,\n\n \"<class 
'polars.datatypes.List'>\" => DataType::List(DataType::Null.into()),\n\n \"<class 'polars.datatypes.Categorical'>\" => DataType::Categorical,\n\n \"<class 'polars.datatypes.Object'>\" => DataType::Object(\"object\"),\n\n tp => panic!(\"Type {} not implemented in str_to_polarstype\", tp),\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/utils.rs", "rank": 40, "score": 234481.17486622467 }, { "content": "pub fn binary_expr(l: Expr, op: Operator, r: Expr) -> Expr {\n\n Expr::BinaryExpr {\n\n left: Box::new(l),\n\n op,\n\n right: Box::new(r),\n\n }\n\n}\n\n\n\n/// Intermediate state of `when(..).then(..).otherwise(..)` expr.\n\n#[derive(Clone)]\n\npub struct When {\n\n predicate: Expr,\n\n}\n\n\n\n/// Intermediate state of `when(..).then(..).otherwise(..)` expr.\n\n#[derive(Clone)]\n\npub struct WhenThen {\n\n predicate: Expr,\n\n then: Expr,\n\n}\n", "file_path": "polars/polars-lazy/src/dsl/mod.rs", "rank": 41, "score": 232075.40102206432 }, { "content": "fn agg_list_list<F: Fn(&ListChunked, bool, &mut Vec<i64>, &mut i64, &mut Vec<ArrayRef>) -> bool>(\n\n ca: &ListChunked,\n\n groups_len: usize,\n\n func: F,\n\n) -> Option<Series> {\n\n let can_fast_explode = true;\n\n let mut offsets = Vec::<i64>::with_capacity(groups_len + 1);\n\n let mut length_so_far = 0i64;\n\n offsets.push(length_so_far);\n\n\n\n let mut list_values = Vec::with_capacity(groups_len);\n\n\n\n let can_fast_explode = func(\n\n ca,\n\n can_fast_explode,\n\n &mut offsets,\n\n &mut length_so_far,\n\n &mut list_values,\n\n );\n\n if groups_len == 0 {\n", "file_path": "polars/polars-core/src/frame/groupby/aggregations.rs", "rank": 42, "score": 232017.58271575143 }, { "content": "/// Evaluate all the expressions with a bitwise and\n\npub fn all_exprs<E: AsRef<[Expr]>>(exprs: E) -> Expr {\n\n let exprs = exprs.as_ref().to_vec();\n\n let func = |s1: Series, s2: Series| Ok(s1.bool()?.bitand(s2.bool()?).into_series());\n\n fold_exprs(lit(true), func, exprs)\n\n}\n\n\n", "file_path": 
"polars/polars-lazy/src/functions.rs", "rank": 43, "score": 231388.8599233971 }, { "content": "/// Evaluate all the expressions with a bitwise or\n\npub fn any_exprs<E: AsRef<[Expr]>>(exprs: E) -> Expr {\n\n let exprs = exprs.as_ref().to_vec();\n\n let func = |s1: Series, s2: Series| Ok(s1.bool()?.bitor(s2.bool()?).into_series());\n\n fold_exprs(lit(false), func, exprs)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 44, "score": 231388.8599233971 }, { "content": "/// Infer the data type of a record\n\nfn infer_field_schema(string: &str) -> DataType {\n\n // when quoting is enabled in the reader, these quotes aren't escaped, we default to\n\n // Utf8 for them\n\n if string.starts_with('\"') {\n\n return DataType::Utf8;\n\n }\n\n // match regex in a particular order\n\n if BOOLEAN_RE.is_match(string) {\n\n DataType::Boolean\n\n } else if FLOAT_RE.is_match(string) {\n\n DataType::Float64\n\n } else if INTEGER_RE.is_match(string) {\n\n DataType::Int64\n\n } else {\n\n DataType::Utf8\n\n }\n\n}\n\n\n\n#[inline]\n\npub(crate) fn parse_bytes_with_encoding(bytes: &[u8], encoding: CsvEncoding) -> Result<Cow<str>> {\n", "file_path": "polars/polars-io/src/csv_core/utils.rs", "rank": 45, "score": 230582.8997718352 }, { "content": "fn rewrite_keep_name_and_sufprefix(expr: Expr) -> Expr {\n\n // the blocks are added by cargo fmt\n\n #[allow(clippy::blocks_in_if_conditions)]\n\n if has_expr(&expr, |e| {\n\n matches!(e, Expr::KeepName(_) | Expr::SufPreFix { .. 
})\n\n }) {\n\n match expr {\n\n Expr::KeepName(expr) => {\n\n let roots = expr_to_root_column_names(&expr);\n\n let name = roots\n\n .get(0)\n\n .expect(\"expected root column to keep expression name\");\n\n Expr::Alias(expr, name.clone())\n\n }\n\n Expr::SufPreFix {\n\n is_suffix,\n\n value,\n\n expr,\n\n } => {\n\n let name = get_single_root(&expr).unwrap();\n", "file_path": "polars/polars-lazy/src/logical_plan/projection.rs", "rank": 46, "score": 230488.81249466137 }, { "content": "// Return the schema of the DataFrame read from the CSV.\n\nfn get_schema() -> Schema {\n\n Schema::new(vec![\n\n Field::new(\"category\", DataType::Utf8),\n\n Field::new(\"calories\", DataType::UInt32),\n\n Field::new(\"fats_g\", DataType::Float64),\n\n Field::new(\"sugars_g\", DataType::Float64),\n\n ])\n\n}\n\n\n", "file_path": "examples/aggregate_multiple_files_in_chunks/src/main.rs", "rank": 47, "score": 229660.06764063507 }, { "content": "pub fn py_seq_to_list(name: &str, seq: &PyAny, dtype: &PyAny) -> PyResult<Series> {\n\n let str_repr = dtype.str().unwrap().to_str().unwrap();\n\n let dtype = str_to_polarstype(str_repr);\n\n\n\n let (seq, len) = get_pyseq(seq)?;\n\n let s = match dtype {\n\n DataType::Int64 => {\n\n let mut builder =\n\n ListPrimitiveChunkedBuilder::<i64>::new(name, len, len * 5, DataType::Int64);\n\n for sub_seq in seq.iter()? 
{\n\n let sub_seq = sub_seq?;\n\n let (sub_seq, len) = get_pyseq(sub_seq)?;\n\n\n\n // safety: we know the iterators len\n\n let iter = unsafe {\n\n sub_seq\n\n .iter()?\n\n .map(|v| {\n\n let v = v.unwrap();\n\n if v.is_none() {\n", "file_path": "py-polars/src/list_construction.rs", "rank": 48, "score": 229049.27792882116 }, { "content": "/// Get the the minimum value per row\n\npub fn min_exprs<E: AsRef<[Expr]>>(exprs: E) -> Expr {\n\n let exprs = exprs.as_ref().to_vec();\n\n min_exprs_impl(exprs)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 49, "score": 228717.87775697574 }, { "content": "/// Get the the maximum value per row\n\npub fn max_exprs<E: AsRef<[Expr]>>(exprs: E) -> Expr {\n\n let exprs = exprs.as_ref().to_vec();\n\n max_exprs_impl(exprs)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 50, "score": 228717.87775697577 }, { "content": "/// Get the the sum of the values per row\n\npub fn sum_exprs<E: AsRef<[Expr]>>(exprs: E) -> Expr {\n\n let exprs = exprs.as_ref().to_vec();\n\n let func = |s1, s2| Ok(&s1 + &s2);\n\n fold_exprs(lit(0), func, exprs)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 51, "score": 228717.87775697577 }, { "content": "/// Infer schema from rows.\n\npub fn rows_to_schema(rows: &[Row]) -> Schema {\n\n // no of rows to use to infer dtype\n\n let max_infer = std::cmp::min(rows.len(), 50);\n\n let mut schema: Schema = (&rows[0]).into();\n\n // the first row that has no nulls will be used to infer the schema.\n\n // if there is a null, we check the next row and see if we can update the schema\n\n\n\n for row in rows.iter().take(max_infer).skip(1) {\n\n // for i in 1..max_infer {\n\n let nulls: Vec<_> = schema\n\n .fields()\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, f)| {\n\n if matches!(f.data_type(), DataType::Null) {\n\n Some(i)\n\n } else {\n\n None\n\n }\n\n })\n", "file_path": "polars/polars-core/src/frame/row.rs", "rank": 52, "score": 
228094.04826582002 }, { "content": "pub fn cols(names: Vec<String>) -> PyExpr {\n\n dsl::cols(names).into()\n\n}\n\n\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 53, "score": 227814.82404296115 }, { "content": "#[cfg(feature = \"arange\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"arange\")))]\n\npub fn arange(low: Expr, high: Expr, step: usize) -> Expr {\n\n if (matches!(low, Expr::Literal(_)) && !matches!(low, Expr::Literal(LiteralValue::Series(_))))\n\n || matches!(high, Expr::Literal(_))\n\n && !matches!(high, Expr::Literal(LiteralValue::Series(_)))\n\n {\n\n let f = move |sa: Series, sb: Series| {\n\n let sa = sa.cast(&DataType::Int64)?;\n\n let sb = sb.cast(&DataType::Int64)?;\n\n let low = sa\n\n .i64()?\n\n .get(0)\n\n .ok_or_else(|| PolarsError::NoData(\"no data in `low` evaluation\".into()))?;\n\n let high = sb\n\n .i64()?\n\n .get(0)\n\n .ok_or_else(|| PolarsError::NoData(\"no data in `high` evaluation\".into()))?;\n\n\n\n if step > 1 {\n\n Ok(Int64Chunked::new_from_iter(\"arange\", (low..high).step_by(step)).into_series())\n\n } else {\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 54, "score": 227528.8504742571 }, { "content": "#[cfg(any(feature = \"parquet\", feature = \"csv-file\"))]\n\nfn aggregate_expr_to_scan_agg(\n\n aggregate: Vec<Node>,\n\n expr_arena: &mut Arena<AExpr>,\n\n) -> Vec<ScanAggregation> {\n\n aggregate\n\n .into_iter()\n\n .map(|mut expr| {\n\n let mut alias = None;\n\n if let AExpr::Alias(e, name) = expr_arena.get(expr) {\n\n expr = *e;\n\n alias = Some((*name).to_string())\n\n };\n\n if let AExpr::Agg(agg) = expr_arena.get(expr) {\n\n match agg {\n\n AAggExpr::Min(e) => ScanAggregation::Min {\n\n column: (*aexpr_to_root_names(*e, expr_arena).pop().unwrap()).to_string(),\n\n alias,\n\n },\n\n AAggExpr::Max(e) => ScanAggregation::Max {\n\n column: (*aexpr_to_root_names(*e, expr_arena).pop().unwrap()).to_string(),\n", "file_path": "polars/polars-lazy/src/physical_plan/planner.rs", "rank": 55, "score": 
226626.4651351872 }, { "content": "#[test]\n\nfn test_agg_exprs() -> Result<()> {\n\n let df = fruits_cars();\n\n\n\n // a binary expression followed by a function and an aggregation. See if it runs\n\n let out = df\n\n .lazy()\n\n .groupby_stable([col(\"cars\")])\n\n .agg([(lit(1) - col(\"A\"))\n\n .map(|s| Ok(&s * 2), GetOutput::same_type())\n\n .list()\n\n .alias(\"foo\")])\n\n .collect()?;\n\n let ca = out.column(\"foo\")?.list()?;\n\n let out = ca.lst_lengths();\n\n\n\n assert_eq!(Vec::from(&out), &[Some(4), Some(1)]);\n\n Ok(())\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/tests/aggregations.rs", "rank": 56, "score": 225895.82877281765 }, { "content": "fn duplicate_err(name: &str) -> Result<()> {\n\n Err(PolarsError::Duplicate(\n\n format!(\"Column with name: '{}' has more than one occurrences\", name).into(),\n\n ))\n\n}\n\n\n\nimpl DataFrame {\n\n /// Get the index of the column.\n\n fn check_name_to_idx(&self, name: &str) -> Result<usize> {\n\n self.find_idx_by_name(name)\n\n .ok_or_else(|| PolarsError::NotFound(name.into()))\n\n }\n\n\n\n fn check_already_present(&self, name: &str) -> Result<()> {\n\n if self.columns.iter().any(|s| s.name() == name) {\n\n Err(PolarsError::Duplicate(\n\n format!(\"column with name: '{}' already present in DataFrame\", name).into(),\n\n ))\n\n } else {\n\n Ok(())\n", "file_path": "polars/polars-core/src/frame/mod.rs", "rank": 57, "score": 225743.7041937444 }, { "content": "/// Start a when-then-otherwise expression\n\npub fn when(predicate: Expr) -> When {\n\n When { predicate }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl/mod.rs", "rank": 58, "score": 223498.4073271414 }, { "content": "pub fn js_arr_to_list(name: &str, obj: &JsObject, dtype: &DataType) -> JsResult<Series> {\n\n let len = obj.get_array_length()?;\n\n let s = match dtype {\n\n DataType::Int8 => {\n\n let mut builder = ListPrimitiveChunkedBuilder::<i8>::new(\n\n name,\n\n len as usize,\n\n (len as usize) * 5,\n\n DataType::Int8,\n\n );\n\n for 
idx in 0..len {\n\n let sub_seq: JsObject = obj.get_element(idx)?;\n\n if sub_seq.is_typedarray()? {\n\n let buff: napi::JsTypedArray = unsafe { sub_seq.into_unknown().cast() };\n\n let v = buff.into_value()?;\n\n let ca = typed_to_chunked!(v, i8, Int8Type);\n\n builder.append_iter(ca.into_iter())\n\n } else {\n\n let sub_seq: JsObject = obj.get_element(idx)?;\n\n let sub_seq_len = sub_seq.get_array_length()?;\n", "file_path": "nodejs-polars/src/list_construction.rs", "rank": 59, "score": 222675.82117677244 }, { "content": "pub fn py_exprs_to_exprs(py_exprs: Vec<PyExpr>) -> Vec<Expr> {\n\n // Safety:\n\n // transparent struct\n\n unsafe { std::mem::transmute(py_exprs) }\n\n}\n", "file_path": "py-polars/src/lazy/utils.rs", "rank": 61, "score": 219591.32468249736 }, { "content": "#[cfg(feature = \"temporal\")]\n\nfn parse_dates(df: DataFrame, fixed_schema: &Schema) -> DataFrame {\n\n let cols = df\n\n .get_columns()\n\n .par_iter()\n\n .map(|s| {\n\n if let Ok(ca) = s.utf8() {\n\n // don't change columns that are in the fixed schema.\n\n if fixed_schema.column_with_name(s.name()).is_some() {\n\n return s.clone();\n\n }\n\n\n\n #[cfg(feature = \"dtype-time\")]\n\n if let Ok(ca) = ca.as_time(None) {\n\n return ca.into_series();\n\n }\n\n if let Ok(ca) = ca.as_date(None) {\n\n ca.into_series()\n\n } else if let Ok(ca) = ca.as_datetime(None, TimeUnit::Milliseconds) {\n\n ca.into_series()\n\n } else {\n", "file_path": "polars/polars-io/src/csv.rs", "rank": 62, "score": 213428.21554457478 }, { "content": "/// Select multiple columns by dtype.\n\npub fn dtype_col(dtype: &DataType) -> Expr {\n\n Expr::DtypeColumn(vec![dtype.clone()])\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 63, "score": 213007.3808395908 }, { "content": "fn format_duration(f: &mut Formatter, v: i64, sizes: &[i64], names: &[&str]) -> fmt::Result {\n\n for i in 0..4 {\n\n let whole_num = if i == 0 {\n\n v / sizes[i]\n\n } else {\n\n (v % sizes[i - 1]) / sizes[i]\n\n };\n\n if 
whole_num <= -1 || whole_num >= 1 {\n\n write!(f, \"{} {}\", whole_num, names[i])?;\n\n if whole_num != 1 {\n\n write!(f, \"s\")?;\n\n }\n\n if v % sizes[i] != 0 {\n\n write!(f, \" \")?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "polars/polars-core/src/fmt.rs", "rank": 64, "score": 212759.40237029293 }, { "content": "fn cast_impl(name: &str, chunks: &[ArrayRef], dtype: &DataType) -> Result<Series> {\n\n let chunks = cast_chunks(chunks, &dtype.to_physical())?;\n\n let out = Series::try_from((name, chunks))?;\n\n use DataType::*;\n\n let out = match dtype {\n\n Date => out.into_date(),\n\n Datetime(tu, tz) => out.into_datetime(*tu, tz.clone()),\n\n Duration(tu) => out.into_duration(*tu),\n\n #[cfg(feature = \"dtype-time\")]\n\n Time => out.into_time(),\n\n _ => out,\n\n };\n\n\n\n Ok(out)\n\n}\n\n\n\n#[cfg(feature = \"dtype-categorical\")]\n\nimpl ChunkCast for CategoricalChunked {\n\n fn cast(&self, data_type: &DataType) -> Result<Series> {\n\n match data_type {\n", "file_path": "polars/polars-core/src/chunked_array/cast.rs", "rank": 65, "score": 212279.71178582442 }, { "content": "#[js_function(1)]\n\npub fn set_column_names(cx: CallContext) -> JsResult<JsUndefined> {\n\n let params = get_params(&cx)?;\n\n let df = params.get_external_mut::<DataFrame>(&cx, \"_df\")?;\n\n let names = params.get_as::<Vec<&str>>(\"names\")?;\n\n df.set_column_names(&names).map_err(JsPolarsEr::from)?;\n\n\n\n cx.env.get_undefined()\n\n}\n\n\n", "file_path": "nodejs-polars/src/dataframe/frame.rs", "rank": 66, "score": 212058.620475016 }, { "content": "#[bench]\n\nfn bench_group_by(b: &mut Bencher) {\n\n let s1: Series = Series::new(\"item\", (0u32..10000).collect::<Vec<u32>>());\n\n let s2: Series = Series::new(\"group\", iter::repeat(0).take(10000).collect::<Vec<u32>>());\n\n\n\n let df1 = DataFrame::new(vec![s1, s2]).unwrap();\n\n\n\n b.iter(|| {\n\n df1.groupby(\"group\").unwrap().select(\"item\").sum().unwrap();\n\n });\n\n}\n", "file_path": "polars/benches/bench.rs", 
"rank": 67, "score": 211789.13859100547 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn infer_file_schema(\n\n reader_bytes: &ReaderBytes,\n\n delimiter: u8,\n\n max_read_lines: Option<usize>,\n\n has_header: bool,\n\n schema_overwrite: Option<&Schema>,\n\n // we take &mut because we maybe need to skip more rows dependent\n\n // on the schema inference\n\n skip_rows: &mut usize,\n\n comment_char: Option<u8>,\n\n quote_char: Option<u8>,\n\n null_values: Option<&NullValues>,\n\n) -> Result<(Schema, usize)> {\n\n // We use lossy utf8 here because we don't want the schema inference to fail on utf8.\n\n // It may later.\n\n let encoding = CsvEncoding::LossyUtf8;\n\n\n\n let bytes = skip_line_ending(skip_bom(reader_bytes)).0;\n\n let mut lines = SplitLines::new(bytes, b'\\n').skip(*skip_rows);\n\n\n", "file_path": "polars/polars-io/src/csv_core/utils.rs", "rank": 68, "score": 210568.0817808896 }, { "content": "// the return type is Union[PySeries, PyDataFrame] and a boolean indicating if it is a dataframe or not\n\npub fn apply_lambda_unknown<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n inference_size: usize,\n\n) -> PyResult<(PyObject, bool)> {\n\n let columns = df.get_columns();\n\n let mut null_count = 0;\n\n\n\n for idx in 0..df.height() {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let arg = (PyTuple::new(py, iter),);\n\n let out = lambda.call1(arg)?;\n\n\n\n if out.is_none() {\n\n null_count += 1;\n\n continue;\n\n } else if out.is_instance::<PyBool>().unwrap() {\n\n let first_value = out.extract::<bool>().ok();\n\n return Ok((\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 69, "score": 209197.4453115492 }, { "content": "#[js_function(1)]\n\npub fn str_json_path_match(cx: CallContext) -> JsResult<JsExternal> {\n\n let params = get_params(&cx)?;\n\n let expr = params.get_external::<Expr>(&cx, \"_expr\")?;\n\n let pat = params.get_as::<String>(\"pat\")?;\n\n\n\n let function = move |s: 
Series| {\n\n let ca = s.utf8()?;\n\n match ca.json_path_match(&pat) {\n\n Ok(ca) => Ok(ca.into_series()),\n\n Err(e) => Err(PolarsError::ComputeError(format!(\"{:?}\", e).into())),\n\n }\n\n };\n\n\n\n expr.clone()\n\n .map(function, GetOutput::from_type(DataType::Boolean))\n\n .try_into_js(&cx)\n\n}\n\n\n", "file_path": "nodejs-polars/src/lazy/dsl.rs", "rank": 70, "score": 208683.59139809257 }, { "content": "/// Find the indexes that would sort these series in order of appearance.\n\n/// That means that the first `Series` will be used to determine the ordering\n\n/// until duplicates are found. Once duplicates are found, the next `Series` will\n\n/// be used and so on.\n\npub fn argsort_by<E: AsRef<[Expr]>>(by: E, reverse: &[bool]) -> Expr {\n\n let reverse = reverse.to_vec();\n\n let function = NoEq::new(Arc::new(move |by: &mut [Series]| {\n\n polars_core::functions::argsort_by(by, &reverse).map(|ca| ca.into_series())\n\n }) as Arc<dyn SeriesUdf>);\n\n\n\n Expr::Function {\n\n input: by.as_ref().to_vec(),\n\n function,\n\n output_type: GetOutput::from_type(DataType::UInt32),\n\n options: FunctionOptions {\n\n collect_groups: ApplyOptions::ApplyFlat,\n\n input_wildcard_expansion: false,\n\n auto_explode: true,\n\n fmt_str: \"argsort_by\",\n\n },\n\n }\n\n}\n\n\n\n#[cfg(feature = \"concat_str\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"concat_str\")))]\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 71, "score": 208111.3992499563 }, { "content": "// Compute the mean for every field:\n\n// - calories_mean from calories_sum_sum and calories_count_sum\n\n// - fats_g_mean from fats_g_sum_sum and fats_g_count_sum\n\n// - sugars_g_mean from sugars_g_sum_sum and sugars_g_count_sum\n\n//\n\n// The input is the dataframe used to get the '${field}_count_sum' and\n\n// '${field}_sum_sum' fields. 
It shall be mutable, as the fields are going\n\n// to be dropped when computed the '${field}_mean'.\n\n//\n\n// The output is a result containing the Vector of mean Series computed.\n\nfn compute_all_means(dataframe: &mut DataFrame) -> PolarResult<Vec<Series>> {\n\n const SERIES_NAMES: &[(&str, &str, &str)] = &[\n\n (\"calories_sum_sum\", \"calories_count_sum\", \"calories_mean\"),\n\n (\"fats_g_sum_sum\", \"fats_g_count_sum\", \"fats_g_mean\"),\n\n (\"sugars_g_sum_sum\", \"sugars_g_count_sum\", \"sugars_g_mean\"),\n\n ];\n\n\n\n let mut result = Vec::with_capacity(SERIES_NAMES.len());\n\n for (sum_column_name, count_column_name, mean_column_name) in SERIES_NAMES {\n\n let mean_column = compute_mean(\n\n dataframe,\n\n sum_column_name,\n\n count_column_name,\n\n mean_column_name,\n\n )?;\n\n result.push(mean_column);\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "examples/aggregate_multiple_files_in_chunks/src/main.rs", "rank": 72, "score": 205528.80860639593 }, { "content": "/// Apply a lambda with utf8 output type\n\npub fn apply_lambda_with_utf8_out_type<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<&str>,\n\n) -> Utf8Chunked {\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = apply_iter::<&str>(df, py, lambda, init_null_count, skip);\n\n iterator_to_utf8(iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 73, "score": 205510.57098784228 }, { "content": "/// Apply a lambda with a boolean output type\n\npub fn apply_lambda_with_bool_out_type<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<bool>,\n\n) -> ChunkedArray<BooleanType> {\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if 
init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = apply_iter(df, py, lambda, init_null_count, skip);\n\n iterator_to_bool(iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 74, "score": 205510.57098784228 }, { "content": "/// Apply a lambda with list output type\n\npub fn apply_lambda_with_list_out_type<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<&Series>,\n\n dt: &DataType,\n\n) -> ListChunked {\n\n let columns = df.get_columns();\n\n\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = ((init_null_count + skip)..df.height()).map(|idx| {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let tpl = (PyTuple::new(py, iter),);\n\n match lambda.call1(tpl) {\n\n Ok(val) => match val.getattr(\"_s\") {\n\n Ok(val) => val.extract::<PySeries>().ok().map(|ps| ps.series),\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 75, "score": 205510.57098784228 }, { "content": "pub fn apply_lambda_with_rows_output<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Row<'a>,\n\n inference_size: usize,\n\n) -> Result<DataFrame> {\n\n let columns = df.get_columns();\n\n let width = first_value.0.len();\n\n let null_row = Row::new(vec![AnyValue::Null; width]);\n\n\n\n let mut row_buf = Row::default();\n\n\n\n let skip = 1;\n\n let mut row_iter = ((init_null_count + skip)..df.height()).map(|idx| {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let tpl = (PyTuple::new(py, iter),);\n\n match lambda.call1(tpl) {\n\n Ok(val) => {\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 76, "score": 205510.57098784228 }, { "content": 
"#[cfg(feature = \"concat_str\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"concat_str\")))]\n\npub fn concat_str(s: &[Series], delimiter: &str) -> Result<Utf8Chunked> {\n\n if s.is_empty() {\n\n return Err(PolarsError::NoData(\n\n \"expected multiple series in concat_str function\".into(),\n\n ));\n\n }\n\n let len = s.iter().map(|s| s.len()).max().unwrap();\n\n\n\n let cas = s\n\n .iter()\n\n .map(|s| {\n\n let s = s.cast(&DataType::Utf8)?;\n\n let mut ca = s.utf8()?.clone();\n\n // broadcast\n\n if ca.len() == 1 && len > 1 {\n\n ca = ca.expand_at_index(0, len)\n\n }\n\n\n\n Ok(ca)\n\n })\n", "file_path": "polars/polars-core/src/functions.rs", "rank": 77, "score": 204858.99921598146 }, { "content": "#[js_function(1)]\n\npub fn name(cx: CallContext) -> JsResult<JsString> {\n\n let params = get_params(&cx)?;\n\n let series = params.get_external::<Series>(&cx, \"_series\")?;\n\n series.name().try_into_js(&cx)\n\n}\n\n\n", "file_path": "nodejs-polars/src/series.rs", "rank": 78, "score": 203425.456536798 }, { "content": "/// Apply a closure on the two columns that are evaluated from `Expr` a and `Expr` b.\n\npub fn map_binary<F: 'static>(a: Expr, b: Expr, f: F, output_type: GetOutput) -> Expr\n\nwhere\n\n F: Fn(Series, Series) -> Result<Series> + Send + Sync,\n\n{\n\n let function = prepare_binary_function!(f);\n\n a.map_many(function, &[b], output_type)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 79, "score": 203377.95494971483 }, { "content": "pub fn apply_binary<F: 'static>(a: Expr, b: Expr, f: F, output_type: GetOutput) -> Expr\n\nwhere\n\n F: Fn(Series, Series) -> Result<Series> + Send + Sync,\n\n{\n\n let function = prepare_binary_function!(f);\n\n a.apply_many(function, &[b], output_type)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 80, "score": 203370.84650966275 }, { "content": "fn det_min<T>(state: &mut T, v: Option<T>) -> Option<Option<T>>\n\nwhere\n\n T: Copy + PartialOrd + AddAssign + Add<Output = 
T>,\n\n{\n\n match v {\n\n Some(v) => {\n\n if v < *state {\n\n *state = v\n\n }\n\n Some(Some(*state))\n\n }\n\n None => Some(None),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/cum_agg.rs", "rank": 81, "score": 202344.69705201633 }, { "content": "fn det_max<T>(state: &mut T, v: Option<T>) -> Option<Option<T>>\n\nwhere\n\n T: Copy + PartialOrd + AddAssign + Add<Output = T>,\n\n{\n\n match v {\n\n Some(v) => {\n\n if v > *state {\n\n *state = v\n\n }\n\n Some(Some(*state))\n\n }\n\n None => Some(None),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/cum_agg.rs", "rank": 82, "score": 202344.69705201633 }, { "content": "#[cfg(feature = \"extract_jsonpath\")]\n\nfn extract_json<'a>(expr: &Compiled, json_str: &'a str) -> Option<Cow<'a, str>> {\n\n serde_json::from_str(json_str).ok().and_then(|value| {\n\n // TODO: a lot of heap allocations here. Improve json path by adding a take?\n\n let result = expr.select(&value).ok()?;\n\n let first = *result.get(0)?;\n\n\n\n match first {\n\n Value::String(s) => Some(Cow::Owned(s.clone())),\n\n Value::Null => None,\n\n v => Some(Cow::Owned(v.to_string())),\n\n }\n\n })\n\n}\n\n\n\nimpl Utf8Chunked {\n\n /// Extract json path, first match\n\n /// Refer to <https://goessner.net/articles/JsonPath/>\n\n #[cfg(feature = \"extract_jsonpath\")]\n\n pub fn json_path_match(&self, json_path: &str) -> Result<Utf8Chunked> {\n\n match Compiled::compile(json_path) {\n\n Ok(pat) => Ok(self.apply_on_opt(|opt_s| opt_s.and_then(|s| extract_json(&pat, s)))),\n\n Err(e) => Err(PolarsError::ValueError(\n\n format!(\"error compiling JSONpath expression {:?}\", e).into(),\n\n )),\n\n }\n\n }\n\n}\n", "file_path": "polars/polars-core/src/chunked_array/strings/json_path.rs", "rank": 83, "score": 200441.4988344147 }, { "content": "#[test]\n\nfn test_lazy_df_aggregations() {\n\n let df = load_df();\n\n\n\n assert!(df\n\n .clone()\n\n .lazy()\n\n .min()\n\n .collect()\n\n .unwrap()\n\n 
.frame_equal_missing(&df.min()));\n\n assert!(df\n\n .clone()\n\n .lazy()\n\n .median()\n\n .collect()\n\n .unwrap()\n\n .frame_equal_missing(&df.median()));\n\n assert!(df\n\n .clone()\n\n .lazy()\n\n .quantile(0.5, QuantileInterpolOptions::default())\n\n .collect()\n\n .unwrap()\n\n .frame_equal_missing(\n\n &df.quantile(0.5, QuantileInterpolOptions::default())\n\n .unwrap()\n\n ));\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/tests/aggregations.rs", "rank": 84, "score": 197988.00313513156 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 85, "score": 197753.66675011138 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 86, "score": 197753.66675011138 }, { "content": "/// Create a Literal Expression from `L`\n\npub fn lit<L: Literal>(t: L) -> Expr {\n\n t.lit()\n\n}\n", "file_path": "polars/polars-lazy/src/logical_plan/lit.rs", "rank": 87, "score": 197499.91074565292 }, { "content": "pub fn when(predicate: PyExpr) -> When {\n\n When { predicate }\n\n}\n\n\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 88, "score": 197150.40529418382 }, { "content": "fn det_prod<T>(state: &mut Option<T>, v: Option<T>) -> Option<Option<T>>\n\nwhere\n\n T: Copy + PartialOrd + Mul<Output = T>,\n\n{\n\n match (*state, v) {\n\n (Some(state_inner), Some(v)) => {\n\n *state = Some(state_inner * v);\n\n Some(*state)\n\n }\n\n (None, Some(v)) => {\n\n *state = Some(v);\n\n Some(*state)\n\n }\n\n (_, None) => Some(None),\n\n }\n\n}\n\n\n\nimpl<T> ChunkCumAgg<T> for ChunkedArray<T>\n\nwhere\n\n T: PolarsNumericType,\n", "file_path": "polars/polars-core/src/chunked_array/ops/cum_agg.rs", "rank": 89, "score": 196503.593517016 }, { "content": "fn det_sum<T>(state: &mut Option<T>, v: Option<T>) -> Option<Option<T>>\n\nwhere\n\n T: Copy + PartialOrd + AddAssign + Add<Output = T>,\n\n{\n\n match (*state, v) {\n\n (Some(state_inner), Some(v)) => {\n\n *state = Some(state_inner + v);\n\n Some(*state)\n\n }\n\n (None, Some(v)) => {\n\n *state = Some(v);\n\n Some(*state)\n\n }\n\n (_, None) => Some(None),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/cum_agg.rs", "rank": 90, "score": 196503.593517016 }, { "content": "/// Apply a lambda with a primitive output type\n\npub fn apply_lambda_with_primitive_out_type<'a, D>(\n\n df: &'a DataFrame,\n\n py: Python<'a>,\n\n lambda: &'a 
PyAny,\n\n init_null_count: usize,\n\n first_value: Option<D::Native>,\n\n) -> ChunkedArray<D>\n\nwhere\n\n D: PyArrowPrimitiveType,\n\n D::Native: ToPyObject + FromPyObject<'a>,\n\n{\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = apply_iter(df, py, lambda, init_null_count, skip);\n\n iterator_to_primitive(iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 91, "score": 195417.21132246839 }, { "content": "#[js_function(1)]\n\npub fn find_idx_by_name(cx: CallContext) -> JsResult<Either<JsNumber, JsUndefined>> {\n\n let params = get_params(&cx)?;\n\n let df = params.get_external::<DataFrame>(&cx, \"_df\")?;\n\n let name = params.get_as::<&str>(\"name\")?;\n\n let opt = df.find_idx_by_name(name);\n\n\n\n match opt {\n\n Some(idx) => idx.try_into_js(&cx).map(Either::A),\n\n None => cx.env.get_undefined().map(Either::B),\n\n }\n\n}\n\n\n", "file_path": "nodejs-polars/src/dataframe/frame.rs", "rank": 92, "score": 194999.11455887515 }, { "content": "#[pyfunction]\n\nfn concat_str(s: Vec<dsl::PyExpr>, sep: &str) -> dsl::PyExpr {\n\n let s = s.into_iter().map(|e| e.inner).collect();\n\n polars::lazy::functions::concat_str(s, sep).into()\n\n}\n\n\n", "file_path": "py-polars/src/lib.rs", "rank": 93, "score": 194271.6577787464 }, { "content": "#[cfg(feature = \"private\")]\n\npub fn split_df(df: &DataFrame, n: usize) -> Result<Vec<DataFrame>> {\n", "file_path": "polars/polars-core/src/utils/mod.rs", "rank": 94, "score": 192570.58099136507 }, { "content": "// helper that combines the groups into a parallel iterator over `(first, all): (u32, &Vec<u32>)`\n\nfn agg_helper_idx<T, F>(groups: &GroupsIdx, f: F) -> Option<Series>\n\nwhere\n\n F: Fn((u32, &Vec<u32>)) -> Option<T::Native> + Send + Sync,\n\n T: PolarsNumericType,\n\n ChunkedArray<T>: IntoSeries,\n\n{\n\n let 
ca: ChunkedArray<T> = POOL.install(|| groups.into_par_iter().map(f).collect());\n\n Some(ca.into_series())\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/aggregations.rs", "rank": 95, "score": 192426.16654731805 }, { "content": "// helper that iterates on the `all: Vec<Vec<u32>` collection\n\n// this doesn't have traverse the `first: Vec<u32>` memory and is therefore faster\n\nfn agg_helper_idx_on_all<T, F>(groups: &GroupsIdx, f: F) -> Option<Series>\n\nwhere\n\n F: Fn(&Vec<u32>) -> Option<T::Native> + Send + Sync,\n\n T: PolarsNumericType,\n\n ChunkedArray<T>: IntoSeries,\n\n{\n\n let ca: ChunkedArray<T> = POOL.install(|| groups.all().into_par_iter().map(f).collect());\n\n Some(ca.into_series())\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/aggregations.rs", "rank": 96, "score": 192420.19634977318 }, { "content": "pub fn lit(value: &PyAny) -> PyExpr {\n\n if let Ok(true) = value.is_instance::<PyBool>() {\n\n let val = value.extract::<bool>().unwrap();\n\n dsl::lit(val).into()\n\n } else if let Ok(int) = value.downcast::<PyInt>() {\n\n let val = int.extract::<i64>().unwrap();\n\n\n\n if val > 0 && val < i32::MAX as i64 || val < 0 && val > i32::MIN as i64 {\n\n dsl::lit(val as i32).into()\n\n } else {\n\n dsl::lit(val).into()\n\n }\n\n } else if let Ok(float) = value.downcast::<PyFloat>() {\n\n let val = float.extract::<f64>().unwrap();\n\n dsl::lit(val).into()\n\n } else if let Ok(pystr) = value.downcast::<PyString>() {\n\n dsl::lit(\n\n pystr\n\n .to_str()\n\n .expect(\"could not transform Python string to Rust Unicode\"),\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 97, "score": 189984.85470404907 }, { "content": "fn to_aexprs(input: Vec<Expr>, arena: &mut Arena<AExpr>) -> Vec<Node> {\n\n input.into_iter().map(|e| to_aexpr(e, arena)).collect()\n\n}\n\n\n\n// converts expression to AExpr, which uses an arena (Vec) for allocation\n\npub(crate) fn to_aexpr(expr: Expr, arena: &mut Arena<AExpr>) -> Node {\n\n let v = match 
expr {\n\n Expr::IsUnique(expr) => AExpr::IsUnique(to_aexpr(*expr, arena)),\n\n Expr::Duplicated(expr) => AExpr::Duplicated(to_aexpr(*expr, arena)),\n\n Expr::Reverse(expr) => AExpr::Reverse(to_aexpr(*expr, arena)),\n\n Expr::Explode(expr) => AExpr::Explode(to_aexpr(*expr, arena)),\n\n Expr::Alias(e, name) => AExpr::Alias(to_aexpr(*e, arena), name),\n\n Expr::Literal(value) => AExpr::Literal(value),\n\n Expr::Column(s) => AExpr::Column(s),\n\n Expr::BinaryExpr { left, op, right } => {\n\n let l = to_aexpr(*left, arena);\n\n let r = to_aexpr(*right, arena);\n\n AExpr::BinaryExpr {\n\n left: l,\n\n op,\n", "file_path": "polars/polars-lazy/src/logical_plan/conversion.rs", "rank": 98, "score": 189422.89719971135 }, { "content": "/// Apply a function/closure over the groups of multiple columns. This should only be used in a groupby aggregation.\n\n///\n\n/// It is the responsibility of the caller that the schema is correct by giving\n\n/// the correct output_type. If None given the output type of the input expr is used.\n\n///\n\n/// This difference with `[map_mul]` is that `[apply_mul]` will create a separate `[Series]` per group.\n\n///\n\n/// * `[map_mul]` should be used for operations that are independent of groups, e.g. `multiply * 2`, or `raise to the power`\n\n/// * `[apply_mul]` should be used for operations that work on a group of data. e.g. `sum`, `count`, etc.\n\npub fn apply_multiple<F, E>(function: F, expr: E, output_type: GetOutput) -> Expr\n\nwhere\n\n F: Fn(&mut [Series]) -> Result<Series> + 'static + Send + Sync,\n\n E: AsRef<[Expr]>,\n\n{\n\n let input = expr.as_ref().to_vec();\n\n\n\n Expr::Function {\n\n input,\n\n function: NoEq::new(Arc::new(function)),\n\n output_type,\n\n options: FunctionOptions {\n\n collect_groups: ApplyOptions::ApplyGroups,\n\n input_wildcard_expansion: false,\n\n auto_explode: true,\n\n fmt_str: \"\",\n\n },\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl/mod.rs", "rank": 99, "score": 189293.80475791683 } ]
Rust
daemon/state_helper.rs
slooppe/pueue
ee71ad7c6eb05788af063fd98a649b02c006cbb1
use std::collections::BTreeMap; use std::fs; use std::path::{Path, PathBuf}; use std::sync::MutexGuard; use std::time::SystemTime; use anyhow::{Context, Result}; use chrono::prelude::*; use log::{debug, info}; use pueue_lib::state::{GroupStatus, State}; use pueue_lib::task::{TaskResult, TaskStatus}; pub type LockedState<'a> = MutexGuard<'a, State>; pub fn is_task_removable(state: &LockedState, task_id: &usize, to_delete: &[usize]) -> bool { let dependants: Vec<usize> = state .tasks .iter() .filter(|(_, task)| { task.dependencies.contains(task_id) && !matches!(task.status, TaskStatus::Done(_)) }) .map(|(_, task)| task.id) .collect(); if dependants.is_empty() { return true; } let should_delete_dependants = dependants.iter().all(|task_id| to_delete.contains(task_id)); if !should_delete_dependants { return false; } dependants .iter() .all(|task_id| is_task_removable(state, task_id, to_delete)) } pub fn pause_on_failure(state: &mut LockedState, group: String) { if state.settings.daemon.pause_group_on_failure { state.groups.insert(group, GroupStatus::Paused); } else if state.settings.daemon.pause_all_on_failure { state.set_status_for_all_groups(GroupStatus::Paused); } } pub fn save_settings(state: &LockedState) -> Result<()> { state .settings .save(&state.config_path) .context("Failed to save settings") } pub fn reset_state(state: &mut LockedState) -> Result<()> { backup_state(state)?; state.tasks = BTreeMap::new(); state.set_status_for_all_groups(GroupStatus::Running); save_state(state) } pub fn save_state(state: &State) -> Result<()> { save_state_to_file(state, false) } pub fn backup_state(state: &LockedState) -> Result<()> { save_state_to_file(state, true)?; rotate_state(state).context("Failed to rotate old log files")?; Ok(()) } fn save_state_to_file(state: &State, log: bool) -> Result<()> { let serialized = serde_json::to_string(&state).context("Failed to serialize state:"); let serialized = serialized.unwrap(); let path = state.settings.shared.pueue_directory(); 
let (temp, real) = if log { let path = path.join("log"); let now: DateTime<Utc> = Utc::now(); let time = now.format("%Y-%m-%d_%H-%M-%S"); ( path.join(format!("{}_state.json.partial", time)), path.join(format!("{}_state.json", time)), ) } else { (path.join("state.json.partial"), path.join("state.json")) }; fs::write(&temp, serialized).context("Failed to write temp file while saving state.")?; fs::rename(&temp, &real).context("Failed to overwrite old state while saving state")?; if log { debug!("State backup created at: {:?}", real); } else { debug!("State saved at: {:?}", real); } Ok(()) } pub fn restore_state(pueue_directory: &Path) -> Result<Option<State>> { let path = pueue_directory.join("state.json"); if !path.exists() { info!( "Couldn't find state from previous session at location: {:?}", path ); return Ok(None); } info!("Start restoring state"); let data = fs::read_to_string(&path).context("State restore: Failed to read file:\n\n{}")?; let mut state: State = serde_json::from_str(&data).context("Failed to deserialize state.")?; for (group, _) in state.settings.daemon.groups.iter() { if let Some(status) = state.groups.clone().get(group) { state.groups.insert(group.clone(), status.clone()); } } for (_, task) in state.tasks.iter_mut() { if task.status == TaskStatus::Running || task.status == TaskStatus::Paused { info!( "Setting task {} with previous status {:?} to new status {:?}", task.id, task.status, TaskResult::Killed ); task.status = TaskStatus::Done(TaskResult::Killed); } if task.status == TaskStatus::Locked { task.status = TaskStatus::Stashed { enqueue_at: None }; } if !state.settings.daemon.groups.contains_key(&task.group) { task.set_default_group(); } if task.status == TaskStatus::Queued { info!( "Pausing group {} to prevent unwanted execution of previous tasks", &task.group ); state.groups.insert(task.group.clone(), GroupStatus::Paused); } } Ok(Some(state)) } fn rotate_state(state: &LockedState) -> Result<()> { let path = 
state.settings.shared.pueue_directory().join("log"); let mut entries: BTreeMap<SystemTime, PathBuf> = BTreeMap::new(); let mut directory_list = fs::read_dir(path)?; while let Some(Ok(entry)) = directory_list.next() { let path = entry.path(); let metadata = entry.metadata()?; let time = metadata.modified()?; entries.insert(time, path); } let mut number_entries = entries.len(); let mut iter = entries.iter(); while number_entries > 10 { if let Some((_, path)) = iter.next() { fs::remove_file(path)?; number_entries -= 1; } } Ok(()) }
use std::collections::BTreeMap; use std::fs; use std::path::{Path, PathBuf}; use std::sync::MutexGuard; use std::time::SystemTime; use anyhow::{Context, Result}; use chrono::prelude::*; use log::{debug, info}; use pueue_lib::state::{GroupStatus, State}; use pueue_lib::task::{TaskResult, TaskStatus}; pub type LockedState<'a> = MutexGuard<'a, State>; pub fn is_task_removable(state: &LockedState, task_id: &usize, to_delete: &[usize]) -> bool { let dependants: Vec<usize> = state .tasks .iter() .filter(|(_, task)| { task.dependencies.contains(task_id) && !matches!(task.status, TaskStatus::Done(_)) }) .map(|(_, task)| task.id) .collect(); if dependants.is_empty() { return true; } let should_delete_dependants = dependants.iter().all(|task_id| to_delete.contains(task_id)); if !should_delete_dependants { return false; } dependants .iter() .all(|task_id| is_task_removable(state, task_id, to_delete)) } pub fn pause_on_failure(state: &mut LockedState, group: String) { if state.settings.daemon.pause_group_on_failure { state.groups.insert(group, GroupStatus::Paused); } else if state.settings.daemon.pause_all_on_failure { state.set_status_for_all_groups(GroupStatus::Paused); } } pub fn save_settings(state: &LockedState) -> Result<()> { state .settings .save(&state.config_path) .context("Failed to save settings") } pub fn reset_state(state: &mut LockedState) -> Result<()> { backup_state(state)?; state.tasks = BTreeMap::new(); state.set_status_for_all_groups(GroupStatus::Running); save_state(state) } pub fn save_state(state: &State) -> Result<()> { save_state_to_file(state, false) } pub fn backup_state(state: &LockedState) -> Result<()> { save_state_to_file(state, true)?; rotate_state(state).context("Failed to rotate old log files")?; Ok(()) }
pub fn restore_state(pueue_directory: &Path) -> Result<Option<State>> { let path = pueue_directory.join("state.json"); if !path.exists() { info!( "Couldn't find state from previous session at location: {:?}", path ); return Ok(None); } info!("Start restoring state"); let data = fs::read_to_string(&path).context("State restore: Failed to read file:\n\n{}")?; let mut state: State = serde_json::from_str(&data).context("Failed to deserialize state.")?; for (group, _) in state.settings.daemon.groups.iter() { if let Some(status) = state.groups.clone().get(group) { state.groups.insert(group.clone(), status.clone()); } } for (_, task) in state.tasks.iter_mut() { if task.status == TaskStatus::Running || task.status == TaskStatus::Paused { info!( "Setting task {} with previous status {:?} to new status {:?}", task.id, task.status, TaskResult::Killed ); task.status = TaskStatus::Done(TaskResult::Killed); } if task.status == TaskStatus::Locked { task.status = TaskStatus::Stashed { enqueue_at: None }; } if !state.settings.daemon.groups.contains_key(&task.group) { task.set_default_group(); } if task.status == TaskStatus::Queued { info!( "Pausing group {} to prevent unwanted execution of previous tasks", &task.group ); state.groups.insert(task.group.clone(), GroupStatus::Paused); } } Ok(Some(state)) } fn rotate_state(state: &LockedState) -> Result<()> { let path = state.settings.shared.pueue_directory().join("log"); let mut entries: BTreeMap<SystemTime, PathBuf> = BTreeMap::new(); let mut directory_list = fs::read_dir(path)?; while let Some(Ok(entry)) = directory_list.next() { let path = entry.path(); let metadata = entry.metadata()?; let time = metadata.modified()?; entries.insert(time, path); } let mut number_entries = entries.len(); let mut iter = entries.iter(); while number_entries > 10 { if let Some((_, path)) = iter.next() { fs::remove_file(path)?; number_entries -= 1; } } Ok(()) }
fn save_state_to_file(state: &State, log: bool) -> Result<()> { let serialized = serde_json::to_string(&state).context("Failed to serialize state:"); let serialized = serialized.unwrap(); let path = state.settings.shared.pueue_directory(); let (temp, real) = if log { let path = path.join("log"); let now: DateTime<Utc> = Utc::now(); let time = now.format("%Y-%m-%d_%H-%M-%S"); ( path.join(format!("{}_state.json.partial", time)), path.join(format!("{}_state.json", time)), ) } else { (path.join("state.json.partial"), path.join("state.json")) }; fs::write(&temp, serialized).context("Failed to write temp file while saving state.")?; fs::rename(&temp, &real).context("Failed to overwrite old state while saving state")?; if log { debug!("State backup created at: {:?}", real); } else { debug!("State saved at: {:?}", real); } Ok(()) }
function_block-full_function
[ { "content": "/// Print a local log file.\n\n/// This is usually either the stdout or the stderr\n\nfn print_local_file(stdout: &mut Stdout, file: &mut File, lines: &Option<usize>, text: String) {\n\n if let Ok(metadata) = file.metadata() {\n\n if metadata.len() != 0 {\n\n // Don't print a newline between the task information and the first output\n\n println!(\"\\n{}\", text);\n\n\n\n // Only print the last lines if requested\n\n if let Some(lines) = lines {\n\n println!(\"{}\", read_last_lines(file, *lines));\n\n return;\n\n }\n\n\n\n // Print everything\n\n if let Err(err) = io::copy(file, stdout) {\n\n println!(\"Failed reading local log file: {}\", err);\n\n };\n\n }\n\n }\n\n}\n", "file_path": "client/display/log/local.rs", "rank": 6, "score": 270052.5155144278 }, { "content": "/// This is a helper function to safely kill a child process.\n\n/// Its purpose is to properly kill all processes and prevent any dangling processes.\n\n///\n\n/// Sadly, this needs some extra handling. Check the docstring of `send_signal_to_child` for\n\n/// additional information on why this needs to be done.\n\n///\n\n/// Returns `true`, if everything went alright\n\n/// Returns `false`, if the process went away while we tried to send the signal.\n\npub fn kill_child(task_id: usize, child: &mut Child, kill_children: bool) -> bool {\n\n let pid: i32 = child.id().try_into().unwrap();\n\n\n\n // Check whether this process actually spawned a shell.\n\n let is_shell = if let Ok(is_shell) = did_process_spawn_shell(pid) {\n\n is_shell\n\n } else {\n\n return false;\n\n };\n\n\n\n // We have to kill the root process first, to prevent it from spawning new processes.\n\n // However, this prevents us from getting its child processes afterwards.\n\n // That's why we have to get the list of child processes already now.\n\n let mut child_processes = None;\n\n if kill_children || is_shell {\n\n child_processes = Some(get_child_processes(pid));\n\n }\n\n\n\n // Kill the parent first\n\n let 
kill_result = child.kill();\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 7, "score": 266341.04319680756 }, { "content": "/// This is a helper function to safely kill a child process.\n\n/// Its purpose is to properly kill all processes and prevent any dangling processes.\n\npub fn kill_child(task_id: usize, child: &mut Child, _kill_children: bool) -> bool {\n\n match child.kill() {\n\n Err(_) => {\n\n debug!(\"Task {} has already finished by itself\", task_id);\n\n false\n\n }\n\n _ => true,\n\n }\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 8, "score": 266333.39817962266 }, { "content": "/// Kill a child process\n\npub fn kill_child(task_id: usize, child: &mut Child, _kill_children: bool) -> bool {\n\n match child.kill() {\n\n Err(_) => {\n\n info!(\"Task {} has already finished by itself\", task_id);\n\n false\n\n }\n\n Ok(_) => {\n\n let pids = get_cur_task_processes(child.id());\n\n\n\n for pid in pids {\n\n terminate_process(pid);\n\n }\n\n true\n\n }\n\n }\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 9, "score": 266333.39817962266 }, { "content": "/// Check whether the given group exists. Return an failure message if it doesn't.\n\npub fn ensure_group_exists(state: &MutexGuard<State>, group: &str) -> Result<(), Message> {\n\n if !state.groups.contains_key(group) {\n\n return Err(create_failure_message(format!(\n\n \"Group {} doesn't exists. Use one of these: {:?}\",\n\n group,\n\n state.groups.keys()\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/network/response_helper.rs", "rank": 10, "score": 257892.16015479708 }, { "content": "/// Returns the formatted `start` and `end` text for a given task.\n\n///\n\n/// 1. If the start || end is today, skip the date.\n\n/// 2. 
Otherwise show the date in both.\n\n///\n\n/// If the task doesn't have a start and/or end yet, an empty string will be returned\n\n/// for the respective field.\n\nfn formatted_start_end(task: &Task, settings: &Settings) -> (String, String) {\n\n // Get the start time.\n\n // If the task didn't start yet, just return two empty strings.\n\n let start = match task.start {\n\n Some(start) => start,\n\n None => return (\"\".into(), \"\".into()),\n\n };\n\n\n\n // If the task started today, just show the time.\n\n // Otherwise show the full date and time.\n\n let started_today = start >= Local::today().and_hms(0, 0, 0);\n\n let formatted_start = if started_today {\n\n start\n\n .format(&settings.client.status_time_format)\n\n .to_string()\n\n } else {\n\n start\n\n .format(&settings.client.status_datetime_format)\n\n .to_string()\n\n };\n", "file_path": "client/display/state.rs", "rank": 11, "score": 256116.10050391906 }, { "content": "/// By default, several columns aren't shown until there's actually some data to display.\n\n/// This function determines, which of those columns actually need to be shown.\n\npub fn has_special_columns(tasks: &BTreeMap<usize, Task>) -> (bool, bool, bool) {\n\n // Check whether there are any delayed tasks.\n\n let has_delayed_tasks = tasks.iter().any(|(_, task)| {\n\n matches!(\n\n task.status,\n\n TaskStatus::Stashed {\n\n enqueue_at: Some(_)\n\n }\n\n )\n\n });\n\n\n\n // Check whether there are any tasks with dependencies.\n\n let has_dependencies = tasks\n\n .iter()\n\n .any(|(_id, task)| !task.dependencies.is_empty());\n\n\n\n // Check whether there are any tasks a label.\n\n let has_labels = tasks.iter().any(|(_id, task)| task.label.is_some());\n\n\n\n (has_delayed_tasks, has_dependencies, has_labels)\n\n}\n\n\n", "file_path": "client/display/helper.rs", "rank": 12, "score": 250188.4621518429 }, { "content": "/// Follow the log ouput of running task.\n\n///\n\n/// If no task is specified, this will check for the following 
cases:\n\n///\n\n/// - No running task: Print an error that there are no running tasks\n\n/// - Single running task: Follow the output of that task\n\n/// - Multiple running tasks: Print out the list of possible tasks to follow.\n\npub fn follow_local_task_logs(pueue_directory: &Path, task_id: usize, stderr: bool) {\n\n let (stdout_handle, stderr_handle) = match get_log_file_handles(task_id, pueue_directory) {\n\n Ok((stdout, stderr)) => (stdout, stderr),\n\n Err(err) => {\n\n println!(\"Failed to get log file handles: {}\", err);\n\n return;\n\n }\n\n };\n\n let mut handle = if stderr { stderr_handle } else { stdout_handle };\n\n\n\n let (out_path, err_path) = get_log_paths(task_id, pueue_directory);\n\n let handle_path = if stderr { err_path } else { out_path };\n\n\n\n // Stdout handler to directly write log file output to io::stdout\n\n // without having to load anything into memory.\n\n let mut stdout = io::stdout();\n\n loop {\n\n // Check whether the file still exists. Exit if it doesn't.\n\n if !handle_path.exists() {\n\n println!(\"File has gone away. 
Did somebody remove the task?\");\n", "file_path": "client/display/follow.rs", "rank": 13, "score": 249616.04505825258 }, { "content": "/// This is a small helper which either returns a given group or the default group.\n\npub fn group_or_default(group: &Option<String>) -> String {\n\n group.clone().unwrap_or_else(|| \"default\".to_string())\n\n}\n\n\n", "file_path": "client/client.rs", "rank": 14, "score": 240198.59455330385 }, { "content": "/// The daemon didn't send any log output, thereby we didn't request any.\n\n/// If that's the case, read the log files from the local pueue directory\n\npub fn print_local_log(task_id: usize, colors: &Colors, settings: &Settings, lines: Option<usize>) {\n\n let (mut stdout_file, mut stderr_file) =\n\n match get_log_file_handles(task_id, &settings.shared.pueue_directory()) {\n\n Ok((stdout, stderr)) => (stdout, stderr),\n\n Err(err) => {\n\n println!(\"Failed to get log file handles: {}\", err);\n\n return;\n\n }\n\n };\n\n // Stdout handler to directly write log file output to io::stdout\n\n // without having to load anything into memory.\n\n let mut stdout = io::stdout();\n\n\n\n print_local_file(\n\n &mut stdout,\n\n &mut stdout_file,\n\n &lines,\n\n style_text(\"stdout:\", Some(colors.green()), Some(Attribute::Bold)),\n\n );\n\n\n\n print_local_file(\n\n &mut stdout,\n\n &mut stderr_file,\n\n &lines,\n\n style_text(\"stderr:\", Some(colors.red()), Some(Attribute::Bold)),\n\n );\n\n}\n\n\n", "file_path": "client/display/log/local.rs", "rank": 15, "score": 235550.44035876304 }, { "content": "/// This is invoked, whenever a task is actually restarted (in-place) without creating a new task.\n\n/// Update a possibly changed path/command and reset all infos from the previous run.\n\nfn restart(state: &mut MutexGuard<State>, to_restart: &TasksToRestart, stashed: bool) {\n\n // Check if we actually know this task.\n\n let task = if let Some(task) = state.tasks.get_mut(&to_restart.task_id) {\n\n task\n\n } else {\n\n return;\n\n 
};\n\n\n\n // Either enqueue the task or stash it.\n\n task.status = if stashed {\n\n TaskStatus::Stashed { enqueue_at: None }\n\n } else {\n\n TaskStatus::Queued\n\n };\n\n\n\n // Update command and path.\n\n task.original_command = to_restart.command.clone();\n\n task.command = insert_alias(to_restart.command.clone());\n\n task.path = to_restart.path.clone();\n\n\n\n // Reset all variables of any previous run.\n\n task.start = None;\n\n task.end = None;\n\n}\n", "file_path": "daemon/network/message_handler/restart.rs", "rank": 16, "score": 229804.66239357385 }, { "content": "/// Invoked when calling `pueue edit`.\n\n/// If a user wants to edit a message, we need to send him the current command.\n\n/// Lock the task to prevent execution, before the user has finished editing the command.\n\npub fn edit_request(task_id: usize, state: &SharedState) -> Message {\n\n // Check whether the task exists and is queued/stashed. Abort if that's not the case.\n\n let mut state = state.lock().unwrap();\n\n match state.tasks.get_mut(&task_id) {\n\n Some(task) => {\n\n if !task.is_queued() {\n\n return create_failure_message(\"You can only edit a queued/stashed task\");\n\n }\n\n task.prev_status = task.status.clone();\n\n task.status = TaskStatus::Locked;\n\n\n\n let message = EditResponseMessage {\n\n task_id: task.id,\n\n command: task.original_command.clone(),\n\n path: task.path.clone(),\n\n };\n\n Message::EditResponse(message)\n\n }\n\n None => create_failure_message(\"No task with this id.\"),\n\n }\n\n}\n\n\n", "file_path": "daemon/network/message_handler/edit.rs", "rank": 18, "score": 227360.0872448678 }, { "content": "/// Read logs directly from local files for a specific task.\n\nfn get_local_logs(settings: &Settings, id: usize, lines: Option<usize>) -> (String, String) {\n\n let (mut stdout_file, mut stderr_file) =\n\n match get_log_file_handles(id, &settings.shared.pueue_directory()) {\n\n Ok((stdout, stderr)) => (stdout, stderr),\n\n Err(err) => {\n\n let error = 
format!(\"(Pueue error) Failed to get log file handles: {}\", err);\n\n return (String::new(), error);\n\n }\n\n };\n\n\n\n let stdout = if let Some(lines) = lines {\n\n read_last_lines(&mut stdout_file, lines)\n\n } else {\n\n let mut stdout = String::new();\n\n if let Err(error) = stdout_file.read_to_string(&mut stdout) {\n\n stdout.push_str(&format!(\n\n \"(Pueue error) Failed to read local log output file: {:?}\",\n\n error\n\n ))\n\n };\n", "file_path": "client/display/log/json.rs", "rank": 19, "score": 218907.27408932522 }, { "content": "/// This function allows the user to edit a task's command or path.\n\n/// Save the string to a temporary file, which is the edited by the user with $EDITOR.\n\n/// As soon as the editor is closed, read the file content and return the line\n\npub fn edit_line(line: &str) -> Result<String> {\n\n // Create a temporary file with the command so we can edit it with the editor.\n\n let mut file = NamedTempFile::new().expect(\"Failed to create a temporary file\");\n\n writeln!(file, \"{}\", line).expect(\"Failed writing to temporary file\");\n\n\n\n // Start the editor on this file.\n\n let editor = &env::var(\"EDITOR\").unwrap_or_else(|_e| \"vi\".to_string());\n\n Command::new(editor)\n\n .arg(file.path())\n\n .status()\n\n .context(\"Failed to start editor. Do you have the $EDITOR environment variable set?\")?;\n\n\n\n // Read the file.\n\n let mut file = file.into_file();\n\n file.seek(SeekFrom::Start(0))\n\n .context(\"Couldn't seek to start of file. 
Aborting.\")?;\n\n\n\n let mut line = String::new();\n\n file.read_to_string(&mut line)\n\n .context(\"Failed to read Command after editing\")?;\n\n\n\n // Remove any trailing newlines from the command.\n\n while line.ends_with('\\n') || line.ends_with('\\r') {\n\n line.pop();\n\n }\n\n\n\n Ok(line)\n\n}\n", "file_path": "client/commands/edit.rs", "rank": 22, "score": 207705.69207452144 }, { "content": "/// Invoked on `pueue groups`.\n\n/// Manage groups.\n\n/// - Show groups\n\n/// - Add group\n\n/// - Remove group\n\npub fn group(message: GroupMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let state = state.lock().unwrap();\n\n\n\n match message {\n\n GroupMessage::List => {\n\n // Return information about all groups to the client.\n\n Message::GroupResponse(GroupResponseMessage {\n\n groups: state.groups.clone(),\n\n settings: state.settings.daemon.groups.clone(),\n\n })\n\n }\n\n GroupMessage::Add(group) => {\n\n if state.groups.contains_key(&group) {\n\n return create_failure_message(format!(\"Group \\\"{}\\\" already exists\", group));\n\n }\n\n\n\n // Propagate the message to the TaskHandler, which is responsible for actually\n\n // manipulating our internal data\n\n let result = sender.send(Message::Group(GroupMessage::Add(group.clone())));\n\n ok_or_return_failure_message!(result);\n", "file_path": "daemon/network/message_handler/group.rs", "rank": 23, "score": 201890.97614189904 }, { "content": "pub fn kill_and_print_output(mut child: Child) -> Result<()> {\n\n let _ = child.kill();\n\n let output = child.wait_with_output()?;\n\n println!(\"Stdout: \\n{:?}\", String::from_utf8_lossy(&output.stdout));\n\n\n\n println!(\"Stderr: \\n{:?}\", String::from_utf8_lossy(&output.stderr));\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/helper/daemon/helper.rs", "rank": 24, "score": 201271.03701368516 }, { "content": "/// Invoked when calling `pueue remove`.\n\n/// Remove tasks from the queue.\n\n/// We have to ensure that those tasks aren't 
running!\n\npub fn remove(task_ids: Vec<usize>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n let filter = |task: &Task| {\n\n matches!(\n\n task.status,\n\n TaskStatus::Queued\n\n | TaskStatus::Stashed { .. }\n\n | TaskStatus::Done(_)\n\n | TaskStatus::Locked\n\n )\n\n };\n\n let (mut not_running, mut running) = state.filter_tasks(filter, Some(task_ids));\n\n\n\n // Don't delete tasks, if there are other tasks that depend on this one.\n\n // However, we allow to delete those tasks, if they're supposed to be deleted as well.\n\n for task_id in not_running.clone() {\n\n if !is_task_removable(&state, &task_id, &not_running) {\n\n running.push(task_id);\n\n not_running.retain(|id| id != &task_id);\n\n };\n", "file_path": "daemon/network/message_handler/remove.rs", "rank": 25, "score": 201249.4837232591 }, { "content": "/// Invoked when calling `pueue stash`.\n\n/// Stash specific queued tasks.\n\n/// They won't be executed until they're enqueued or explicitely started.\n\npub fn stash(task_ids: Vec<usize>, state: &SharedState) -> Message {\n\n let (matching, mismatching) = {\n\n let mut state = state.lock().unwrap();\n\n let (matching, mismatching) = state.filter_tasks(\n\n |task| matches!(task.status, TaskStatus::Queued | TaskStatus::Locked),\n\n Some(task_ids),\n\n );\n\n\n\n for task_id in &matching {\n\n state.change_status(*task_id, TaskStatus::Stashed { enqueue_at: None });\n\n }\n\n\n\n (matching, mismatching)\n\n };\n\n\n\n let text = \"Tasks are stashed\";\n\n let response = compile_task_response(text, matching, mismatching);\n\n create_success_message(response)\n\n}\n", "file_path": "daemon/network/message_handler/stash.rs", "rank": 26, "score": 201248.7604964319 }, { "content": "/// Set the parallel tasks for either a specific group or the global default.\n\npub fn set_parallel_tasks(message: ParallelMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n if let Err(message) = 
ensure_group_exists(&state, &message.group) {\n\n return message;\n\n }\n\n\n\n state\n\n .settings\n\n .daemon\n\n .groups\n\n .insert(message.group.clone(), message.parallel_tasks);\n\n\n\n if let Err(error) = save_settings(&state) {\n\n return create_failure_message(format!(\"Failed while saving the config file: {}\", error));\n\n }\n\n\n\n create_success_message(format!(\n\n \"Parallel tasks setting for group \\\"{}\\\" adjusted\",\n\n &message.group\n\n ))\n\n}\n", "file_path": "daemon/network/message_handler/parallel.rs", "rank": 27, "score": 200337.31226434003 }, { "content": "/// Print the current state of the daemon in a nicely formatted table.\n\npub fn print_state(state: State, cli_command: &SubCommand, colors: &Colors, settings: &Settings) {\n\n let (json, group_only) = match cli_command {\n\n SubCommand::Status { json, group } => (*json, group.clone()),\n\n _ => panic!(\n\n \"Got wrong Subcommand {:?} in print_state. This shouldn't happen\",\n\n cli_command\n\n ),\n\n };\n\n\n\n // If the json flag is specified, print the state as json and exit.\n\n if json {\n\n println!(\"{}\", serde_json::to_string(&state).unwrap());\n\n return;\n\n }\n\n\n\n // Sort all tasks by their respective group;\n\n let sorted_tasks = sort_tasks_by_group(&state.tasks);\n\n\n\n if let Some(group) = group_only {\n\n print_single_group(state, settings, colors, sorted_tasks, group);\n\n return;\n\n }\n\n\n\n print_all_groups(state, settings, colors, sorted_tasks);\n\n}\n\n\n", "file_path": "client/display/state.rs", "rank": 28, "score": 194147.96135154268 }, { "content": "/// Print some tasks into a nicely formatted table\n\nfn print_table(tasks: &BTreeMap<usize, Task>, colors: &Colors, settings: &Settings) {\n\n let (has_delayed_tasks, has_dependencies, has_labels) = has_special_columns(tasks);\n\n\n\n // Create table header row\n\n let mut headers = vec![Cell::new(\"Id\"), Cell::new(\"Status\")];\n\n\n\n if has_delayed_tasks {\n\n headers.push(Cell::new(\"Enqueue At\"));\n\n 
}\n\n if has_dependencies {\n\n headers.push(Cell::new(\"Deps\"));\n\n }\n\n if has_labels {\n\n headers.push(Cell::new(\"Label\"));\n\n }\n\n\n\n headers.append(&mut vec![\n\n Cell::new(\"Command\"),\n\n Cell::new(\"Path\"),\n\n Cell::new(\"Start\"),\n", "file_path": "client/display/state.rs", "rank": 29, "score": 193115.45872831173 }, { "content": "/// Send a signal to one of Pueue's child process handles.\n\n///\n\n/// There are two scenarios:\n\n///\n\n/// **Normal case**\n\n///\n\n/// A task, such as `sleep 60` get's spawned by the posix shell `sh`.\n\n/// This results in the process `sh -c 'sleep 60'`.\n\n/// Since the posix shell doesn't propagate any process signals to its children, we have to:\n\n/// 1. Send the signal to the shell.\n\n/// 2. Send the signal directly to the children.\n\n/// In our case this would be the `sleep 60` child process.\n\n///\n\n/// If the user also want's to send the signal to all child processes of the task,\n\n/// we have to get all child-processes of the child process.\n\n///\n\n/// **Special case**\n\n///\n\n/// The posix shell `sh` has some some inconsistent behavior.\n\n/// In some circumstances and environments, the `sh -c $command` doesn't spawn a `sh` process with a\n\n/// `$command` child-process, but rather spawns the `$command` as a top-level process directly.\n\n///\n\n/// This makes things a bit more complicated, since we have to find out whether a shell is spawned\n\n/// or not. If a shell is spawned, we do the **Normal case** handling.\n\n///\n\n/// If **no** shell is spawned, we have to send the signal to the top-level process only.\n\n///\n\n/// If the user also want's to send the signal to all child processes of the task,\n\n/// we have to get all child-processes of that `$command` process. 
and send them the signal.\n\n///\n\n/// Returns `Ok(true)`, if everything went alright\n\n/// Returns `Ok(false)`, if the process went away while we tried to send the signal.\n\npub fn send_signal_to_child(child: &Child, signal: Signal, send_to_children: bool) -> Result<bool> {\n\n let pid: i32 = child.id().try_into().unwrap();\n\n // Check whether this process actually spawned a shell.\n\n let is_shell = if let Ok(is_shell) = did_process_spawn_shell(pid) {\n\n is_shell\n\n } else {\n\n return Ok(false);\n\n };\n\n\n\n if is_shell {\n\n // If it's a shell, we have to send the signal to the actual shell and to all it's children.\n\n // There might be multiple children, for instance, when users use the `&` operator.\n\n // If the `send_to_children` flag is given, the\n\n\n\n // Get all children before sending the signal to the parent process.\n\n // Otherwise the parent might go away and we'll no longer be able to access the children.\n\n let shell_children = get_child_processes(pid);\n\n\n\n // Send the signal to the shell, don't propagate to its children yet.\n\n send_signal_to_process(pid, signal, false)?;\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 30, "score": 191388.3942763651 }, { "content": "/// Send a signal to a windows process.\n\npub fn run_action_on_child(child: &Child, action: &ProcessAction, _children: bool) -> Result<bool> {\n\n let pids = get_cur_task_processes(child.id());\n\n if pids.is_empty() {\n\n bail!(\"Process has just gone away\");\n\n }\n\n\n\n match action {\n\n ProcessAction::Pause => {\n\n for pid in pids {\n\n for thread in get_threads(pid) {\n\n suspend_thread(thread);\n\n }\n\n }\n\n }\n\n ProcessAction::Resume => {\n\n for pid in pids {\n\n for thread in get_threads(pid) {\n\n resume_thread(thread);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(true)\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 31, "score": 191375.44281939697 }, { "content": "/// Invoked when calling `pueue log`.\n\n/// 
Return the current state and the stdou/stderr of all tasks to the client.\n\npub fn get_log(message: LogRequestMessage, state: &SharedState) -> Message {\n\n let state = { state.lock().unwrap().clone() };\n\n // Return all logs, if no specific task id is specified.\n\n let task_ids = if message.task_ids.is_empty() {\n\n state.tasks.keys().cloned().collect()\n\n } else {\n\n message.task_ids\n\n };\n\n\n\n let mut tasks = BTreeMap::new();\n\n for task_id in task_ids.iter() {\n\n if let Some(task) = state.tasks.get(task_id) {\n\n // We send log output and the task at the same time.\n\n // This isn't as efficient as sending the raw compressed data directly,\n\n // but it's a lot more convenient for now.\n\n let (stdout, stderr) = if message.send_logs {\n\n match read_and_compress_log_files(\n\n *task_id,\n\n &state.settings.shared.pueue_directory(),\n\n message.lines,\n", "file_path": "daemon/network/message_handler/log.rs", "rank": 32, "score": 188854.53569491132 }, { "content": "/// Remove the daemon's pid file.\n\n/// Errors if it doesn't exist or cannot be deleted.\n\npub fn cleanup_pid_file(pueue_dir: &Path) -> Result<()> {\n\n let pid_file = pueue_dir.join(\"pueue.pid\");\n\n if !pid_file.exists() {\n\n bail!(\n\n \"Couldn't remove pid file, since it doesn't exists. 
This shouldn't happen: {:?}\",\n\n pid_file\n\n );\n\n }\n\n\n\n std::fs::remove_file(pid_file)?;\n\n Ok(())\n\n}\n", "file_path": "daemon/pid.rs", "rank": 33, "score": 185459.74968401805 }, { "content": "/// Create a file containing the current pid of the daemon's main process.\n\n/// Fails if it already exists or cannot be created.\n\npub fn create_pid_file(pueue_dir: &Path) -> Result<()> {\n\n let pid_path = pueue_dir.join(\"pueue.pid\");\n\n // If an old PID file exists, check if the referenced process is still running.\n\n // The pid might not have been properly cleaned up, if the machine or Pueue crashed hard.\n\n if pid_path.exists() {\n\n check_for_running_daemon(&pid_path)?;\n\n }\n\n let mut file = File::create(pid_path)?;\n\n\n\n file.write_all(std::process::id().to_string().as_bytes())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/pid.rs", "rank": 34, "score": 185459.59553196467 }, { "content": "/// Print some information about a task, which is displayed on top of the task's log output.\n\nfn print_task_info(task: &Task, colors: &Colors) {\n\n // Print task id and exit code.\n\n let task_cell = Cell::new(format!(\"Task {}: \", task.id)).add_attribute(Attribute::Bold);\n\n\n\n let (exit_status, color) = match &task.status {\n\n TaskStatus::Paused => (\"paused\".into(), colors.white()),\n\n TaskStatus::Running => (\"running\".into(), colors.yellow()),\n\n TaskStatus::Done(result) => match result {\n\n TaskResult::Success => (\"completed successfully\".into(), colors.green()),\n\n TaskResult::Failed(exit_code) => {\n\n (format!(\"failed with exit code {}\", exit_code), colors.red())\n\n }\n\n TaskResult::FailedToSpawn(err) => (format!(\"failed to spawn: {}\", err), colors.red()),\n\n TaskResult::Killed => (\"killed by system or user\".into(), colors.red()),\n\n TaskResult::Errored => (\"some IO error.\\n Check daemon log.\".into(), colors.red()),\n\n TaskResult::DependencyFailed => (\"dependency failed\".into(), colors.red()),\n\n },\n\n _ => 
(task.status.to_string(), colors.white()),\n\n };\n\n let status_cell = Cell::new(exit_status).fg(color);\n", "file_path": "client/display/log/mod.rs", "rank": 35, "score": 184166.8427272922 }, { "content": "#[test]\n\nfn test_restore_from_old_state() -> Result<()> {\n\n better_panic::install();\n\n let old_state = include_str!(\"data/v1.0.0_state.json\");\n\n\n\n let temp_dir = TempDir::new(\"pueue_lib\")?;\n\n let temp_path = temp_dir.path();\n\n\n\n // Open v0.12.2 file and write old state to it.\n\n let temp_state_path = temp_dir.path().join(\"state.json\");\n\n let mut file = File::create(&temp_state_path)?;\n\n file.write_all(old_state.as_bytes())?;\n\n\n\n let mut settings: Settings = Settings::default_config()?.try_into()?;\n\n settings.shared.pueue_directory = temp_path.to_path_buf();\n\n\n\n let state = restore_state(&settings.shared.pueue_directory())\n\n .context(\"Failed to restore state in test\")?;\n\n\n\n assert!(state.is_some());\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/state_backward_compatibility.rs", "rank": 36, "score": 183437.95079246134 }, { "content": "pub fn base_setup() -> Result<(Settings, TempDir)> {\n\n // Create a temporary directory used for testing.\n\n let tempdir = TempDir::new(\"pueue_lib\").unwrap();\n\n let tempdir_path = tempdir.path();\n\n\n\n std::fs::create_dir(tempdir_path.join(\"certs\")).unwrap();\n\n\n\n let shared = Shared {\n\n pueue_directory: tempdir_path.clone().to_path_buf(),\n\n #[cfg(not(target_os = \"windows\"))]\n\n use_unix_socket: true,\n\n #[cfg(not(target_os = \"windows\"))]\n\n unix_socket_path: tempdir_path.join(\"test.socket\"),\n\n host: \"localhost\".to_string(),\n\n port: \"51230\".to_string(),\n\n daemon_cert: tempdir_path.join(\"certs\").join(\"daemon.cert\"),\n\n daemon_key: tempdir_path.join(\"certs\").join(\"daemon.key\"),\n\n shared_secret_path: tempdir_path.join(\"secret\"),\n\n };\n\n\n", "file_path": "tests/helper/daemon/setup.rs", "rank": 37, "score": 182116.73444852754 }, { "content": 
"/// Sort given tasks by their groups\n\n/// This is needed to print a table for each group\n\npub fn sort_tasks_by_group(\n\n tasks: &BTreeMap<usize, Task>,\n\n) -> BTreeMap<String, BTreeMap<usize, Task>> {\n\n // We use a BTreeMap, since groups should be ordered alphabetically by their name\n\n let mut sorted_task_groups = BTreeMap::new();\n\n for (id, task) in tasks.iter() {\n\n if !sorted_task_groups.contains_key(&task.group) {\n\n sorted_task_groups.insert(task.group.clone(), BTreeMap::new());\n\n }\n\n sorted_task_groups\n\n .get_mut(&task.group)\n\n .unwrap()\n\n .insert(*id, task.clone());\n\n }\n\n\n\n sorted_task_groups\n\n}\n", "file_path": "client/display/helper.rs", "rank": 38, "score": 176033.88308157495 }, { "content": "/// Prints log output received from the daemon.\n\n/// We can safely call .unwrap() on stdout and stderr in here, since this\n\n/// branch is always called after ensuring that both are `Some`.\n\npub fn print_remote_log(task_log: &TaskLogMessage, colors: &Colors) {\n\n // Save whether stdout was printed, so we can add a newline between outputs.\n\n if let Some(bytes) = task_log.stdout.as_ref() {\n\n if !bytes.is_empty() {\n\n println!(\n\n \"\\n{}\",\n\n style_text(\"stdout: \", Some(colors.green()), Some(Attribute::Bold))\n\n );\n\n\n\n if let Err(err) = decompress_and_print_remote_log(bytes) {\n\n println!(\"Error while parsing stdout: {}\", err);\n\n }\n\n }\n\n }\n\n\n\n if let Some(bytes) = task_log.stderr.as_ref() {\n\n if !bytes.is_empty() {\n\n println!(\n\n \"\\n{}\",\n\n style_text(\"stderr: \", Some(colors.red()), Some(Attribute::Bold))\n\n );\n\n\n\n if let Err(err) = decompress_and_print_remote_log(bytes) {\n\n println!(\"Error while parsing stderr: {}\", err);\n\n };\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/display/log/remote.rs", "rank": 39, "score": 172524.58565728867 }, { "content": "/// Validator function. 
The input string has to be parsable as int and bigger than 0\n\nfn min_one(value: &str) -> Result<(), String> {\n\n match value.parse::<usize>() {\n\n Ok(value) => {\n\n if value < 1 {\n\n return Err(\"You must provide a value that's bigger than 0\".into());\n\n }\n\n Ok(())\n\n }\n\n Err(_) => Err(\"Failed to parse integer\".into()),\n\n }\n\n}\n", "file_path": "client/cli.rs", "rank": 40, "score": 163528.29768154462 }, { "content": "/// Invoked when calling `pueue add`.\n\n/// Queues a new task to the state.\n\n/// If the start_immediately flag is set, send a StartMessage to the task handler.\n\npub fn add_task(message: AddMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n if let Err(message) = ensure_group_exists(&state, &message.group) {\n\n return message;\n\n }\n\n\n\n let starting_status = if message.stashed || message.enqueue_at.is_some() {\n\n TaskStatus::Stashed {\n\n enqueue_at: message.enqueue_at,\n\n }\n\n } else {\n\n TaskStatus::Queued\n\n };\n\n\n\n // Ensure that specified dependencies actually exist.\n\n let not_found: Vec<_> = message\n\n .dependencies\n\n .iter()\n\n .filter(|id| !state.tasks.contains_key(id))\n\n .collect();\n", "file_path": "daemon/network/message_handler/add.rs", "rank": 41, "score": 161561.35826607447 }, { "content": "/// Check whether a process's commandline string is actually a shell or not\n\nfn did_process_spawn_shell(pid: i32) -> Result<bool> {\n\n // Get the /proc representation of the child, so we can do some checks\n\n let process = if let Ok(process) = Process::new(pid) {\n\n process\n\n } else {\n\n info!(\n\n \"Process to kill has probably just gone away. 
Process {}\",\n\n pid\n\n );\n\n bail!(\"Process has just gone away\");\n\n };\n\n\n\n // Get the root command and check whether it's actually a shell with `sh -c`.\n\n let mut cmdline = if let Ok(cmdline) = process.cmdline() {\n\n cmdline\n\n } else {\n\n info!(\n\n \"Process to kill has probably just gone away. Process {}\",\n\n pid\n\n );\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 42, "score": 152995.3092540734 }, { "content": "/// Waits for a daemon to shut down.\n\n/// This is done by waiting for the pid to disappear.\n\npub fn wait_for_shutdown(pid: i32) -> Result<()> {\n\n // Try to read the process. If this fails, the daemon already exited.\n\n let process = match Process::new(pid) {\n\n Ok(process) => process,\n\n Err(_) => return Ok(()),\n\n };\n\n\n\n // Give the daemon about 1 sec to shutdown.\n\n let tries = 40;\n\n let mut current_try = 0;\n\n\n\n while current_try < tries {\n\n // Process is still alive, wait a little longer\n\n if process.is_alive() {\n\n sleep_ms(50);\n\n current_try += 1;\n\n continue;\n\n }\n\n\n\n return Ok(());\n", "file_path": "tests/helper/daemon/helper.rs", "rank": 43, "score": 152993.17624129876 }, { "content": "/// Assert that certain process id no longer exists\n\npub fn process_exists(pid: u32) -> bool {\n\n unsafe {\n\n let handle = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);\n\n\n\n let mut process_entry = PROCESSENTRY32::default();\n\n process_entry.dwSize = std::mem::size_of::<PROCESSENTRY32>() as u32;\n\n\n\n loop {\n\n if process_entry.th32ProcessID == pid {\n\n CloseHandle(handle);\n\n return true;\n\n }\n\n\n\n if Process32Next(handle, &mut process_entry) == FALSE {\n\n break;\n\n }\n\n }\n\n\n\n CloseHandle(handle);\n\n }\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 44, "score": 151533.69911346657 }, { "content": "/// Check, whether a specific process is exists or not\n\npub fn process_exists(pid: u32) -> bool {\n\n match Process::new(pid as i32) {\n\n 
Ok(process) => process.is_alive(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::thread::sleep;\n\n use std::time::Duration;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use super::*;\n\n\n\n /// Assert that certain process id no longer exists\n\n fn process_is_gone(pid: i32) -> bool {\n\n !process_exists(pid as u32)\n\n }\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 45, "score": 151533.69911346657 }, { "content": "/// Check, whether a specific process is exists or not\n\npub fn process_exists(pid: u32) -> bool {\n\n Path::new(&format!(\"/proc/{}\", pid)).exists()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::thread::sleep;\n\n use std::time::Duration;\n\n\n\n /// Assert that certain process id no longer exists\n\n fn process_is_gone(pid: u32) -> bool {\n\n !process_exists(pid)\n\n }\n\n\n\n #[test]\n\n /// Simply check, whether spawning of a shell command works\n\n fn test_spawn_command() {\n\n let mut child = compile_shell_command(\"sleep 0.1\")\n\n .spawn()\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 46, "score": 151533.69911346657 }, { "content": "/// Print the log ouput of finished tasks.\n\n/// Either print the logs of every task\n\n/// or only print the logs of the specified tasks.\n\npub fn print_logs(\n\n mut task_logs: BTreeMap<usize, TaskLogMessage>,\n\n cli_command: &SubCommand,\n\n colors: &Colors,\n\n settings: &Settings,\n\n) {\n\n // Get actual commandline options.\n\n // This is necessary to know how we should display/return the log information.\n\n let (json, task_ids, lines, full) = match cli_command {\n\n SubCommand::Log {\n\n json,\n\n task_ids,\n\n lines,\n\n full,\n\n } => (*json, task_ids.clone(), *lines, *full),\n\n _ => panic!(\n\n \"Got wrong Subcommand {:?} in print_log. 
This shouldn't happen\",\n\n cli_command\n\n ),\n\n };\n", "file_path": "client/display/log/mod.rs", "rank": 47, "score": 151237.7932108808 }, { "content": "fn parse_delay_until(src: &str) -> Result<DateTime<Local>, String> {\n\n if let Ok(seconds) = src.parse::<i64>() {\n\n let delay_until = Local::now() + Duration::seconds(seconds);\n\n return Ok(delay_until);\n\n }\n\n\n\n if let Ok(date_time) = parse_date_string(src, Local::now(), Dialect::Us) {\n\n return Ok(date_time);\n\n }\n\n\n\n Err(String::from(\n\n \"could not parse as seconds or date expression\",\n\n ))\n\n}\n\n\n", "file_path": "client/cli.rs", "rank": 48, "score": 150299.19378218427 }, { "content": "/// Invoked when calling `pueue clean`.\n\n/// Remove all failed or done tasks from the state.\n\npub fn clean(message: CleanMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n ok_or_return_failure_message!(save_state(&state));\n\n\n\n let (matching, _) = state.filter_tasks(|task| matches!(task.status, TaskStatus::Done(_)), None);\n\n\n\n for task_id in &matching {\n\n // Ensure the task is removable, i.e. 
there are no dependant tasks.\n\n if !is_task_removable(&state, task_id, &[]) {\n\n continue;\n\n }\n\n // Check if we should ignore this task, if only successful tasks should be removed.\n\n if message.successful_only {\n\n if let Some(task) = state.tasks.get(task_id) {\n\n if !matches!(task.status, TaskStatus::Done(TaskResult::Success)) {\n\n continue;\n\n }\n\n }\n\n }\n\n let _ = state.tasks.remove(task_id).unwrap();\n", "file_path": "daemon/network/message_handler/clean.rs", "rank": 49, "score": 149017.7546388019 }, { "content": "/// Invoked when calling `pueue switch`.\n\n/// Switch the position of two tasks in the upcoming queue.\n\n/// We have to ensure that those tasks are either `Queued` or `Stashed`\n\npub fn switch(message: SwitchMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n\n\n let task_ids = vec![message.task_id_1, message.task_id_2];\n\n let (_, mismatching) = state.filter_tasks(\n\n |task| matches!(task.status, TaskStatus::Queued | TaskStatus::Stashed { .. }),\n\n Some(task_ids.to_vec()),\n\n );\n\n if !mismatching.is_empty() {\n\n return create_failure_message(\"Tasks have to be either queued or stashed.\");\n\n }\n\n\n\n // Get the tasks. 
Expect them to be there, since we found no mismatch\n\n let mut first_task = state.tasks.remove(&task_ids[0]).unwrap();\n\n let mut second_task = state.tasks.remove(&task_ids[1]).unwrap();\n\n\n\n // Switch task ids\n\n let first_id = first_task.id;\n\n let second_id = second_task.id;\n\n first_task.id = second_id;\n", "file_path": "daemon/network/message_handler/switch.rs", "rank": 50, "score": 149014.24285968646 }, { "content": "/// Invoked when calling `pueue enqueue`.\n\n/// Enqueue specific stashed tasks.\n\npub fn enqueue(message: EnqueueMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n let (matching, mismatching) = {\n\n let (matching, mismatching) = state.filter_tasks(\n\n |task| matches!(task.status, TaskStatus::Stashed { .. } | TaskStatus::Locked),\n\n Some(message.task_ids),\n\n );\n\n\n\n (matching, mismatching)\n\n };\n\n\n\n for task_id in &matching {\n\n // We just checked that they're there and the state is locked. It's safe to unwrap.\n\n let task = state.tasks.get_mut(task_id).expect(\"Task should be there.\");\n\n\n\n // Either specify the point of time the task should be enqueued or enqueue the task\n\n // immediately.\n\n if message.enqueue_at.is_some() {\n\n task.status = TaskStatus::Stashed {\n\n enqueue_at: message.enqueue_at,\n", "file_path": "daemon/network/message_handler/enqueue.rs", "rank": 51, "score": 149013.79404180843 }, { "content": "/// Invoked after closing the editor on `pueue edit`.\n\n/// Now we actually update the message with the updated command from the client.\n\npub fn edit(message: EditMessage, state: &SharedState) -> Message {\n\n // Check whether the task exists and is locked. 
Abort if that's not the case.\n\n let mut state = state.lock().unwrap();\n\n match state.tasks.get_mut(&message.task_id) {\n\n Some(task) => {\n\n if !(task.status == TaskStatus::Locked) {\n\n return create_failure_message(\"Task is no longer locked.\");\n\n }\n\n\n\n task.status = task.prev_status.clone();\n\n task.original_command = message.command.clone();\n\n task.command = insert_alias(message.command.clone());\n\n task.path = message.path.clone();\n\n ok_or_return_failure_message!(save_state(&state));\n\n\n\n create_success_message(\"Command has been updated\")\n\n }\n\n None => create_failure_message(format!(\"Task to edit has gone away: {}\", message.task_id)),\n\n }\n\n}\n", "file_path": "daemon/network/message_handler/edit.rs", "rank": 52, "score": 149010.2161949484 }, { "content": "pub fn print_log_json(\n\n task_log_messages: BTreeMap<usize, TaskLogMessage>,\n\n settings: &Settings,\n\n lines: Option<usize>,\n\n) {\n\n let mut tasks: BTreeMap<usize, Task> = BTreeMap::new();\n\n let mut task_log: BTreeMap<usize, (String, String)> = BTreeMap::new();\n\n // Convert the TaskLogMessage into a proper JSON serializable format.\n\n // Output in TaskLogMessages, if it exists, is compressed.\n\n // We need to decompress and convert to normal strings.\n\n for (id, message) in task_log_messages {\n\n tasks.insert(id, message.task);\n\n\n\n if settings.client.read_local_logs {\n\n let output = get_local_logs(settings, id, lines);\n\n task_log.insert(id, output);\n\n } else {\n\n let output = get_remote_logs(message.stdout, message.stderr);\n\n task_log.insert(id, output);\n\n }\n", "file_path": "client/display/log/json.rs", "rank": 53, "score": 148523.35024829736 }, { "content": "// Determine how many lines of stderr/out should be printed/returned.\n\n// `None` implicates that all lines are printed.\n\n//\n\n// By default, everything is returned for single tasks and only some lines for multiple.\n\n// `json` is an exception to this, in json mode we always only 
return some lines\n\n// (unless otherwise explicitely requested).\n\n//\n\n// `full` always forces the full log output\n\n// `lines` force a specific amount of lines\n\npub fn determine_log_line_amount(\n\n full: bool,\n\n lines: &Option<usize>,\n\n json: bool,\n\n task_amount: usize,\n\n) -> Option<usize> {\n\n if full {\n\n None\n\n } else if let Some(lines) = lines {\n\n Some(*lines)\n\n } else {\n\n // By default, only some lines are shown per task, if multiple tasks exist or\n\n // json ouput is requested.\n\n if task_amount > 1 || json {\n\n Some(15)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/display/log/mod.rs", "rank": 54, "score": 145961.00783542293 }, { "content": "/// Send a signal to a unix process.\n\nfn send_signal_to_process(pid: u32, signal: Signal, _children: bool) -> Result<bool, nix::Error> {\n\n debug!(\"Sending signal {} to {}\", signal, pid);\n\n\n\n signal::kill(Pid::from_raw(pid.try_into().unwrap()), signal)?;\n\n Ok(true)\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 55, "score": 145908.21270786118 }, { "content": "/// Get a daemon pid from a specific pueue directory.\n\n/// This function gives the daemon a little time to boot up, but ultimately crashes if it takes too\n\n/// long.\n\npub fn get_pid(pueue_dir: &Path) -> Result<i32> {\n\n let pid_file = pueue_dir.join(\"pueue.pid\");\n\n\n\n // Give the daemon about 1 sec to boot and create the pid file.\n\n let tries = 20;\n\n let mut current_try = 0;\n\n\n\n while current_try < tries {\n\n // The daemon didn't create the pid file yet. 
Wait for 100ms and try again.\n\n if !pid_file.exists() {\n\n sleep_ms(50);\n\n current_try += 1;\n\n continue;\n\n }\n\n\n\n let mut file = File::open(&pid_file).context(\"Couldn't open pid file\")?;\n\n let mut content = String::new();\n\n file.read_to_string(&mut content)\n\n .context(\"Couldn't write to file\")?;\n\n\n", "file_path": "tests/helper/daemon/helper.rs", "rank": 56, "score": 145594.10030799053 }, { "content": "/// Spawn the daemon main logic in it's own async function.\n\n/// It'll be executed by the tokio multi-threaded executor.\n\npub fn boot_daemon(pueue_dir: &Path) -> Result<i32> {\n\n let path = pueue_dir.clone().to_path_buf();\n\n // Start/spin off the daemon and get its PID\n\n tokio::spawn(run_and_handle_error(path, true));\n\n let pid = get_pid(pueue_dir)?;\n\n\n\n let tries = 20;\n\n let mut current_try = 0;\n\n\n\n // Wait up to 1s for the unix socket to pop up.\n\n let socket_path = pueue_dir.join(\"test.socket\");\n\n while current_try < tries {\n\n sleep_ms(50);\n\n if socket_path.exists() {\n\n return Ok(pid);\n\n }\n\n\n\n current_try += 1;\n\n }\n\n\n\n bail!(\"Daemon didn't boot after 1sec\")\n\n}\n\n\n", "file_path": "tests/helper/daemon/setup.rs", "rank": 57, "score": 145594.10030799053 }, { "content": "/// Spawn the daemon by calling the actual pueued binary.\n\n/// This function also checks for the pid file and the unix socket to pop-up.\n\npub fn boot_standalone_daemon(pueue_dir: &Path) -> Result<Child> {\n\n let child = Command::cargo_bin(\"pueued\")?\n\n .arg(\"--config\")\n\n .arg(pueue_dir.join(\"pueue.yml\").to_str().unwrap())\n\n .arg(\"-vvv\")\n\n .stdout(Stdio::piped())\n\n .spawn()?;\n\n\n\n let tries = 20;\n\n let mut current_try = 0;\n\n\n\n // Wait up to 1s for the unix socket to pop up.\n\n let socket_path = pueue_dir.join(\"test.socket\");\n\n while current_try < tries {\n\n sleep_ms(50);\n\n if socket_path.exists() {\n\n return Ok(child);\n\n }\n\n\n\n current_try += 1;\n", "file_path": 
"tests/helper/daemon/setup.rs", "rank": 58, "score": 143387.24568342656 }, { "content": "/// Return a nicely formatted headline that's displayed above group tables\n\npub fn get_group_headline(\n\n name: &str,\n\n status: &GroupStatus,\n\n parallel: usize,\n\n colors: &Colors,\n\n) -> String {\n\n // Style group name\n\n let name = style(format!(\"Group \\\"{}\\\"\", name)).attribute(Attribute::Bold);\n\n\n\n // Print the current state of the group.\n\n let status = match status {\n\n GroupStatus::Running => style_text(\"running\", Some(colors.green()), None),\n\n GroupStatus::Paused => style_text(\"paused\", Some(colors.yellow()), None),\n\n };\n\n\n\n format!(\"{} ({} parallel): {}\", name, parallel, status)\n\n}\n\n\n", "file_path": "client/display/helper.rs", "rank": 59, "score": 141829.0036355317 }, { "content": "/// We cannot easily stream log output from the client to the daemon (yet).\n\n/// Right now, stdout and stderr are compressed in the daemon and sent as a single payload to the\n\n/// client. 
In here, we take that payload, decompress it and stream it it directly to stdout.\n\nfn decompress_and_print_remote_log(bytes: &[u8]) -> Result<()> {\n\n let mut decompressor = FrameDecoder::new(bytes);\n\n\n\n let stdout = io::stdout();\n\n let mut write = stdout.lock();\n\n io::copy(&mut decompressor, &mut write)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "client/display/log/remote.rs", "rank": 60, "score": 141290.7982775833 }, { "content": "/// Compile a response for instructions with multiple tasks ids\n\n/// A custom message will be combined with a text about all matching tasks\n\n/// and possibly tasks for which the instruction cannot be executed.\n\npub fn compile_task_response(\n\n message: &str,\n\n matching: Vec<usize>,\n\n mismatching: Vec<usize>,\n\n) -> String {\n\n let matching: Vec<String> = matching.iter().map(|id| id.to_string()).collect();\n\n let mismatching: Vec<String> = mismatching.iter().map(|id| id.to_string()).collect();\n\n let matching_string = matching.join(\", \");\n\n\n\n // We don't have any mismatching ids, return the simple message.\n\n if mismatching.is_empty() {\n\n return format!(\"{}: {}\", message, matching_string);\n\n }\n\n\n\n let mismatched_message = \"The command failed for tasks\";\n\n let mismatching_string = mismatching.join(\", \");\n\n\n\n // All given ids are invalid.\n\n if matching.is_empty() {\n\n return format!(\"{}: {}\", mismatched_message, mismatching_string);\n\n }\n\n\n\n // Some ids were valid, some were invalid.\n\n format!(\n\n \"{}: {}\\n{}: {}\",\n\n message, matching_string, mismatched_message, mismatching_string\n\n )\n\n}\n", "file_path": "daemon/network/response_helper.rs", "rank": 61, "score": 138110.59004700818 }, { "content": "/// Invoked when calling `pueue pause`.\n\n/// Forward the pause message to the task handler, which then pauses groups/tasks/everything.\n\npub fn pause(message: PauseMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let state = state.lock().unwrap();\n\n 
// If a group is selected, make sure it exists.\n\n if let TaskSelection::Group(group) = &message.tasks {\n\n if let Err(message) = ensure_group_exists(&state, group) {\n\n return message;\n\n }\n\n }\n\n\n\n // Forward the message to the task handler.\n\n sender\n\n .send(Message::Pause(message.clone()))\n\n .expect(SENDER_ERR);\n\n\n\n // Return a response depending on the selected tasks.\n\n match message.tasks {\n\n TaskSelection::TaskIds(task_ids) => {\n\n let response = task_response_helper(\n\n \"Tasks are being paused\",\n\n task_ids,\n", "file_path": "daemon/network/message_handler/pause.rs", "rank": 62, "score": 137297.33306463208 }, { "content": "/// Invoked when calling `pueue kill`.\n\n/// Forward the kill message to the task handler, which then kills the process.\n\npub fn kill(message: KillMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let state = state.lock().unwrap();\n\n // If a group is selected, make sure it exists.\n\n if let TaskSelection::Group(group) = &message.tasks {\n\n if let Err(message) = ensure_group_exists(&state, group) {\n\n return message;\n\n }\n\n }\n\n\n\n sender\n\n .send(Message::Kill(message.clone()))\n\n .expect(SENDER_ERR);\n\n\n\n if let Some(signal) = message.signal {\n\n match message.tasks {\n\n TaskSelection::TaskIds(task_ids) => {\n\n let response = task_response_helper(\n\n \"Tasks are being killed\",\n\n task_ids,\n\n |task| task.is_running(),\n", "file_path": "daemon/network/message_handler/kill.rs", "rank": 63, "score": 137292.82536851207 }, { "content": "/// Invoked when calling `pueue start`.\n\n/// Forward the start message to the task handler, which then starts the process(es).\n\npub fn start(message: StartMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let state = state.lock().unwrap();\n\n // If a group is selected, make sure it exists.\n\n if let TaskSelection::Group(group) = &message.tasks {\n\n if let Err(message) = ensure_group_exists(&state, group) 
{\n\n return message;\n\n }\n\n }\n\n\n\n // Forward the message to the task handler.\n\n sender\n\n .send(Message::Start(message.clone()))\n\n .expect(SENDER_ERR);\n\n\n\n // Return a response depending on the selected tasks.\n\n match message.tasks {\n\n TaskSelection::TaskIds(task_ids) => {\n\n let response = task_response_helper(\n\n \"Tasks are being started\",\n\n task_ids,\n", "file_path": "daemon/network/message_handler/start.rs", "rank": 64, "score": 137292.79424378107 }, { "content": "/// Invoked when calling `pueue send`.\n\n/// The message will be forwarded to the task handler, which then sends the user input to the process.\n\n/// In here we only do some error handling.\n\npub fn send(message: SendMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n // Check whether the task exists and is running. Abort if that's not the case.\n\n {\n\n let state = state.lock().unwrap();\n\n match state.tasks.get(&message.task_id) {\n\n Some(task) => {\n\n if task.status != TaskStatus::Running {\n\n return create_failure_message(\"You can only send input to a running task\");\n\n }\n\n }\n\n None => return create_failure_message(\"No task with this id.\"),\n\n }\n\n }\n\n\n\n // Check whether the task exists and is running, abort if that's not the case.\n\n sender.send(Message::Send(message)).expect(SENDER_ERR);\n\n\n\n create_success_message(\"Message is being send to the process.\")\n\n}\n", "file_path": "daemon/network/message_handler/send.rs", "rank": 65, "score": 137292.70412189345 }, { "content": "pub fn handle_message(message: Message, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n match message {\n\n Message::Add(message) => add::add_task(message, sender, state),\n\n Message::Clean(message) => clean::clean(message, state),\n\n Message::Edit(message) => edit::edit(message, state),\n\n Message::EditRequest(task_id) => edit::edit_request(task_id, state),\n\n Message::Enqueue(message) => enqueue::enqueue(message, state),\n\n 
Message::Group(message) => group::group(message, sender, state),\n\n Message::Kill(message) => kill::kill(message, sender, state),\n\n Message::Log(message) => log::get_log(message, state),\n\n Message::Parallel(message) => parallel::set_parallel_tasks(message, state),\n\n Message::Pause(message) => pause::pause(message, sender, state),\n\n Message::Remove(task_ids) => remove::remove(task_ids, state),\n\n Message::Reset(message) => reset(message, sender),\n\n Message::Restart(message) => restart::restart_multiple(message, sender, state),\n\n Message::Send(message) => send::send(message, sender, state),\n\n Message::Start(message) => start::start(message, sender, state),\n\n Message::Stash(task_ids) => stash::stash(task_ids, state),\n\n Message::Switch(message) => switch::switch(message, state),\n\n Message::Status => get_status(state),\n\n Message::DaemonShutdown(shutdown_type) => shutdown(sender, shutdown_type),\n\n _ => create_failure_message(\"Not yet implemented\"),\n\n }\n\n}\n\n\n", "file_path": "daemon/network/message_handler/mod.rs", "rank": 66, "score": 137289.3443592596 }, { "content": "pub fn print_groups(message: GroupResponseMessage, colors: &Colors) {\n\n let mut text = String::new();\n\n let mut group_iter = message.groups.iter().peekable();\n\n while let Some((name, status)) = group_iter.next() {\n\n let parallel = *message.settings.get(name).unwrap();\n\n let styled = get_group_headline(name, status, parallel, colors);\n\n\n\n text.push_str(&styled);\n\n if group_iter.peek().is_some() {\n\n text.push('\\n');\n\n }\n\n }\n\n println!(\"{}\", text);\n\n}\n", "file_path": "client/display/group.rs", "rank": 67, "score": 137257.72207506327 }, { "content": "fn ok_or_failure_message<T, E: Display>(result: Result<T, E>) -> Result<T, Message> {\n\n match result {\n\n Ok(inner) => Ok(inner),\n\n Err(error) => Err(create_failure_message(format!(\n\n \"Failed to save state. 
This is a bug: {}\",\n\n error\n\n ))),\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! ok_or_return_failure_message {\n\n ($expression:expr) => {\n\n match ok_or_failure_message($expression) {\n\n Ok(task_id) => task_id,\n\n Err(error) => return error,\n\n }\n\n };\n\n}\n\n\n", "file_path": "daemon/network/message_handler/mod.rs", "rank": 68, "score": 135461.00988784817 }, { "content": "/// This is a simple small helper function with the purpose of easily styling text,\n\n/// while also prevent styling if we're printing to a non-tty output.\n\n/// If there's any kind of styling in the code, it should be done with the help of this function.\n\npub fn style_text<T: ToString>(\n\n text: T,\n\n color: Option<Color>,\n\n attribute: Option<Attribute>,\n\n) -> String {\n\n let text = text.to_string();\n\n // No tty, we aren't allowed to do any styling\n\n if !stdout().is_tty() {\n\n return text;\n\n }\n\n\n\n let mut styled = style(text);\n\n if let Some(color) = color {\n\n styled = styled.with(color);\n\n }\n\n if let Some(attribute) = attribute {\n\n styled = styled.attribute(attribute);\n\n }\n\n\n\n styled.to_string()\n\n}\n\n\n", "file_path": "client/display/helper.rs", "rank": 69, "score": 135203.28284592993 }, { "content": "/// Setup signal handling and panic handling.\n\n///\n\n/// On SIGINT and SIGTERM, we exit gracefully by sending a DaemonShutdown message to the\n\n/// TaskHandler. 
This is to prevent dangling processes and other weird edge-cases.\n\n///\n\n/// On panic, we want to cleanup existing unix sockets and the PID file.\n\nfn setup_signal_panic_handling(settings: &Settings, sender: &Sender<Message>) -> Result<()> {\n\n let sender_clone = sender.clone();\n\n\n\n // This section handles Shutdown via SigTerm/SigInt process signals\n\n // Notify the TaskHandler, so it can shutdown gracefully.\n\n // The actual program exit will be done via the TaskHandler.\n\n ctrlc::set_handler(move || {\n\n // Notify the task handler\n\n sender_clone\n\n .send(Message::DaemonShutdown(Shutdown::Emergency))\n\n .expect(\"Failed to send Message to TaskHandler on Shutdown\");\n\n })?;\n\n\n\n // Try to do some final cleanup, even if we panic.\n\n let settings_clone = settings.clone();\n\n let orig_hook = std::panic::take_hook();\n\n std::panic::set_hook(Box::new(move |panic_info| {\n\n // invoke the default handler and exit the process\n\n orig_hook(panic_info);\n\n\n", "file_path": "daemon/lib.rs", "rank": 70, "score": 134429.22273268586 }, { "content": "pub fn task_response_helper<F>(\n\n message: &str,\n\n task_ids: Vec<usize>,\n\n filter: F,\n\n state: &MutexGuard<State>,\n\n) -> String\n\nwhere\n\n F: Fn(&Task) -> bool,\n\n{\n\n // Get all matching/mismatching task_ids for all given ids and statuses.\n\n let (matching, mismatching) = state.filter_tasks(filter, Some(task_ids));\n\n\n\n compile_task_response(message, matching, mismatching)\n\n}\n\n\n", "file_path": "daemon/network/response_helper.rs", "rank": 71, "score": 133381.18858475907 }, { "content": "fn print_all_groups(\n\n state: State,\n\n settings: &Settings,\n\n colors: &Colors,\n\n sorted_tasks: BTreeMap<String, BTreeMap<usize, Task>>,\n\n) {\n\n // Early exit and hint if there are no tasks in the queue\n\n // Print the state of the default group anyway, since this is information one wants to\n\n // see most of the time anyway.\n\n if state.tasks.is_empty() {\n\n let headline = 
get_group_headline(\n\n \"default\",\n\n state.groups.get(\"default\").unwrap(),\n\n *state.settings.daemon.groups.get(\"default\").unwrap(),\n\n colors,\n\n );\n\n println!(\"{}\\n\", headline);\n\n println!(\"Task list is empty. Add tasks with `pueue add -- [cmd]`\");\n\n return;\n\n }\n", "file_path": "client/display/state.rs", "rank": 72, "score": 123975.67735378009 }, { "content": "pub fn compile_shell_command(command_string: &str) -> Command {\n\n let mut command = Command::new(\"sh\");\n\n command.arg(\"-c\").arg(command_string);\n\n\n\n command\n\n}\n\n\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 73, "score": 121240.45402561977 }, { "content": "pub fn compile_shell_command(command_string: &str) -> Command {\n\n let mut command = Command::new(\"sh\");\n\n command.arg(\"-c\").arg(command_string);\n\n\n\n command\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 74, "score": 121240.45402561978 }, { "content": "pub fn compile_shell_command(command_string: &str) -> Command {\n\n // Chain two `powershell` commands, one that sets the output encoding to utf8 and then the user provided one.\n\n let mut command = Command::new(\"powershell\");\n\n command.arg(\"-c\").arg(format!(\n\n \"[Console]::OutputEncoding = [Text.UTF8Encoding]::UTF8; {}\",\n\n command_string\n\n ));\n\n\n\n command\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 75, "score": 121240.45402561977 }, { "content": "fn print_single_group(\n\n state: State,\n\n settings: &Settings,\n\n colors: &Colors,\n\n mut sorted_tasks: BTreeMap<String, BTreeMap<usize, Task>>,\n\n group: String,\n\n) {\n\n // Only a single group is requested. 
Print that group and return.\n\n let tasks = sorted_tasks.entry(group.clone()).or_default();\n\n let headline = get_group_headline(\n\n &group,\n\n state.groups.get(&group).unwrap(),\n\n *state.settings.daemon.groups.get(&group).unwrap(),\n\n colors,\n\n );\n\n println!(\"{}\", headline);\n\n\n\n // Show a message if the requested group doesn't have any tasks.\n\n if tasks.is_empty() {\n\n println!(\n\n \"Task list is empty. Add tasks with `pueue add -g {} -- [cmd]`\",\n\n group\n\n );\n\n return;\n\n }\n\n print_table(tasks, colors, settings);\n\n}\n\n\n", "file_path": "client/display/state.rs", "rank": 76, "score": 120723.95488447245 }, { "content": "/// Spin up the daemon and send a SIGTERM shortly afterwards.\n\n/// This should trigger the graceful shutdown and kill the process.\n\nfn test_ctrlc() -> Result<()> {\n\n let (_settings, tempdir) = base_setup()?;\n\n\n\n let mut child = boot_standalone_daemon(tempdir.path())?;\n\n\n\n use nix::sys::signal::{kill, Signal};\n\n // Send SIGTERM signal to process via nix\n\n let nix_pid = nix::unistd::Pid::from_raw(child.id() as i32);\n\n kill(nix_pid, Signal::SIGTERM).context(\"Failed to send SIGTERM to pid\")?;\n\n\n\n // Sleep for 500ms and give the daemon time to shut down\n\n sleep_ms(500);\n\n\n\n let result = child.try_wait();\n\n assert!(matches!(result, Ok(Some(_))));\n\n let code = result.unwrap().unwrap();\n\n assert!(matches!(code.code(), Some(1)));\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/unix/shutdown.rs", "rank": 77, "score": 113200.41642701387 }, { "content": "/// This is a small helper which determines the selection depending on given commandline\n\n/// parameters.\n\n/// If no parameters are given, it returns to the default, which is the \"default\" group.\n\npub fn selection_from_params(\n\n all: bool,\n\n group: &Option<String>,\n\n task_ids: &[usize],\n\n) -> TaskSelection {\n\n if all {\n\n TaskSelection::All\n\n } else if let Some(group) = group {\n\n TaskSelection::Group(group.clone())\n\n } 
else if !task_ids.is_empty() {\n\n TaskSelection::TaskIds(task_ids.to_owned())\n\n } else {\n\n TaskSelection::Group(\"default\".into())\n\n }\n\n}\n\n\n\nimpl Client {\n\n /// Connect to the daemon, authorize via secret and return a new initialized Client.\n\n pub async fn new(settings: Settings, opt: CliArguments) -> Result<Self> {\n\n // Connect to daemon and get stream used for communication.\n", "file_path": "client/client.rs", "rank": 78, "score": 110934.41658558673 }, { "content": "/// Invoked when calling `pueue status`.\n\n/// Return the current state.\n\nfn get_status(state: &SharedState) -> Message {\n\n let state = state.lock().unwrap().clone();\n\n Message::StatusResponse(Box::new(state))\n\n}\n\n\n", "file_path": "daemon/network/message_handler/mod.rs", "rank": 79, "score": 109757.7249141217 }, { "content": "/// This is a simple and cheap custom fork method.\n\n/// Simply spawn a new child with identical arguments and exit right away.\n\nfn fork_daemon(opt: &CliArguments) -> Result<()> {\n\n let mut arguments = Vec::<String>::new();\n\n\n\n if let Some(config) = &opt.config {\n\n arguments.push(\"--config\".to_string());\n\n arguments.push(config.to_string_lossy().into_owned());\n\n }\n\n\n\n if opt.verbose > 0 {\n\n arguments.push(\"-\".to_string() + &\" \".repeat(opt.verbose as usize));\n\n }\n\n\n\n Command::new(\"pueued\").args(&arguments).spawn()?;\n\n\n\n println!(\"Pueued is now running in the background\");\n\n Ok(())\n\n}\n", "file_path": "daemon/main.rs", "rank": 80, "score": 105023.90947601461 }, { "content": "/// This is a small wrapper around the actual in-place task `restart` functionality.\n\npub fn restart_multiple(\n\n message: RestartMessage,\n\n sender: &Sender<Message>,\n\n state: &SharedState,\n\n) -> Message {\n\n let mut state = state.lock().unwrap();\n\n for task in message.tasks.iter() {\n\n restart(&mut state, task, message.stashed);\n\n }\n\n\n\n // Tell the task manager to start the task immediately, if it's requested.\n\n 
if message.start_immediately {\n\n let task_ids = message.tasks.iter().map(|task| task.task_id).collect();\n\n sender\n\n .send(Message::Start(StartMessage {\n\n tasks: TaskSelection::TaskIds(task_ids),\n\n children: false,\n\n }))\n\n .expect(SENDER_ERR);\n\n }\n\n\n\n create_success_message(\"Tasks restarted\")\n\n}\n\n\n", "file_path": "daemon/network/message_handler/restart.rs", "rank": 81, "score": 104860.8503437354 }, { "content": "/// Read a PID file and throw an error, if another daemon instance is still running.\n\nfn check_for_running_daemon(pid_path: &Path) -> Result<()> {\n\n let mut file = File::open(&pid_path).context(\"Failed to open PID file\")?;\n\n let mut pid = String::new();\n\n file.read_to_string(&mut pid)\n\n .context(\"Failed to read PID file\")?;\n\n\n\n let pid: u32 = pid\n\n .parse()\n\n .context(format!(\"Failed to parse PID from file: {:?}\", pid_path))?;\n\n\n\n if process_exists(pid) {\n\n bail!(\n\n \"Pid file already exists and another daemon seems to be running.\\n\\\n\n Please stop the daemon beforehand or delete the file manually: {:?}\",\n\n &pid_path\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/pid.rs", "rank": 82, "score": 103211.83714254369 }, { "content": "/// Convenience wrapper around `send_signal_to_child` for internal actions on processes.\n\n/// Its purpose is to hide platform specific logic.\n\npub fn run_action_on_child(\n\n child: &Child,\n\n action: &ProcessAction,\n\n send_to_children: bool,\n\n) -> Result<bool> {\n\n let signal = map_action_to_signal(action);\n\n send_signal_to_child(child, signal, send_to_children)\n\n}\n\n\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 83, "score": 103055.43210447113 }, { "content": "/// Send a signal to one of Pueue's child process handles.\n\n/// We need a special since we assume that there's also a `sh -c` around the actuall process.\n\npub fn send_signal_to_child(\n\n child: &Child,\n\n signal: Signal,\n\n _send_to_children: bool,\n\n) 
-> Result<bool> {\n\n let pid = child.id();\n\n // Send the signal to the shell, don't propagate to its children yet.\n\n send_signal_to_process(pid, signal, false)?;\n\n\n\n signal::kill(Pid::from_raw(pid.try_into().unwrap()), signal)?;\n\n Ok(true)\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 84, "score": 103055.43210447113 }, { "content": "/// Convenience wrapper around `send_signal_to_child` for internal actions on processes.\n\n/// Its purpose is to hide platform specific logic.\n\npub fn run_action_on_child(\n\n child: &Child,\n\n action: &ProcessAction,\n\n send_to_children: bool,\n\n) -> Result<bool> {\n\n let signal = map_action_to_signal(action);\n\n send_signal_to_child(child, signal, send_to_children)\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 85, "score": 103055.43210447113 }, { "content": "pub fn send_internal_signal_to_child(\n\n child: &Child,\n\n signal: InternalSignal,\n\n send_to_children: bool,\n\n) -> Result<bool> {\n\n bail!(\"Trying to send unix signal on a windows machine. 
This isn't supported.\");\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 86, "score": 101348.97521398257 }, { "content": "/// Convenience wrapper around `send_signal_to_child` for raw unix signals.\n\n/// Its purpose is to hide platform specific logic.\n\npub fn send_internal_signal_to_child(\n\n child: &Child,\n\n signal: InternalSignal,\n\n send_to_children: bool,\n\n) -> Result<bool> {\n\n let signal = map_internal_signal_to_nix_signal(signal);\n\n send_signal_to_child(child, signal, send_to_children)\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 87, "score": 101348.97521398257 }, { "content": "/// Convenience wrapper around `send_signal_to_child` for raw unix signals.\n\n/// Its purpose is to hide platform specific logic.\n\npub fn send_internal_signal_to_child(\n\n child: &Child,\n\n signal: InternalSignal,\n\n send_to_children: bool,\n\n) -> Result<bool> {\n\n let signal = map_internal_signal_to_nix_signal(signal);\n\n send_signal_to_child(child, signal, send_to_children)\n\n}\n\n\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 88, "score": 101348.97521398257 }, { "content": "/// A helper function to sleep for ms time.\n\n/// Only used to avoid the biolerplate of importing the same stuff all over the place.\n\npub fn sleep_ms(ms: u64) {\n\n std::thread::sleep(std::time::Duration::from_millis(ms));\n\n}\n\n\n\n/// A small helper function, which instantly writes the given string to stdout with a newline.\n\n/// Useful for debugging async tests.\n\npub async fn async_println(out: &str) -> Result<()> {\n\n let mut stdout = io::stdout();\n\n stdout\n\n .write_all(out.as_bytes())\n\n .await\n\n .expect(\"Failed to write to stdout.\");\n\n\n\n stdout\n\n .write_all(\"\\n\".as_bytes())\n\n .await\n\n .expect(\"Failed to write to stdout.\");\n\n stdout.flush().await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/helper/mod.rs", "rank": 89, "score": 101023.52816256817 }, { "content": "pub 
fn assert_success(message: Message) {\n\n assert!(\n\n matches!(message, Message::Success(_)),\n\n \"Expected to get SuccessMessage, got {:?}\",\n\n message\n\n );\n\n}\n\n\n", "file_path": "tests/helper/daemon/helper.rs", "rank": 90, "score": 99217.66962289422 }, { "content": "pub fn assert_failure(message: Message) {\n\n assert!(\n\n matches!(message, Message::Failure(_)),\n\n \"Expected to get FailureMessage, got {:?}\",\n\n message\n\n );\n\n}\n\n\n", "file_path": "tests/helper/daemon/helper.rs", "rank": 91, "score": 99217.66962289422 }, { "content": "/// Used to style any generic failure message from the daemon.\n\npub fn print_error(colors: &Colors, message: &str) {\n\n let styled = style_text(message, Some(colors.red()), None);\n\n println!(\"{}\", styled);\n\n}\n", "file_path": "client/display/mod.rs", "rank": 92, "score": 94372.04121369263 }, { "content": "/// Used to style any generic success message from the daemon.\n\npub fn print_success(_colors: &Colors, message: &str) {\n\n println!(\"{}\", message);\n\n}\n\n\n", "file_path": "client/display/mod.rs", "rank": 93, "score": 94372.04121369263 }, { "content": "/// Print the log of a single task.\n\n///\n\n/// message: The message returned by the daemon. 
This message includes all\n\n/// requested tasks and the tasks' logs, if we don't read local logs.\n\n/// lines: Whether we should reduce the log output of each task to a specific number of lines.\n\n/// `None` implicates that everything should be printed.\n\n/// This is only important, if we read local lines.\n\nfn print_log(\n\n message: &mut TaskLogMessage,\n\n colors: &Colors,\n\n settings: &Settings,\n\n lines: Option<usize>,\n\n) {\n\n let task = &message.task;\n\n // We only show logs of finished or running tasks.\n\n if !matches!(\n\n task.status,\n\n TaskStatus::Done(_) | TaskStatus::Running | TaskStatus::Paused\n\n ) {\n\n return;\n\n }\n\n\n\n print_task_info(task, colors);\n\n\n\n if settings.client.read_local_logs {\n\n print_local_log(message.task.id, colors, settings, lines);\n\n } else if message.stdout.is_some() && message.stderr.is_some() {\n\n print_remote_log(message, colors);\n\n } else {\n\n println!(\"Logs requested from pueue daemon, but none received. Please report this bug.\");\n\n }\n\n}\n\n\n", "file_path": "client/display/log/mod.rs", "rank": 94, "score": 94216.13572626222 }, { "content": "/// Read logs from from compressed remote logs.\n\n/// If logs don't exist, an empty string will be returned.\n\nfn get_remote_logs(\n\n stdout_bytes: Option<Vec<u8>>,\n\n stderr_bytes: Option<Vec<u8>>,\n\n) -> (String, String) {\n\n let stdout = if let Some(bytes) = stdout_bytes {\n\n let mut decoder = FrameDecoder::new(&bytes[..]);\n\n let mut stdout = String::new();\n\n if let Err(error) = decoder.read_to_string(&mut stdout) {\n\n stdout.push_str(&format!(\n\n \"(Pueue error) Failed to decompress remote log output: {:?}\",\n\n error\n\n ))\n\n }\n\n stdout\n\n } else {\n\n String::new()\n\n };\n\n\n\n let stderr = if let Some(bytes) = stderr_bytes {\n\n let mut decoder = FrameDecoder::new(&bytes[..]);\n", "file_path": "client/display/log/json.rs", "rank": 95, "score": 92387.55841535996 }, { "content": "/// Get child pids of a specific 
process.\n\nfn get_child_pids(target_pid: u32, pid_list: &mut Vec<u32>) {\n\n unsafe {\n\n // Take a snapshot of all processes in the system.\n\n // While enumerating the set of processes, new processes can be created and destroyed.\n\n let snapshot_handle = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, target_pid);\n\n if snapshot_handle == INVALID_HANDLE_VALUE {\n\n error!(\"Failed to get process {} snapShot\", target_pid);\n\n return;\n\n }\n\n\n\n // Walk the list of processes.\n\n let mut process_entry = PROCESSENTRY32 {\n\n dwSize: std::mem::size_of::<PROCESSENTRY32>() as u32,\n\n ..Default::default()\n\n };\n\n if Process32First(snapshot_handle, &mut process_entry) == FALSE {\n\n error!(\"Couldn't get first process.\");\n\n CloseHandle(snapshot_handle);\n\n return;\n\n }\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 96, "score": 89083.20895220639 }, { "content": "fn log_status_change(\n\n current_time: &str,\n\n previous_status: TaskStatus,\n\n task: &Task,\n\n colors: &Colors,\n\n) {\n\n // Finishing tasks get some special handling\n\n if let TaskStatus::Done(result) = &task.status {\n\n let text = match result {\n\n TaskResult::Success => {\n\n format!(\n\n \"Task {} succeeded with {}\",\n\n style_text(task.id, None, Some(Attribute::Bold)),\n\n style_text(\"0\", Some(colors.green()), None)\n\n )\n\n }\n\n TaskResult::DependencyFailed => {\n\n format!(\n\n \"Task {} failed due to {}\",\n\n style_text(task.id, None, Some(Attribute::Bold)),\n", "file_path": "client/commands/wait.rs", "rank": 97, "score": 84851.46352120278 }, { "content": "fn get_color_for_status(task_status: &TaskStatus, colors: &Colors) -> Color {\n\n match task_status {\n\n TaskStatus::Running | TaskStatus::Done(_) => colors.green(),\n\n TaskStatus::Paused | TaskStatus::Locked => colors.white(),\n\n _ => colors.white(),\n\n }\n\n}\n", "file_path": "client/commands/wait.rs", "rank": 98, "score": 78998.43603784966 }, { "content": "/// Get current task pid, all child pid 
and all children's children\n\nfn get_cur_task_processes(task_pid: u32) -> Vec<u32> {\n\n let mut all_pids = Vec::new();\n\n\n\n // Get all pids by BFS\n\n let mut parent_pids = vec![task_pid];\n\n while let Some(pid) = parent_pids.pop() {\n\n all_pids.push(pid);\n\n\n\n get_child_pids(pid, &mut parent_pids);\n\n }\n\n\n\n // Keep parent pid ahead of child. We need execute action for parent process first.\n\n all_pids.reverse();\n\n all_pids\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 99, "score": 78193.35200155443 } ]
Rust
macros/src/parser/mod.rs
ryan-summers/smlang-rs
9f4567b6fb05bd867363bb46385f4c33704fe304
pub mod data; pub mod event; pub mod input_state; pub mod output_state; pub mod state_machine; pub mod transition; use data::DataDefinitions; use event::EventMapping; use state_machine::StateMachine; use input_state::InputState; use proc_macro2::Span; use std::collections::HashMap; use syn::{parse, Ident, Type}; use transition::StateTransition; pub type TransitionMap = HashMap<String, HashMap<String, EventMapping>>; #[derive(Debug)] pub struct ParsedStateMachine { pub temporary_context_type: Option<Type>, pub guard_error: Option<Type>, pub states: HashMap<String, Ident>, pub starting_state: Ident, pub state_data: DataDefinitions, pub events: HashMap<String, Ident>, pub event_data: DataDefinitions, pub states_events_mapping: HashMap<String, HashMap<String, EventMapping>>, } fn add_transition( transition: &StateTransition, transition_map: &mut TransitionMap, state_data: &DataDefinitions, ) -> Result<(), parse::Error> { let p = transition_map .get_mut(&transition.in_state.ident.to_string()) .unwrap(); if !p.contains_key(&transition.event.ident.to_string()) { let mapping = EventMapping { event: transition.event.ident.clone(), guard: transition.guard.clone(), action: transition.action.clone(), out_state: transition.out_state.ident.clone(), }; p.insert(transition.event.ident.to_string(), mapping); } else { return Err(parse::Error::new( transition.in_state.ident.span(), "State and event combination specified multiple times, remove duplicates.", )); } if let Some(_) = state_data .data_types .get(&transition.out_state.ident.to_string()) { if transition.action.is_none() { return Err(parse::Error::new( transition.out_state.ident.span(), "This state has data associated, but not action is define here to provide it.", )); } } Ok(()) } impl ParsedStateMachine { pub fn new(sm: StateMachine) -> parse::Result<Self> { let num_start: usize = sm .transitions .iter() .map(|sm| if sm.in_state.start { 1 } else { 0 }) .sum(); if num_start == 0 { return Err(parse::Error::new( 
Span::call_site(), "No starting state defined, indicate the starting state with a *.", )); } else if num_start > 1 { return Err(parse::Error::new( Span::call_site(), "More than one starting state defined (indicated with *), remove duplicates.", )); } let starting_state = sm .transitions .iter() .find(|sm| sm.in_state.start) .unwrap() .in_state .ident .clone(); let mut states = HashMap::new(); let mut state_data = DataDefinitions::new(); let mut events = HashMap::new(); let mut event_data = DataDefinitions::new(); let mut states_events_mapping = TransitionMap::new(); for transition in sm.transitions.iter() { let in_state_name = transition.in_state.ident.to_string(); let out_state_name = transition.out_state.ident.to_string(); if !transition.in_state.wildcard { states.insert(in_state_name.clone(), transition.in_state.ident.clone()); state_data.collect(in_state_name.clone(), transition.in_state.data_type.clone())?; } states.insert(out_state_name.clone(), transition.out_state.ident.clone()); state_data.collect( out_state_name.clone(), transition.out_state.data_type.clone(), )?; let event_name = transition.event.ident.to_string(); events.insert(event_name.clone(), transition.event.ident.clone()); event_data.collect(event_name.clone(), transition.event.data_type.clone())?; if !transition.in_state.wildcard { states_events_mapping.insert(transition.in_state.ident.to_string(), HashMap::new()); } states_events_mapping.insert(transition.out_state.ident.to_string(), HashMap::new()); } state_data.all_lifetimes.dedup(); event_data.all_lifetimes.dedup(); for transition in sm.transitions.iter() { if transition.in_state.wildcard { for (name, in_state) in &states { let in_state = InputState { start: false, wildcard: false, ident: in_state.clone(), data_type: state_data.data_types.get(name).cloned(), }; let wildcard_transition = StateTransition { in_state, event: transition.event.clone(), guard: transition.guard.clone(), action: transition.action.clone(), out_state: 
transition.out_state.clone(), }; add_transition( &wildcard_transition, &mut states_events_mapping, &state_data, )?; } } else { add_transition(transition, &mut states_events_mapping, &state_data)?; } } Ok(ParsedStateMachine { temporary_context_type: sm.temporary_context_type, guard_error: sm.guard_error, states, starting_state, state_data, events, event_data, states_events_mapping, }) } }
pub mod data; pub mod event; pub mod input_state; pub mod output_state; pub mod state_machine; pub mod transition; use data::DataDefinitions; use event::EventMapping; use state_machine::StateMachine; use input_state::InputState; use proc_macro2::Span; use std::collections::HashMap; use syn::{parse, Ident, Type}; use transition::StateTransition; pub type TransitionMap = HashMap<String, HashMap<String, EventMapping>>; #[derive(Debug)] pub struct ParsedStateMachine { pub temporary_context_type: Option<Type>, pub guard_error: Option<Type>, pub states: HashMap<String, Ident>, pub starting_state: Ident, pub state_data: DataDefinitions, pub events: HashMap<String, Ident>, pub event_data: DataDefinitions, pub states_events_mapping: HashMap<String, HashMap<String, EventMapping>>, } fn add_transition( transition: &StateTr
in_state, event: transition.event.clone(), guard: transition.guard.clone(), action: transition.action.clone(), out_state: transition.out_state.clone(), }; add_transition( &wildcard_transition, &mut states_events_mapping, &state_data, )?; } } else { add_transition(transition, &mut states_events_mapping, &state_data)?; } } Ok(ParsedStateMachine { temporary_context_type: sm.temporary_context_type, guard_error: sm.guard_error, states, starting_state, state_data, events, event_data, states_events_mapping, }) } }
ansition, transition_map: &mut TransitionMap, state_data: &DataDefinitions, ) -> Result<(), parse::Error> { let p = transition_map .get_mut(&transition.in_state.ident.to_string()) .unwrap(); if !p.contains_key(&transition.event.ident.to_string()) { let mapping = EventMapping { event: transition.event.ident.clone(), guard: transition.guard.clone(), action: transition.action.clone(), out_state: transition.out_state.ident.clone(), }; p.insert(transition.event.ident.to_string(), mapping); } else { return Err(parse::Error::new( transition.in_state.ident.span(), "State and event combination specified multiple times, remove duplicates.", )); } if let Some(_) = state_data .data_types .get(&transition.out_state.ident.to_string()) { if transition.action.is_none() { return Err(parse::Error::new( transition.out_state.ident.span(), "This state has data associated, but not action is define here to provide it.", )); } } Ok(()) } impl ParsedStateMachine { pub fn new(sm: StateMachine) -> parse::Result<Self> { let num_start: usize = sm .transitions .iter() .map(|sm| if sm.in_state.start { 1 } else { 0 }) .sum(); if num_start == 0 { return Err(parse::Error::new( Span::call_site(), "No starting state defined, indicate the starting state with a *.", )); } else if num_start > 1 { return Err(parse::Error::new( Span::call_site(), "More than one starting state defined (indicated with *), remove duplicates.", )); } let starting_state = sm .transitions .iter() .find(|sm| sm.in_state.start) .unwrap() .in_state .ident .clone(); let mut states = HashMap::new(); let mut state_data = DataDefinitions::new(); let mut events = HashMap::new(); let mut event_data = DataDefinitions::new(); let mut states_events_mapping = TransitionMap::new(); for transition in sm.transitions.iter() { let in_state_name = transition.in_state.ident.to_string(); let out_state_name = transition.out_state.ident.to_string(); if !transition.in_state.wildcard { states.insert(in_state_name.clone(), 
transition.in_state.ident.clone()); state_data.collect(in_state_name.clone(), transition.in_state.data_type.clone())?; } states.insert(out_state_name.clone(), transition.out_state.ident.clone()); state_data.collect( out_state_name.clone(), transition.out_state.data_type.clone(), )?; let event_name = transition.event.ident.to_string(); events.insert(event_name.clone(), transition.event.ident.clone()); event_data.collect(event_name.clone(), transition.event.data_type.clone())?; if !transition.in_state.wildcard { states_events_mapping.insert(transition.in_state.ident.to_string(), HashMap::new()); } states_events_mapping.insert(transition.out_state.ident.to_string(), HashMap::new()); } state_data.all_lifetimes.dedup(); event_data.all_lifetimes.dedup(); for transition in sm.transitions.iter() { if transition.in_state.wildcard { for (name, in_state) in &states { let in_state = InputState { start: false, wildcard: false, ident: in_state.clone(), data_type: state_data.data_types.get(name).cloned(), }; let wildcard_transition = StateTransition {
random
[ { "content": "// helper function for extracting a vector of lifetimes from a Type\n\nfn get_lifetimes(data_type: &Type) -> Result<Lifetimes, parse::Error> {\n\n let mut lifetimes = Lifetimes::new();\n\n match data_type {\n\n Type::Reference(tr) => {\n\n if let Some(lifetime) = &tr.lifetime {\n\n lifetimes.push(lifetime.clone());\n\n } else {\n\n return Err(parse::Error::new(\n\n data_type.span(),\n\n \"This event's data lifetime is not defined, consider adding a lifetime.\",\n\n ));\n\n }\n\n Ok(lifetimes)\n\n }\n\n Type::Path(tp) => {\n\n let punct = &tp.path.segments;\n\n for p in punct.iter() {\n\n if let PathArguments::AngleBracketed(abga) = &p.arguments {\n\n for arg in &abga.args {\n\n if let GenericArgument::Lifetime(lifetime) = &arg {\n", "file_path": "macros/src/parser/data.rs", "rank": 0, "score": 89590.03458661739 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n let result = sm.process_event(Events::Event1(MyEventData(1))); // Guard will fail\n\n\n\n assert!(result == Err(Error::GuardFailed(())));\n\n\n\n let result = sm.process_event(Events::Event1(MyEventData(42))); // Guard will pass\n\n\n\n assert!(result == Ok(&States::State2));\n\n}\n", "file_path": "examples/event_with_data.rs", "rank": 1, "score": 86631.47732296801 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n let result = sm.process_event(Events::Event1);\n\n\n\n assert!(result == Ok(&States::State2(MyStateData(42))));\n\n}\n", "file_path": "examples/state_with_data.rs", "rank": 2, "score": 85584.08243982928 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n\n\n let result = sm.process_event(Events::Event1(&mut MyEventData(42))); // Guard will pass\n\n\n\n assert!(result == Ok(&States::State2));\n\n}\n", "file_path": "examples/event_with_mutable_data.rs", "rank": 3, "score": 83083.4730208785 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n\n\n let result = 
sm.process_event(Events::Event1(&[])); // Guard will fail\n\n assert!(result == Err(Error::GuardFailed(())));\n\n let result = sm.process_event(Events::Event1(&[1, 2, 3])); // Guard will pass\n\n assert!(result == Ok(&States::State2));\n\n\n\n let r = 42;\n\n let result = sm.process_event(Events::Event2(MyReferenceWrapper(&r))); // Guard will fail\n\n assert!(result == Err(Error::GuardFailed(())));\n\n\n\n let r = 9001;\n\n let result = sm.process_event(Events::Event2(MyReferenceWrapper(&r))); // Guard will pass\n\n assert!(result == Ok(&States::State3));\n\n}\n", "file_path": "examples/event_with_reference_data.rs", "rank": 4, "score": 83083.4730208785 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n let result = sm.process_event(Events::Event1);\n\n\n\n assert!(result == Ok(&States::State2(MyStateData(&42))));\n\n}\n", "file_path": "examples/state_with_reference_data.rs", "rank": 5, "score": 82082.54520901313 }, { "content": "/// Generates a string containing 'dot' syntax to generate a statemachine diagram with graphviz.\n\npub fn generate_diagram(sm: &ParsedStateMachine) -> String {\n\n let transitions = &sm.states_events_mapping;\n\n\n\n let diagram_states = sm.states.iter().map(|s| s.0);\n\n let mut diagram_events = vec![];\n\n let mut diagram_transitions = vec![];\n\n for (state, event) in transitions {\n\n for (_event, eventmapping) in event {\n\n diagram_events.push((\n\n eventmapping.event.to_string(),\n\n eventmapping\n\n .guard\n\n .as_ref()\n\n .map(|i| i.to_string())\n\n .unwrap_or_else(|| \"_\".to_string()),\n\n eventmapping\n\n .action\n\n .as_ref()\n\n .map(|i| i.to_string())\n\n .unwrap_or_else(|| \"_\".to_string()),\n", "file_path": "macros/src/diagramgen.rs", "rank": 7, "score": 78137.9211805749 }, { "content": "fn main() {}\n", "file_path": "tests/compile-fail/double_state_event.rs", "rank": 8, "score": 76209.90483137118 }, { "content": "fn main() {}\n\n\n", "file_path": 
"tests/compile-fail/no_action_with_state_data.rs", "rank": 9, "score": 75935.57197644234 }, { "content": "pub fn generate_code(sm: &ParsedStateMachine) -> proc_macro2::TokenStream {\n\n // Get only the unique states\n\n let mut state_list: Vec<_> = sm.states.iter().map(|(_, value)| value).collect();\n\n state_list.sort_by(|a, b| a.to_string().cmp(&b.to_string()));\n\n\n\n let state_list: Vec<_> = state_list\n\n .iter()\n\n .map(\n\n |value| match sm.state_data.data_types.get(&value.to_string()) {\n\n None => {\n\n quote! {\n\n #value\n\n }\n\n }\n\n Some(t) => {\n\n quote! {\n\n #value(#t)\n\n }\n\n }\n\n },\n", "file_path": "macros/src/codegen.rs", "rank": 10, "score": 71152.64055312058 }, { "content": " /// This trait outlines the guards and actions that need to be implemented for the state\n\n /// machine.\n\n pub trait StateMachineContext {\n\n #guard_list\n\n #action_list\n\n }\n\n\n\n /// List of auto-generated states.\n\n #[allow(missing_docs)]\n\n pub enum States <#state_lifetimes_code> { #(#state_list),* }\n\n\n\n /// Manually define PartialEq for States based on variant only to address issue-#21\n\n impl<#state_lifetimes_code> PartialEq for States <#state_lifetimes_code> {\n\n fn eq(&self, other: &Self) -> bool {\n\n use core::mem::discriminant;\n\n discriminant(self) == discriminant(other)\n\n }\n\n }\n\n\n\n /// List of auto-generated events.\n\n #[allow(missing_docs)]\n\n pub enum Events <#event_lifetimes_code> { #(#event_list),* }\n", "file_path": "macros/src/codegen.rs", "rank": 11, "score": 64022.438325703166 }, { "content": "//! Event data example\n\n//!\n\n//! An example of using event data together with a guard and action.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// Event data\n\n#[derive(PartialEq)]\n\npub struct MyEventData(pub u32);\n\n\n\nstatemachine! 
{\n\n transitions: {\n\n *State1 + Event1(MyEventData) [guard] / action = State2,\n\n // ...\n\n }\n\n}\n\n\n\n/// Context\n", "file_path": "examples/event_with_data.rs", "rank": 12, "score": 56165.359531083006 }, { "content": "pub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n fn guard(&mut self, event_data: &MyEventData) -> Result<(), ()> {\n\n if event_data == &MyEventData(42) {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n }\n\n\n\n fn action(&mut self, event_data: &MyEventData) {\n\n println!(\"Got valid Event Data = {}\", event_data.0);\n\n }\n\n}\n\n\n", "file_path": "examples/event_with_data.rs", "rank": 13, "score": 56162.21144604933 }, { "content": "//! State data example\n\n//!\n\n//! An example of using state data together with an action.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// State data\n\n#[derive(PartialEq)]\n\npub struct MyStateData(pub u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1 / action = State2,\n\n State2(MyStateData) + Event2 = State1,\n\n // ...\n\n }\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n fn action(&mut self) -> MyStateData {\n\n MyStateData(42)\n\n }\n\n}\n\n\n", "file_path": "examples/state_with_data.rs", "rank": 14, "score": 55066.11764257534 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n\n\n assert!(sm.state() == &States::Idle);\n\n\n\n let r = sm.process_event(Events::Charge);\n\n assert!(r == Ok(&States::Charging));\n\n\n\n let r = sm.process_event(Events::Discharge);\n\n assert!(r == Ok(&States::Discharging));\n\n\n\n let r = sm.process_event(Events::Charge);\n\n assert!(r == Ok(&States::Charging));\n\n\n\n let r = sm.process_event(Events::ChargeComplete);\n\n assert!(r == Ok(&States::Charged));\n\n\n\n let r = sm.process_event(Events::Charge);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::Charged);\n", "file_path": 
"examples/input_state_pattern_match.rs", "rank": 15, "score": 54523.709490274436 }, { "content": "fn main() {}\n", "file_path": "tests/compile-fail/no_starting_state.rs", "rank": 16, "score": 54523.709490274436 }, { "content": "//! Event data example\n\n//!\n\n//! An example of using event data together with a guard and action.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// Event data\n\n#[derive(PartialEq)]\n\npub struct MyEventData(pub u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1(&'a mut MyEventData) [guard] / action = State2,\n\n // ...\n\n }\n\n}\n\n\n\n/// Context\n", "file_path": "examples/event_with_mutable_data.rs", "rank": 17, "score": 53558.491591765734 }, { "content": "pub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n fn guard(&mut self, event_data: &mut MyEventData) -> Result<(), ()> {\n\n event_data.0 = 55;\n\n Ok(())\n\n }\n\n\n\n fn action(&mut self, event_data: &mut MyEventData) {\n\n println!(\"Got valid Event Data = {}\", event_data.0);\n\n }\n\n}\n\n\n", "file_path": "examples/event_with_mutable_data.rs", "rank": 18, "score": 53555.37551205491 }, { "content": "pub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n fn guard1(&mut self, event_data: &[u8]) -> Result<(), ()> {\n\n // Only ok if the slice is not empty\n\n if !event_data.is_empty() {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n }\n\n\n\n fn action1(&mut self, event_data: &[u8]) {\n\n println!(\"Got valid Event Data = {:?}\", event_data);\n\n }\n\n\n\n fn guard2(&mut self, event_data: &MyReferenceWrapper) -> Result<(), ()> {\n\n if *event_data.0 > 9000 {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n }\n\n\n\n fn action2(&mut self, event_data: &MyReferenceWrapper) {\n\n println!(\"Got valid Event Data = {}\", event_data.0);\n\n }\n\n}\n\n\n", "file_path": "examples/event_with_reference_data.rs", "rank": 19, "score": 53554.42839386824 }, { "content": "//! Reference types in events\n\n//!\n\n//! 
A simple example of a state machine which will get events that contain references.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// Reference wrapper\n\n#[derive(Clone, Copy, PartialEq, Debug)]\n\npub struct MyReferenceWrapper<'a>(pub &'a u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1(&'a [u8]) [guard1] / action1 = State2,\n\n State2 + Event2(MyReferenceWrapper<'b>) [guard2] / action2 = State3,\n\n }\n\n}\n\n\n\n/// Context\n", "file_path": "examples/event_with_reference_data.rs", "rank": 20, "score": 53554.41653214984 }, { "content": "//! State data example\n\n//!\n\n//! An example of using referenced state data with lifetimes together with an action.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// State data\n\n#[derive(PartialEq)]\n\npub struct MyStateData<'a>(&'a u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1 / action = State2,\n\n State2(MyStateData<'a>) + Event2 = State1,\n\n // ...\n\n }\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n fn action<'a>(&mut self) -> MyStateData<'a> {\n\n MyStateData(&42)\n\n }\n\n}\n\n\n", "file_path": "examples/state_with_reference_data.rs", "rank": 21, "score": 52509.84062884814 }, { "content": "#[proc_macro]\n\npub fn statemachine(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n // Parse the syntax into structures\n\n let input = parse_macro_input!(input as parser::state_machine::StateMachine);\n\n\n\n // Validate syntax\n\n match parser::ParsedStateMachine::new(input) {\n\n // Generate code and hand the output tokens back to the compiler\n\n Ok(sm) => {\n\n #[cfg(feature = \"graphviz\")]\n\n {\n\n use std::io::Write;\n\n\n\n // Generate dot syntax for the statemachine.\n\n let diagram = diagramgen::generate_diagram(&sm);\n\n\n\n // Start the 'dot' process.\n\n let mut process = std::process::Command::new(\"dot\")\n\n .args(&[\"-Tsvg\", \"-o\", 
\"statemachine.svg\"])\n\n .stdin(std::process::Stdio::piped())\n\n .spawn()\n", "file_path": "macros/src/lib.rs", "rank": 22, "score": 52376.79134111213 }, { "content": "extern crate smlang;\n\n\n\nuse smlang::statemachine;\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1 = State2,\n\n State1 + Event1 = State3, //~ State and event combination specified multiple times, remove duplicates.\n\n }\n\n}\n\n\n", "file_path": "tests/compile-fail/double_state_event.rs", "rank": 23, "score": 48331.77293964233 }, { "content": "extern crate smlang;\n\n\n\nuse smlang::statemachine;\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1 = State2(u32), //~ This state has data associated, but not action is define here to provide it.\n\n }\n\n}\n\n\n", "file_path": "tests/compile-fail/no_action_with_state_data.rs", "rank": 24, "score": 48046.21442883372 }, { "content": "#[test]\n\nfn tests() {\n\n let t = trybuild::TestCases::new();\n\n t.compile_fail(\"tests/compile-fail/*.rs\");\n\n}\n", "file_path": "tests/test.rs", "rank": 25, "score": 34373.46622980959 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context { num_transitions: 0 });\n\n\n\n sm.process_event(Events::Event1).ok(); // ++\n\n sm.process_event(Events::Event1).ok(); // Will fail\n\n sm.process_event(Events::Event2).ok(); // ++\n\n\n\n assert_eq!(sm.context().num_transitions, 2);\n\n\n\n // ...\n\n}\n", "file_path": "examples/context.rs", "rank": 26, "score": 34373.46622980959 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n assert!(sm.state() == &States::State1);\n\n\n\n let r = sm.process_event(Events::Event1);\n\n assert!(r == Ok(&States::State2));\n\n\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == Ok(&States::State3));\n\n\n\n // Go back in the loop a few time\n\n let r = sm.process_event(Events::Event3);\n\n assert!(r == Ok(&States::State2));\n\n\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == 
Ok(&States::State3));\n\n\n\n let r = sm.process_event(Events::Event3);\n\n assert!(r == Ok(&States::State2));\n\n\n\n // Now we cannot use Event1 again, as it is outside the state machine loop\n\n let r = sm.process_event(Events::Event1);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::State2);\n\n}\n", "file_path": "examples/ex2.rs", "rank": 27, "score": 34373.46622980959 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n assert!(sm.state() == &States::State1);\n\n\n\n let r = sm.process_event(Events::Event1);\n\n assert!(r == Ok(&States::State2));\n\n\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == Ok(&States::State3));\n\n\n\n // Now all events will not give any change of state\n\n let r = sm.process_event(Events::Event1);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::State3);\n\n\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::State3);\n\n}\n", "file_path": "examples/ex1.rs", "rank": 28, "score": 34373.46622980959 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n assert!(sm.state() == &States::State1);\n\n\n\n println!(\"Before action 1\");\n\n\n\n // Go through the first guard and action\n\n let r = sm.process_event(Events::Event1);\n\n assert!(r == Ok(&States::State2));\n\n\n\n println!(\"After action 1\");\n\n\n\n println!(\"Before action 2\");\n\n\n\n // The action will never run as the guard will fail\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == Err(Error::GuardFailed(())));\n\n\n\n println!(\"After action 2\");\n\n\n\n // Now we are stuck due to the guard never returning true\n\n assert!(sm.state() == &States::State2);\n\n}\n", "file_path": "examples/ex3.rs", "rank": 29, "score": 34373.46622980959 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context);\n\n\n\n // first event starts the dominos\n\n let mut event 
= Some(Events::ToD1);\n\n\n\n // use a while let loop to let the events propagate and the dominos fall\n\n while let Some(e) = event {\n\n let state = sm.process_event(e).unwrap();\n\n\n\n // use pattern matching to extract the event from any state with an action that fires one\n\n // good practice here NOT to use a wildcard to ensure you don't miss any states\n\n event = match state {\n\n States::D0 => None,\n\n States::D1(event) => *event,\n\n States::D2(event) => *event,\n\n States::D3(event) => *event,\n\n States::D4(event) => *event,\n\n States::D5 => None,\n\n };\n\n }\n\n\n\n // All the dominos fell!\n\n assert!(sm.state() == &States::D5);\n\n}\n", "file_path": "examples/dominos.rs", "rank": 30, "score": 34373.46622980959 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context(0));\n\n assert!(sm.state() == &States::State1);\n\n assert!(sm.context.0 == 0);\n\n\n\n // triggers action\n\n let r = sm.process_event(Events::Event1);\n\n assert!(r == Ok(&States::State2));\n\n assert!(sm.context.0 == 1);\n\n\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == Ok(&States::State1));\n\n assert!(sm.context.0 == 1);\n\n\n\n // triggers the same action\n\n let r = sm.process_event(Events::Event2);\n\n assert!(r == Ok(&States::State3));\n\n assert!(sm.context.0 == 2);\n\n}\n", "file_path": "examples/reuse_action.rs", "rank": 31, "score": 33086.77521171007 }, { "content": "fn main() {}\n", "file_path": "examples/guard_action_syntax.rs", "rank": 32, "score": 31914.25080680928 }, { "content": "fn main() {}\n", "file_path": "examples/guard_custom_error.rs", "rank": 33, "score": 31914.25080680928 }, { "content": "fn main() {}\n", "file_path": "tests/compile-fail/wildcard_with_pattern.rs", "rank": 34, "score": 30841.343679705766 }, { "content": "fn main() {\n\n let mut sm = StateMachine::new(Context {});\n\n let mut val = 0;\n\n\n\n // This invocation will go through 1 guard and one action.\n\n let r = sm\n\n .process_event(&mut val, 
Events::Event1(MyEventData(1)))\n\n .unwrap();\n\n\n\n assert!(r == &States::State2(MyStateData(1)));\n\n assert_eq!(val, 2);\n\n}\n", "file_path": "examples/guard_action_syntax_with_temporary_context.rs", "rank": 35, "score": 29855.8759684583 }, { "content": "use super::event::Event;\n\nuse super::input_state::InputState;\n\nuse super::output_state::OutputState;\n\nuse syn::{bracketed, parse, token, Ident, Token};\n\n\n\n#[derive(Debug)]\n\npub struct StateTransition {\n\n pub in_state: InputState,\n\n pub event: Event,\n\n pub guard: Option<Ident>,\n\n pub action: Option<Ident>,\n\n pub out_state: OutputState,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StateTransitions {\n\n pub in_states: Vec<InputState>,\n\n pub event: Event,\n\n pub guard: Option<Ident>,\n\n pub action: Option<Ident>,\n", "file_path": "macros/src/parser/transition.rs", "rank": 36, "score": 27325.839360103946 }, { "content": " in_state.ident.span(),\n\n \"Wildcards already include all states, so should not be used with input state patterns.\",\n\n ));\n\n }\n\n }\n\n }\n\n\n\n // Event\n\n let event: Event = input.parse()?;\n\n\n\n // Possible guard\n\n let guard = if input.peek(token::Bracket) {\n\n let content;\n\n bracketed!(content in input);\n\n let guard: Ident = content.parse()?;\n\n Some(guard)\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "macros/src/parser/transition.rs", "rank": 37, "score": 27318.321995474347 }, { "content": " pub out_state: OutputState,\n\n}\n\n\n\nimpl parse::Parse for StateTransitions {\n\n fn parse(input: parse::ParseStream) -> syn::Result<Self> {\n\n // parse the input pattern\n\n let mut in_states = Vec::new();\n\n loop {\n\n let in_state: InputState = input.parse()?;\n\n in_states.push(in_state);\n\n if let Err(_) = input.parse::<Token![|]>() {\n\n break;\n\n };\n\n }\n\n\n\n // Make sure that if a wildcard is used, it is the only input state\n\n if in_states.len() > 1 {\n\n for in_state in &in_states {\n\n if in_state.wildcard {\n\n return 
Err(parse::Error::new(\n", "file_path": "macros/src/parser/transition.rs", "rank": 38, "score": 27315.719390852275 }, { "content": " // Possible action\n\n let action = if let Ok(_) = input.parse::<Token![/]>() {\n\n let action: Ident = input.parse()?;\n\n Some(action)\n\n } else {\n\n None\n\n };\n\n\n\n let out_state: OutputState = input.parse()?;\n\n\n\n Ok(Self {\n\n in_states,\n\n event,\n\n guard,\n\n action,\n\n out_state,\n\n })\n\n }\n\n}\n", "file_path": "macros/src/parser/transition.rs", "rank": 39, "score": 27315.142808139957 }, { "content": "use syn::{parenthesized, parse, spanned::Spanned, token, Ident, Token, Type};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Event {\n\n pub ident: Ident,\n\n pub data_type: Option<Type>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EventMapping {\n\n pub event: Ident,\n\n pub guard: Option<Ident>,\n\n pub action: Option<Ident>,\n\n pub out_state: Ident,\n\n}\n\n\n\nimpl parse::Parse for Event {\n\n fn parse(input: parse::ParseStream) -> syn::Result<Self> {\n\n // Event\n\n input.parse::<Token![+]>()?;\n", "file_path": "macros/src/parser/event.rs", "rank": 49, "score": 26945.876824021845 }, { "content": " let ident: Ident = input.parse()?;\n\n\n\n // Possible type on the event\n\n let data_type = if input.peek(token::Paren) {\n\n let content;\n\n parenthesized!(content in input);\n\n let input: Type = content.parse()?;\n\n\n\n // Check so the type is supported\n\n match &input {\n\n Type::Array(_)\n\n | Type::Path(_)\n\n | Type::Ptr(_)\n\n | Type::Reference(_)\n\n | Type::Slice(_)\n\n | Type::Tuple(_) => (),\n\n _ => {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n \"This is an unsupported type for events.\",\n", "file_path": "macros/src/parser/event.rs", "rank": 50, "score": 26939.065818694206 }, { "content": " ))\n\n }\n\n }\n\n\n\n Some(input)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(Self { ident, data_type })\n\n }\n\n}\n", "file_path": "macros/src/parser/event.rs", "rank": 51, "score": 26937.39409741577 }, 
{ "content": " Ok(())\n\n }\n\n\n\n // helper function for collecting data types and adding them to a data descriptions struct\n\n pub fn collect(&mut self, key: String, data_type: Option<Type>) -> Result<(), parse::Error> {\n\n // check to see if there was every a previous data-type associated with this transition\n\n let prev = self.data_types.get(&key);\n\n\n\n // if there was a previous data definition for this key, may sure it is consistent\n\n if let Some(prev) = prev {\n\n if let Some(ref data_type) = data_type {\n\n if prev != &data_type.clone() {\n\n return Err(parse::Error::new(\n\n data_type.span(),\n\n \"This event's type does not match its previous definition.\",\n\n ));\n\n }\n\n } else {\n\n return Err(parse::Error::new(\n\n data_type.span(),\n", "file_path": "macros/src/parser/data.rs", "rank": 52, "score": 26627.046106927268 }, { "content": "use std::collections::HashMap;\n\nuse syn::{parse, spanned::Spanned, GenericArgument, Lifetime, PathArguments, Type};\n\n\n\npub type DataTypes = HashMap<String, Type>;\n\npub type Lifetimes = Vec<Lifetime>;\n\n\n\n// helper function for extracting a vector of lifetimes from a Type\n", "file_path": "macros/src/parser/data.rs", "rank": 53, "score": 26626.912752671305 }, { "content": " lifetimes.push(lifetime.clone());\n\n }\n\n }\n\n }\n\n }\n\n Ok(lifetimes)\n\n }\n\n _ => Ok(lifetimes),\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DataDefinitions {\n\n pub data_types: DataTypes,\n\n pub all_lifetimes: Lifetimes,\n\n pub lifetimes: HashMap<String, Lifetimes>,\n\n}\n\n\n\nimpl DataDefinitions {\n\n pub fn new() -> Self {\n", "file_path": "macros/src/parser/data.rs", "rank": 54, "score": 26625.53002313385 }, { "content": " \"This event's type does not match its previous definition.\",\n\n ));\n\n }\n\n }\n\n\n\n if let Some(data_type) = data_type {\n\n self.add(key, data_type)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "macros/src/parser/data.rs", "rank": 55, "score": 26625.42910132839 }, { "content": " 
Self {\n\n data_types: DataTypes::new(),\n\n all_lifetimes: Lifetimes::new(),\n\n lifetimes: HashMap::new(),\n\n }\n\n }\n\n\n\n // helper function for adding a new data type to a data descriptions struct\n\n fn add(&mut self, key: String, data_type: Type) -> Result<(), parse::Error> {\n\n // retrieve any lifetimes used in this data-type\n\n let mut lifetimes = get_lifetimes(&data_type)?;\n\n\n\n // add the data to the collection\n\n self.data_types.insert(key.clone(), data_type);\n\n\n\n // if any new lifetimes were used in the type definition, we add those as well\n\n if !lifetimes.is_empty() {\n\n self.lifetimes.insert(key, lifetimes.clone());\n\n self.all_lifetimes.append(&mut lifetimes);\n\n }\n", "file_path": "macros/src/parser/data.rs", "rank": 56, "score": 26624.978557610946 }, { "content": "use super::transition::{StateTransition, StateTransitions};\n\nuse syn::{braced, parse, spanned::Spanned, token, Ident, Token, Type};\n\n\n\n#[derive(Debug)]\n\npub struct StateMachine {\n\n pub temporary_context_type: Option<Type>,\n\n pub guard_error: Option<Type>,\n\n pub transitions: Vec<StateTransition>,\n\n}\n\n\n\nimpl StateMachine {\n\n pub fn new() -> Self {\n\n StateMachine {\n\n temporary_context_type: None,\n\n guard_error: None,\n\n transitions: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn add_transitions(&mut self, transitions: StateTransitions) {\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 57, "score": 24749.4241538714 }, { "content": "use syn::{parenthesized, parse, spanned::Spanned, token, Ident, Token, Type};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct OutputState {\n\n pub ident: Ident,\n\n pub data_type: Option<Type>,\n\n}\n\n\n\nimpl parse::Parse for OutputState {\n\n fn parse(input: parse::ParseStream) -> syn::Result<Self> {\n\n input.parse::<Token![=]>()?;\n\n let ident: Ident = input.parse()?;\n\n\n\n // Possible type on the output state\n\n let data_type = if input.peek(token::Paren) {\n\n let content;\n\n parenthesized!(content in 
input);\n\n let input: Type = content.parse()?;\n\n\n\n // Check so the type is supported\n", "file_path": "macros/src/parser/output_state.rs", "rank": 58, "score": 24747.781306696783 }, { "content": " wildcard,\n\n ident,\n\n data_type,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use syn::parse_quote;\n\n\n\n #[test]\n\n #[should_panic(expected = \"Wildcards can't be used as the starting state.\")]\n\n fn wildcard_used_as_start() {\n\n let _: InputState = parse_quote! {\n\n *_\n\n };\n\n }\n", "file_path": "macros/src/parser/input_state.rs", "rank": 59, "score": 24747.397574228744 }, { "content": "use syn::{parenthesized, parse, spanned::Spanned, token, Ident, Token, Type};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct InputState {\n\n pub start: bool,\n\n pub wildcard: bool,\n\n pub ident: Ident,\n\n pub data_type: Option<Type>,\n\n}\n\n\n\nimpl parse::Parse for InputState {\n\n fn parse(input: parse::ParseStream) -> syn::Result<Self> {\n\n // Check for starting state definition\n\n let start = input.parse::<Token![*]>().is_ok();\n\n\n\n // check to see if this is a wildcard state, which is denoted with \"underscore\"\n\n let underscore = input.parse::<Token![_]>();\n\n let wildcard = underscore.is_ok();\n\n\n\n // wildcards can't be used as starting states\n", "file_path": "macros/src/parser/input_state.rs", "rank": 60, "score": 24747.19620487768 }, { "content": " if start && wildcard {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n \"Wildcards can't be used as the starting state.\",\n\n ));\n\n }\n\n\n\n // Input State\n\n let ident: Ident = if let Ok(underscore) = underscore {\n\n underscore.into()\n\n } else {\n\n input.parse()?\n\n };\n\n\n\n // Possible type on the input state\n\n let data_type = if input.peek(token::Paren) {\n\n let content;\n\n parenthesized!(content in input);\n\n let input: Type = content.parse()?;\n\n\n", "file_path": "macros/src/parser/input_state.rs", "rank": 61, "score": 24744.352057024316 
}, { "content": " match &input {\n\n Type::Array(_)\n\n | Type::Path(_)\n\n | Type::Ptr(_)\n\n | Type::Reference(_)\n\n | Type::Slice(_)\n\n | Type::Tuple(_) => (),\n\n _ => {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n \"This is an unsupported type for states.\",\n\n ))\n\n }\n\n }\n\n\n\n Some(input)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(Self { ident, data_type })\n\n }\n\n}\n", "file_path": "macros/src/parser/output_state.rs", "rank": 62, "score": 24742.668679066373 }, { "content": " };\n\n\n\n assert!(start.start);\n\n assert!(!start.wildcard);\n\n assert!(start.data_type.is_none());\n\n }\n\n\n\n #[test]\n\n fn state_without_data() {\n\n let state: InputState = parse_quote! {\n\n State\n\n };\n\n\n\n assert!(!state.start);\n\n assert!(!state.wildcard);\n\n assert!(state.data_type.is_none());\n\n }\n\n\n\n #[test]\n\n fn state_with_data() {\n", "file_path": "macros/src/parser/input_state.rs", "rank": 63, "score": 24741.74902998882 }, { "content": " for in_state in transitions.in_states {\n\n let transition = StateTransition {\n\n in_state,\n\n event: transitions.event.clone(),\n\n guard: transitions.guard.clone(),\n\n action: transitions.action.clone(),\n\n out_state: transitions.out_state.clone(),\n\n };\n\n self.transitions.push(transition);\n\n }\n\n }\n\n}\n\n\n\nimpl parse::Parse for StateMachine {\n\n fn parse(input: parse::ParseStream) -> parse::Result<Self> {\n\n let mut statemachine = StateMachine::new();\n\n\n\n loop {\n\n // If the last line ends with a comma this is true\n\n if input.is_empty() {\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 64, "score": 24741.678571746463 }, { "content": " let state: InputState = parse_quote! 
{\n\n State(u8)\n\n };\n\n\n\n assert!(!state.start);\n\n assert!(!state.wildcard);\n\n assert!(state.data_type.is_some());\n\n }\n\n}\n", "file_path": "macros/src/parser/input_state.rs", "rank": 65, "score": 24740.82969659645 }, { "content": "\n\n #[test]\n\n #[should_panic(expected = \"The starting state cannot have data associated with it.\")]\n\n fn input_state_with_data() {\n\n let _: InputState = parse_quote! {\n\n *Start(u8)\n\n };\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"Wildcard states cannot have data associated with it.\")]\n\n fn wildcard_with_data() {\n\n let _: InputState = parse_quote! {\n\n _(u8)\n\n };\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"This is an unsupported type for states.\")]\n\n fn unsupported_type() {\n", "file_path": "macros/src/parser/input_state.rs", "rank": 66, "score": 24740.78854790517 }, { "content": " // Check if this is the starting state, it cannot have data as there is no\n\n // supported way of propagating it (for now)\n\n if start {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n \"The starting state cannot have data associated with it.\",\n\n ));\n\n }\n\n\n\n // Wilcards should not have data associated, as data will already be defined\n\n if wildcard {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n \"Wildcard states cannot have data associated with it.\",\n\n ));\n\n }\n\n\n\n // Check so the type is supported\n\n match &input {\n\n Type::Array(_)\n", "file_path": "macros/src/parser/input_state.rs", "rank": 67, "score": 24740.246004248165 }, { "content": "extern crate smlang;\n\n\n\nuse smlang::statemachine;\n\n\n\nstatemachine! 
{ \n\n transitions: {\n\n //~ ERROR No starting state defined, indicate the starting state with a *\n\n State1 + Event1 = State2,\n\n State2 + Event2 = State3,\n\n }\n\n}\n\n\n", "file_path": "tests/compile-fail/no_starting_state.rs", "rank": 68, "score": 24740.066030917067 }, { "content": " break;\n\n }\n\n\n\n match input.parse::<Ident>()?.to_string().as_str() {\n\n \"transitions\" => {\n\n input.parse::<Token![:]>()?;\n\n if input.peek(token::Brace) {\n\n let content;\n\n braced!(content in input);\n\n loop {\n\n if content.is_empty() {\n\n break;\n\n }\n\n\n\n let transitions: StateTransitions = content.parse()?;\n\n statemachine.add_transitions(transitions);\n\n\n\n // No comma at end of line, no more transitions\n\n if content.is_empty() {\n\n break;\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 69, "score": 24739.990652729506 }, { "content": "//! Pattern Matching State Machine\n\n//!\n\n//! This demonstrates the use of input state pattern matching so that states that share a common\n\n//! transition to the same output state can be described more succinctly\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n// statemachine! 
{\n\n// transitions: {\n\n// *Idle + Charge = Charging,\n\n// Idle + Discharge = Discharging,\n\n// Charging + ChargeComplete = Charged,\n\n// Discharging + DischargeComplete = Discharged,\n\n// Charged + Discharge = Discharging,\n\n// Dischaged + Charge = Charging,\n\n// Charging + Discharge = Discharging,\n\n// Discharging + Charge = Charging,\n\n// Idle + FaultDetected = Fault,\n\n// Charging + FaultDetected = Fault,\n", "file_path": "examples/input_state_pattern_match.rs", "rank": 70, "score": 24739.759726412973 }, { "content": " | Type::Slice(_)\n\n | Type::Tuple(_) => (),\n\n _ => {\n\n return Err(parse::Error::new(\n\n temporary_context_type.span(),\n\n \"This is an unsupported type for the temporary state.\",\n\n ))\n\n }\n\n }\n\n\n\n // Store the temporary context type\n\n statemachine.temporary_context_type = Some(temporary_context_type);\n\n\n\n }\n\n keyword => {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n format!(\"Unknown keyword {}. Support keywords: [\\\"transitions\\\", \\\"temporary_context\\\", \\\"guard_error\\\"]\", keyword)\n\n ))\n\n }\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 71, "score": 24739.56199350053 }, { "content": " let _: InputState = parse_quote! {\n\n State1(!)\n\n };\n\n }\n\n\n\n #[test]\n\n fn wildcard() {\n\n let wildcard: InputState = parse_quote! {\n\n _\n\n };\n\n\n\n assert!(wildcard.wildcard);\n\n assert!(!wildcard.start);\n\n assert!(wildcard.data_type.is_none());\n\n }\n\n\n\n #[test]\n\n fn start() {\n\n let start: InputState = parse_quote! 
{\n\n *Start\n", "file_path": "macros/src/parser/input_state.rs", "rank": 72, "score": 24739.46789425498 }, { "content": " let r = sm.process_event(Events::FaultDetected);\n\n assert!(r == Ok(&States::Fault));\n\n\n\n sm = StateMachine::new_with_state(Context, States::Discharging);\n\n let r = sm.process_event(Events::FaultDetected);\n\n assert!(r == Ok(&States::Fault));\n\n\n\n sm = StateMachine::new_with_state(Context, States::Discharged);\n\n let r = sm.process_event(Events::FaultDetected);\n\n assert!(r == Ok(&States::Fault));\n\n\n\n let r = sm.process_event(Events::Charge);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::Fault);\n\n\n\n let r = sm.process_event(Events::Discharge);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::Fault);\n\n\n\n let r = sm.process_event(Events::ChargeComplete);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::Fault);\n\n\n\n let r = sm.process_event(Events::DischargeComplete);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::Fault);\n\n}\n", "file_path": "examples/input_state_pattern_match.rs", "rank": 73, "score": 24739.407223289196 }, { "content": "\n\n let r = sm.process_event(Events::Discharge);\n\n assert!(r == Ok(&States::Discharging));\n\n\n\n let r = sm.process_event(Events::DischargeComplete);\n\n assert!(r == Ok(&States::Discharged));\n\n\n\n let r = sm.process_event(Events::Discharge);\n\n assert!(r == Err(Error::InvalidEvent));\n\n assert!(sm.state() == &States::Discharged);\n\n\n\n sm = StateMachine::new_with_state(Context, States::Idle);\n\n let r = sm.process_event(Events::FaultDetected);\n\n assert!(r == Ok(&States::Fault));\n\n\n\n sm = StateMachine::new_with_state(Context, States::Charging);\n\n let r = sm.process_event(Events::FaultDetected);\n\n assert!(r == Ok(&States::Fault));\n\n\n\n sm = StateMachine::new_with_state(Context, States::Charged);\n", "file_path": 
"examples/input_state_pattern_match.rs", "rank": 74, "score": 24739.361616471582 }, { "content": " | Type::Path(_)\n\n | Type::Ptr(_)\n\n | Type::Reference(_)\n\n | Type::Slice(_)\n\n | Type::Tuple(_) => (),\n\n _ => {\n\n return Err(parse::Error::new(\n\n input.span(),\n\n \"This is an unsupported type for states.\",\n\n ))\n\n }\n\n }\n\n\n\n Some(input)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(Self {\n\n start,\n", "file_path": "macros/src/parser/input_state.rs", "rank": 75, "score": 24738.495276453104 }, { "content": "// Discharging + FaultDetected = Fault,\n\n// Charged + FaultDetected = Fault,\n\n// Discharged + FaultDetected = Fault,\n\n// Fault + FaultCleard = Idle,\n\n// },\n\n// }\n\n\n\n// A simple charge/discharge state machine that has a dedicated \"Fault\" state\n\nstatemachine! {\n\n transitions: {\n\n *Idle | Discharging | Discharged + Charge = Charging,\n\n Idle | Charging | Charged + Discharge = Discharging,\n\n Charging + ChargeComplete = Charged,\n\n Discharging + DischargeComplete = Discharged,\n\n _ + FaultDetected = Fault,\n\n Fault + FaultCleard = Idle,\n\n },\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {}\n\n\n", "file_path": "examples/input_state_pattern_match.rs", "rank": 76, "score": 24738.162430131186 }, { "content": " }\n\n\n\n if let Err(_) = content.parse::<Token![,]>() {\n\n break;\n\n };\n\n }\n\n }\n\n }\n\n \"guard_error\" => {\n\n input.parse::<Token![:]>()?;\n\n let guard_error: Type = input.parse()?;\n\n\n\n // Check so the type is supported\n\n match &guard_error {\n\n Type::Array(_)\n\n | Type::Path(_)\n\n | Type::Ptr(_)\n\n | Type::Reference(_)\n\n | Type::Slice(_)\n\n | Type::Tuple(_) => (),\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 77, "score": 24736.773667330697 }, { "content": " _ => {\n\n return Err(parse::Error::new(\n\n guard_error.span(),\n\n \"This is an unsupported type for guard error.\",\n\n ))\n\n }\n\n }\n\n\n\n statemachine.guard_error = 
Some(guard_error);\n\n }\n\n \"temporary_context\" => {\n\n input.parse::<Token![:]>()?;\n\n let temporary_context_type: Type = input.parse()?;\n\n\n\n // Check so the type is supported\n\n match &temporary_context_type {\n\n Type::Array(_)\n\n | Type::Path(_)\n\n | Type::Ptr(_)\n\n | Type::Reference(_)\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 78, "score": 24736.680691436006 }, { "content": " }\n\n\n\n // No comma at end of line, no more transitions\n\n if input.is_empty() {\n\n break;\n\n }\n\n\n\n if let Err(_) = input.parse::<Token![,]>() {\n\n break;\n\n };\n\n }\n\n\n\n Ok(statemachine)\n\n }\n\n}\n", "file_path": "macros/src/parser/state_machine.rs", "rank": 79, "score": 24735.591481250067 }, { "content": "//! Guard and action syntax example\n\n//!\n\n//! An example of using guards and actions with state and event data.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// Event data\n\n#[derive(PartialEq)]\n\npub struct MyEventData(pub u32);\n\n\n\n/// State data\n\n#[derive(PartialEq)]\n\npub struct MyStateData(pub u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1(MyEventData) [guard1] / action1 = State2,\n\n State2(MyStateData) + Event2 [guard2] / action2 = State3,\n", "file_path": "examples/guard_action_syntax.rs", "rank": 80, "score": 15.618926757660224 }, { "content": "//! Guard and action syntax example\n\n//!\n\n//! An example of using guards and actions with state and event data.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// Event data\n\n#[derive(PartialEq)]\n\npub struct MyEventData(pub u32);\n\n\n\n/// State data\n\n#[derive(PartialEq)]\n\npub struct MyStateData(pub u32);\n\n\n\nstatemachine! 
{\n\n temporary_context: &mut u16,\n\n transitions: {\n\n *State1 + Event1(MyEventData) [guard1] / action1 = State2,\n", "file_path": "examples/guard_action_syntax_with_temporary_context.rs", "rank": 81, "score": 15.556947958165992 }, { "content": "### State machine context\n\n\n\nThe state machine needs a context to be defined.\n\nThe `StateMachineContext` is generated from the `statemachine!` proc-macro and is what implements guards and actions, and data that is available in all states within the state machine and persists between state transitions:\n\n\n\n```rust\n\nstatemachine!{\n\n transitions: {\n\n State1 + Event1 = State2,\n\n }\n\n // ...\n\n}\n\n\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {}\n\n\n\nfn main() {\n\n let mut sm = StateMachine::new(Context);\n\n\n\n // ...\n\n}\n\n```\n\n\n\nSee example `examples/context.rs` for a usage example.\n\n\n\n### State data\n\n\n\nAny state may have some data associated with it (except the starting state):\n\n\n\n```rust\n\npub struct MyStateData(pub u32);\n\n\n\nstatemachine!{\n\n transitions: {\n\n State1(MyStateData) + Event1 = State2,\n\n }\n\n // ...\n\n}\n\n```\n\n\n\nSee example `examples/state_with_data.rs` for a usage example.\n\n\n\nState data may also have associated lifetimes which the `statemachine!` macro will pick up and add the `States` enum and `StateMachine` structure. This means the following will also work:\n\n\n\n```rust\n\npub struct MyStateData<'a>(&'a u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1 / action = State2,\n\n State2(MyStateData<'a>) + Event2 = State1,\n\n // ...\n\n }\n\n // ...\n\n}\n\n```\n\n\n\nSee example `examples/state_with_reference_data.rs` for a usage example.\n\n\n", "file_path": "README.md", "rank": 82, "score": 13.280465853793876 }, { "content": "//! Guard and action syntax example\n\n//!\n\n//! 
An example of using guards and actions with state and event data.\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\n/// Custom guard errors\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum GuardError {\n\n /// This is a custom guard error variant\n\n Custom,\n\n}\n\n\n\n/// Event data\n\n#[derive(PartialEq)]\n\npub struct MyEventData(pub u32);\n\n\n\n/// State data\n", "file_path": "examples/guard_custom_error.rs", "rank": 83, "score": 13.010489421411926 }, { "content": "#[derive(PartialEq)]\n\npub struct MyStateData(pub u32);\n\n\n\nstatemachine! {\n\n transitions: {\n\n *State1 + Event1(MyEventData) [guard1] / action1 = State2,\n\n State2(MyStateData) + Event2 [guard2] / action2 = State3,\n\n // ...\n\n },\n\n guard_error: GuardError,\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n // Guard1 has access to the data from Event1\n\n fn guard1(&mut self, _event_data: &MyEventData) -> Result<(), GuardError> {\n\n Err(GuardError::Custom)\n\n }\n", "file_path": "examples/guard_custom_error.rs", "rank": 84, "score": 12.36464988487938 }, { "content": "### Event data\n\n\n\nData may be passed along with an event into the `guard` and `action`:\n\n\n\n```rust\n\npub struct MyEventData(pub u32);\n\n\n\nstatemachine!{\n\n transitions: {\n\n State1 + Event1(MyEventData) [guard] = State2,\n\n }\n\n // ...\n\n}\n\n```\n\n\n\nEvent data may also have associated lifetimes which the `statemachine!` macro will pick up and add the `Events` enum. 
This means the following will also work:\n\n\n\n```rust\n\npub struct MyEventData<'a>(pub &'a u32);\n\n\n\nstatemachine!{\n\n transitions: {\n\n State1 + Event1(MyEventData<'a>) [guard1] = State2,\n\n State1 + Event2(&'a [u8]) [guard2] = State3,\n\n }\n\n // ...\n\n}\n\n```\n\n\n\nSee example `examples/event_with_data.rs` for a usage example.\n\n\n\n### Guard and Action syntax\n\n\n\nSee example `examples/guard_action_syntax.rs` for a usage-example.\n\n\n\n## State Machine Examples\n\n\n\nHere are some examples of state machines converted from UML to the State Machine Language DSL. Runnable versions of each example is available in the `examples` folder.\n\n\n\n### Linear state machine\n\n\n\n![alt text](./docs/sm1.png \"\")\n\n\n\nDSL implementation:\n\n\n\n```rust\n\nstatemachine!{\n\n transitions: {\n\n *State1 + Event1 = State2,\n\n State2 + Event2 = State3,\n\n }\n\n}\n\n```\n\n\n\nThis example is available in `ex1.rs`.\n\n\n\n### Looping state machine\n\n\n\n![alt text](./docs/sm2.png \"\")\n\n\n\nDSL implementation:\n\n\n\n```rust\n\nstatemachine!{\n\n transitions: {\n\n *State1 + Event1 = State2,\n\n State2 + Event2 = State3,\n\n State3 + Event3 = State2,\n\n }\n\n}\n\n```\n\n\n\nThis example is available in `ex2.rs`.\n\n\n\n### Using guards and actions\n\n\n\n![alt text](./docs/sm3.png \"\")\n\n\n\nDSL implementation:\n\n\n\n```rust\n\nstatemachine!{\n\n transitions: {\n\n *State1 + Event1 [guard] / action = State2,\n\n }\n\n}\n\n```\n\n\n\nThis example is available in `ex3.rs`.\n\n\n\n## Contributors\n\n\n\nList of contributors in alphabetical order:\n\n\n\n* Emil Fresk ([@korken89](https://github.com/korken89))\n\n* Mathias Koch ([@MathiasKoch](https://github.com/MathiasKoch))\n\n* Donny Zimmanck ([@dzimmanck](https://github.com/dzimmanck))\n\n\n\n---\n\n\n\n## License\n\n\n", "file_path": "README.md", "rank": 85, "score": 12.34437539930078 }, { "content": " }\n\n };\n\n\n\n let state_data = match sm.state_data.data_types.get(state) {\n\n Some(st) => 
{\n\n quote! { state_data: &#st, }\n\n }\n\n None => {\n\n quote! {}\n\n }\n\n };\n\n let event_data = match sm.event_data.data_types.get(event) {\n\n Some(et) => match et {\n\n Type::Reference(_) => {\n\n quote! { event_data: #et }\n\n }\n\n _ => {\n\n quote! { event_data: &#et }\n\n }\n\n },\n", "file_path": "macros/src/codegen.rs", "rank": 86, "score": 12.306002956196918 }, { "content": "//! An example of using state data to propagate events (See issue-17)\n\n\n\n#![deny(missing_docs)]\n\n\n\nuse smlang::statemachine;\n\n\n\nstatemachine! {\n\n transitions: {\n\n *D0 + ToD1 / to_d2 = D1,\n\n D1(Option<Events>) + ToD2 / to_d3 = D2,\n\n D2(Option<Events>) + ToD3 / to_d4 = D3,\n\n D3(Option<Events>) + ToD4 / to_d5 = D4,\n\n D4(Option<Events>) + ToD5 = D5,\n\n }\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n", "file_path": "examples/dominos.rs", "rank": 87, "score": 12.251146743390324 }, { "content": " }\n\n }\n\n },\n\n )\n\n .collect();\n\n\n\n let transitions = &sm.states_events_mapping;\n\n\n\n let in_states: Vec<_> = transitions\n\n .iter()\n\n .map(|(name, _)| {\n\n let state_name = sm.states.get(name).unwrap();\n\n\n\n match sm.state_data.data_types.get(name) {\n\n None => {\n\n quote! {\n\n #state_name\n\n }\n\n }\n\n Some(_) => {\n", "file_path": "macros/src/codegen.rs", "rank": 88, "score": 12.19255029266019 }, { "content": " sm.state_data.data_types.get(state_name),\n\n sm.event_data.data_types.get(name),\n\n ) {\n\n (None, None) => {\n\n quote! {}\n\n }\n\n (Some(_), None) => {\n\n quote! {\n\n state_data\n\n }\n\n }\n\n (None, Some(_)) => {\n\n quote! {\n\n event_data\n\n }\n\n }\n\n (Some(_), Some(_)) => {\n\n quote! {\n\n state_data, event_data\n\n }\n", "file_path": "macros/src/codegen.rs", "rank": 89, "score": 12.007286012712944 }, { "content": " quote! 
{\n\n #state_name(ref state_data)\n\n }\n\n }\n\n }\n\n })\n\n .collect();\n\n\n\n let events: Vec<Vec<_>> = transitions\n\n .iter()\n\n .map(|(_, value)| {\n\n value\n\n .iter()\n\n .map(|(name, value)| {\n\n let value = &value.event;\n\n\n\n match sm.event_data.data_types.get(name) {\n\n None => {\n\n quote! {\n\n #value\n", "file_path": "macros/src/codegen.rs", "rank": 90, "score": 12.006884996434792 }, { "content": "//!\n\n//! /// Process an event\n\n//! pub fn process_event(&mut self, event: Events) -> Result<States, Error>;\n\n//! }\n\n//! ```\n\n\n\n#![no_std]\n\n\n\npub use smlang_macros::statemachine;\n", "file_path": "src/lib.rs", "rank": 91, "score": 11.178484883885226 }, { "content": "\n\n // Keep track of already added actions not to duplicate definitions\n\n let mut action_set: Vec<syn::Ident> = Vec::new();\n\n let mut guard_set: Vec<syn::Ident> = Vec::new();\n\n\n\n let mut guard_list = proc_macro2::TokenStream::new();\n\n let mut action_list = proc_macro2::TokenStream::new();\n\n for (state, value) in transitions.iter() {\n\n // create the state data token stream\n\n let state_data = match sm.state_data.data_types.get(state) {\n\n Some(st) => quote! { state_data: &#st, },\n\n None => quote! 
{},\n\n };\n\n\n\n value.iter().for_each(|(event, value)| {\n\n\n\n // get output state lifetimes\n\n let state_lifetimes = if let Some(lifetimes) = sm.state_data.lifetimes.get(&value.out_state.to_string()) {\n\n lifetimes.clone()\n\n } else {\n", "file_path": "macros/src/codegen.rs", "rank": 92, "score": 11.170259325136588 }, { "content": " ));\n\n diagram_transitions.push((\n\n state,\n\n eventmapping.out_state.to_string(),\n\n eventmapping.event.to_string(),\n\n ));\n\n }\n\n }\n\n\n\n let state_string = diagram_states\n\n .map(|s| {\n\n format!(\n\n \"\\t{} [shape=box color=\\\"red\\\" fillcolor=\\\"#ffbb33\\\" style=filled]\",\n\n s\n\n )\n\n })\n\n .collect::<Vec<String>>();\n\n let event_string = diagram_events\n\n .iter()\n\n .map(|s| {\n", "file_path": "macros/src/diagramgen.rs", "rank": 93, "score": 10.608775401828336 }, { "content": " // ...\n\n }\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n // Guard1 has access to the data from Event1\n\n fn guard1(&mut self, _event_data: &MyEventData) -> Result<(), ()> {\n\n todo!()\n\n }\n\n\n\n // Action1 has access to the data from Event1, and need to return the state data for State2\n\n fn action1(&mut self, _event_data: &MyEventData) -> MyStateData {\n\n todo!()\n\n }\n\n\n\n // Guard2 has access to the data from State2\n\n fn guard2(&mut self, _state_data: &MyStateData) -> Result<(), ()> {\n\n todo!()\n\n }\n\n\n\n // Action2 has access to the data from State2\n\n fn action2(&mut self, _state_data: &MyStateData) {\n\n todo!()\n\n }\n\n}\n\n\n", "file_path": "examples/guard_action_syntax.rs", "rank": 94, "score": 10.58137109283394 }, { "content": "\n\n /// Process an event.\n\n ///\n\n /// It will return `Ok(&NextState)` if the transition was successful, or `Err(Error)`\n\n /// if there was an error in the transition.\n\n pub fn process_event(&mut self, #temporary_context mut event: Events) -> Result<&States, Error> {\n\n match self.state {\n\n 
#(States::#in_states => match event {\n\n #(Events::#events => {\n\n #code_blocks\n\n\n\n Ok(&self.state)\n\n }),*\n\n _ => Err(Error::InvalidEvent),\n\n }),*\n\n _ => Err(Error::InvalidEvent),\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "macros/src/codegen.rs", "rank": 95, "score": 10.543627015366624 }, { "content": " }\n\n }\n\n Some(_) => {\n\n quote! {\n\n #value(ref mut event_data)\n\n }\n\n }\n\n }\n\n })\n\n .collect()\n\n })\n\n .collect();\n\n\n\n // println!(\"sm: {:#?}\", sm);\n\n // println!(\"in_states: {:#?}\", in_states);\n\n // println!(\"events: {:#?}\", events);\n\n // println!(\"transitions: {:#?}\", transitions);\n\n\n\n // Map guards, actions and output states into code blocks\n\n let guards: Vec<Vec<_>> = transitions\n", "file_path": "macros/src/codegen.rs", "rank": 96, "score": 10.481329868255813 }, { "content": " State2(MyStateData) + Event2 [guard2] / action2 = State3,\n\n // ...\n\n },\n\n}\n\n\n\n/// Context\n\npub struct Context;\n\n\n\nimpl StateMachineContext for Context {\n\n // Guard1 has access to the data from Event1\n\n fn guard1(&mut self, temp_context: &mut u16, _event_data: &MyEventData) -> Result<(), ()> {\n\n *temp_context += 1;\n\n\n\n Ok(())\n\n }\n\n\n\n // Action1 has access to the data from Event1, and need to return the state data for State2\n\n fn action1(&mut self, temp_context: &mut u16, _event_data: &MyEventData) -> MyStateData {\n\n *temp_context += 1;\n\n\n", "file_path": "examples/guard_action_syntax_with_temporary_context.rs", "rank": 97, "score": 10.234280478380947 }, { "content": " }\n\n }\n\n })\n\n .collect()\n\n })\n\n .collect();\n\n\n\n let out_states: Vec<Vec<_>> = transitions\n\n .iter()\n\n .map(|(_, value)| {\n\n value\n\n .iter()\n\n .map(|(_, value)| {\n\n let out_state = &value.out_state;\n\n\n\n match sm.state_data.data_types.get(&out_state.to_string()) {\n\n None => {\n\n quote! 
{\n\n #out_state\n\n }\n", "file_path": "macros/src/codegen.rs", "rank": 98, "score": 9.762515319070797 }, { "content": " } else {\n\n quote! {\n\n #guard\n\n }\n\n };\n\n\n\n let event_data = match sm.event_data.data_types.get(event) {\n\n Some(et) => match et {\n\n Type::Reference(_) => {\n\n quote! { event_data: #et }\n\n }\n\n _ => {\n\n quote! { event_data: &#et }\n\n }\n\n },\n\n None => {\n\n quote! {}\n\n }\n\n };\n\n\n", "file_path": "macros/src/codegen.rs", "rank": 99, "score": 9.704019487410275 } ]
Rust
futures-util/src/future/try_join.rs
zhanghanyun/futures-rs
f1f28da9bdf4bd5ac1182d6adf9ad71d29bfe728
#![allow(non_snake_case)] use crate::future::{TryMaybeDone, try_maybe_done}; use core::fmt; use core::pin::Pin; use futures_core::future::{Future, TryFuture}; use futures_core::task::{Context, Poll}; use pin_project::pin_project; macro_rules! generate { ($( $(#[$doc:meta])* ($Join:ident, <Fut1, $($Fut:ident),*>), )*) => ($( $(#[$doc])* #[pin_project] #[must_use = "futures do nothing unless you `.await` or poll them"] pub struct $Join<Fut1: TryFuture, $($Fut: TryFuture),*> { #[pin] Fut1: TryMaybeDone<Fut1>, $(#[pin] $Fut: TryMaybeDone<$Fut>,)* } impl<Fut1, $($Fut),*> fmt::Debug for $Join<Fut1, $($Fut),*> where Fut1: TryFuture + fmt::Debug, Fut1::Ok: fmt::Debug, Fut1::Error: fmt::Debug, $( $Fut: TryFuture + fmt::Debug, $Fut::Ok: fmt::Debug, $Fut::Error: fmt::Debug, )* { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct(stringify!($Join)) .field("Fut1", &self.Fut1) $(.field(stringify!($Fut), &self.$Fut))* .finish() } } impl<Fut1, $($Fut),*> $Join<Fut1, $($Fut),*> where Fut1: TryFuture, $( $Fut: TryFuture<Error=Fut1::Error> ),* { fn new(Fut1: Fut1, $($Fut: $Fut),*) -> Self { Self { Fut1: try_maybe_done(Fut1), $($Fut: try_maybe_done($Fut)),* } } } impl<Fut1, $($Fut),*> Future for $Join<Fut1, $($Fut),*> where Fut1: TryFuture, $( $Fut: TryFuture<Error=Fut1::Error> ),* { type Output = Result<(Fut1::Ok, $($Fut::Ok),*), Fut1::Error>; fn poll( self: Pin<&mut Self>, cx: &mut Context<'_> ) -> Poll<Self::Output> { let mut all_done = true; let mut futures = self.project(); all_done &= futures.Fut1.as_mut().poll(cx)?.is_ready(); $( all_done &= futures.$Fut.as_mut().poll(cx)?.is_ready(); )* if all_done { Poll::Ready(Ok(( futures.Fut1.take_output().unwrap(), $( futures.$Fut.take_output().unwrap() ),* ))) } else { Poll::Pending } } } )*) } generate! 
{ (TryJoin, <Fut1, Fut2>), (TryJoin3, <Fut1, Fut2, Fut3>), (TryJoin4, <Fut1, Fut2, Fut3, Fut4>), (TryJoin5, <Fut1, Fut2, Fut3, Fut4, Fut5>), } pub fn try_join<Fut1, Fut2>(future1: Fut1, future2: Fut2) -> TryJoin<Fut1, Fut2> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, { TryJoin::new(future1, future2) } pub fn try_join3<Fut1, Fut2, Fut3>( future1: Fut1, future2: Fut2, future3: Fut3, ) -> TryJoin3<Fut1, Fut2, Fut3> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, Fut3: TryFuture<Error = Fut1::Error>, { TryJoin3::new(future1, future2, future3) } pub fn try_join4<Fut1, Fut2, Fut3, Fut4>( future1: Fut1, future2: Fut2, future3: Fut3, future4: Fut4, ) -> TryJoin4<Fut1, Fut2, Fut3, Fut4> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, Fut3: TryFuture<Error = Fut1::Error>, Fut4: TryFuture<Error = Fut1::Error>, { TryJoin4::new(future1, future2, future3, future4) } pub fn try_join5<Fut1, Fut2, Fut3, Fut4, Fut5>( future1: Fut1, future2: Fut2, future3: Fut3, future4: Fut4, future5: Fut5, ) -> TryJoin5<Fut1, Fut2, Fut3, Fut4, Fut5> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, Fut3: TryFuture<Error = Fut1::Error>, Fut4: TryFuture<Error = Fut1::Error>, Fut5: TryFuture<Error = Fut1::Error>, { TryJoin5::new(future1, future2, future3, future4, future5) }
#![allow(non_snake_case)] use crate::future::{TryMaybeDone, try_maybe_done}; use core::fmt; use core::pin::Pin; use futures_core::future::{Future, TryFuture}; use futures_core::task::{Context, Poll}; use pin_project::pin_project; macro_rules! generate { ($( $(#[$doc:meta])* ($Join:ident, <Fut1, $($Fut:ident),*>), )*) => ($( $(#[$doc])* #[pin_project] #[must_use = "futures do nothing unless you `.await` or poll them"] pub struct $Join<Fut1: TryFuture, $($Fut: TryFuture),*> { #[pin] Fut1: TryMaybeDone<Fut1>, $(#[pin] $Fut: TryMaybeDone<$Fut>,)* } impl<Fut1, $($Fut),*> fmt::Debug for $Join<Fut1, $($Fut),*> where Fut1: TryFuture + fmt::Debug, Fut1::Ok: fmt::Debug, Fut1::Error: fmt::Debug, $( $Fut: TryFuture + fmt::Debug, $Fut::Ok: fmt::Debug, $Fut::Error: fmt::Debug, )* { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct(stringify!($Join)) .field("Fut1", &self.Fut1) $(.field(stringify!($Fut), &self.$Fut))* .finish() } } impl<Fut1, $($Fut),*> $Join<Fut1, $($Fut),*> where Fut1: TryFuture, $( $Fut: TryFuture<Error=Fut1::Error> ),* { fn new(Fut1: Fut1, $($Fut: $Fut),*) -> Self { Self { Fut1: try_maybe_done(Fut1), $($Fut: try_maybe_done($Fut)),* } } } impl<Fut1, $($Fut),*> Future for $Join<Fut1, $($Fut),*> where Fut1: TryFuture, $( $Fut: TryFuture<Error=Fut1::Error> ),* { type Output = Result<(Fut1::Ok, $($Fut::Ok),*), Fut1::Error>; fn poll( self: Pin<&mut Self>, cx: &mut Context<'_> ) -> Poll<Self::Output> { let mut all_done = true; let mut futures = self.project(); all_done &= futures.Fut1.as_mut().poll(cx)?.is_ready(); $( all_done &= futures.$Fut.as_mut().poll(cx)?.is_ready(); )* if all_done { Poll::Ready(Ok(( futures.Fut1.take_output().unwrap(), $( futures.$Fut.take_output().unwrap() ),* ))) } else { Poll::Pending } } } )*) } generate! 
{ (TryJoin, <Fut1, Fut2>), (TryJoin3, <Fut1, Fut2, Fut3>), (TryJoin4, <Fut1, Fut2, Fut3, Fut4>), (TryJoin5, <Fut1, Fut2, Fut3, Fut4, Fut5>), } pub fn try_join<Fut1, Fut2>(future1: Fut1, future2: Fut2) -> TryJoin<Fut1, Fut2> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, { TryJoin::new(future1, future2) } pub fn try_join3<Fut1, Fut2, Fut3>( future1: Fut1, future2: Fut2, future3: Fut3, ) -> TryJoin3<Fut1, Fut2, Fut3> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error
1, Fut2, Fut3, Fut4, Fut5> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, Fut3: TryFuture<Error = Fut1::Error>, Fut4: TryFuture<Error = Fut1::Error>, Fut5: TryFuture<Error = Fut1::Error>, { TryJoin5::new(future1, future2, future3, future4, future5) }
>, Fut3: TryFuture<Error = Fut1::Error>, { TryJoin3::new(future1, future2, future3) } pub fn try_join4<Fut1, Fut2, Fut3, Fut4>( future1: Fut1, future2: Fut2, future3: Fut3, future4: Fut4, ) -> TryJoin4<Fut1, Fut2, Fut3, Fut4> where Fut1: TryFuture, Fut2: TryFuture<Error = Fut1::Error>, Fut3: TryFuture<Error = Fut1::Error>, Fut4: TryFuture<Error = Fut1::Error>, { TryJoin4::new(future1, future2, future3, future4) } pub fn try_join5<Fut1, Fut2, Fut3, Fut4, Fut5>( future1: Fut1, future2: Fut2, future3: Fut3, future4: Fut4, future5: Fut5, ) -> TryJoin5<Fut
random
[ { "content": "/// Same as [`join`](join()), but with more futures.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = async { 1 };\n\n/// let b = async { 2 };\n\n/// let c = async { 3 };\n\n/// let d = async { 4 };\n\n/// let e = async { 5 };\n\n/// let tuple = future::join5(a, b, c, d, e);\n\n///\n\n/// assert_eq!(tuple.await, (1, 2, 3, 4, 5));\n\n/// # });\n\n/// ```\n\npub fn join5<Fut1, Fut2, Fut3, Fut4, Fut5>(\n\n future1: Fut1,\n\n future2: Fut2,\n\n future3: Fut3,\n\n future4: Fut4,\n\n future5: Fut5,\n\n) -> Join5<Fut1, Fut2, Fut3, Fut4, Fut5>\n\nwhere\n\n Fut1: Future,\n\n Fut2: Future,\n\n Fut3: Future,\n\n Fut4: Future,\n\n Fut5: Future,\n\n{\n\n Join5::new(future1, future2, future3, future4, future5)\n\n}\n", "file_path": "futures-util/src/future/join.rs", "rank": 0, "score": 489696.80540882587 }, { "content": "/// Joins the result of two futures, waiting for them both to complete.\n\n///\n\n/// This function will return a new future which awaits both futures to\n\n/// complete. 
The returned future will finish with a tuple of both results.\n\n///\n\n/// Note that this function consumes the passed futures and returns a\n\n/// wrapped version of it.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = async { 1 };\n\n/// let b = async { 2 };\n\n/// let pair = future::join(a, b);\n\n///\n\n/// assert_eq!(pair.await, (1, 2));\n\n/// # });\n\n/// ```\n\npub fn join<Fut1, Fut2>(future1: Fut1, future2: Fut2) -> Join<Fut1, Fut2>\n\nwhere\n\n Fut1: Future,\n\n Fut2: Future,\n\n{\n\n let f = Join::new(future1, future2);\n\n assert_future::<(Fut1::Output, Fut2::Output), _>(f)\n\n}\n\n\n", "file_path": "futures-util/src/future/join.rs", "rank": 3, "score": 444440.883107644 }, { "content": "/// Same as [`join`](join()), but with more futures.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = async { 1 };\n\n/// let b = async { 2 };\n\n/// let c = async { 3 };\n\n/// let d = async { 4 };\n\n/// let tuple = future::join4(a, b, c, d);\n\n///\n\n/// assert_eq!(tuple.await, (1, 2, 3, 4));\n\n/// # });\n\n/// ```\n\npub fn join4<Fut1, Fut2, Fut3, Fut4>(\n\n future1: Fut1,\n\n future2: Fut2,\n\n future3: Fut3,\n\n future4: Fut4,\n\n) -> Join4<Fut1, Fut2, Fut3, Fut4>\n\nwhere\n\n Fut1: Future,\n\n Fut2: Future,\n\n Fut3: Future,\n\n Fut4: Future,\n\n{\n\n Join4::new(future1, future2, future3, future4)\n\n}\n\n\n", "file_path": "futures-util/src/future/join.rs", "rank": 4, "score": 435367.4316883668 }, { "content": "/// Same as [`join`](join()), but with more futures.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = async { 1 };\n\n/// let b = async { 2 };\n\n/// let c = async { 3 };\n\n/// let tuple = future::join3(a, b, c);\n\n///\n\n/// assert_eq!(tuple.await, (1, 2, 3));\n\n/// # });\n\n/// 
```\n\npub fn join3<Fut1, Fut2, Fut3>(\n\n future1: Fut1,\n\n future2: Fut2,\n\n future3: Fut3,\n\n) -> Join3<Fut1, Fut2, Fut3>\n\nwhere\n\n Fut1: Future,\n\n Fut2: Future,\n\n Fut3: Future,\n\n{\n\n Join3::new(future1, future2, future3)\n\n}\n\n\n", "file_path": "futures-util/src/future/join.rs", "rank": 6, "score": 377298.50075609126 }, { "content": "#[doc(hidden)]\n\npub fn poll<F: Future + Unpin>(future: F) -> PollOnce<F> {\n\n PollOnce { future }\n\n}\n\n\n\n#[allow(missing_debug_implementations)]\n\n#[doc(hidden)]\n\npub struct PollOnce<F: Future + Unpin> {\n\n future: F,\n\n}\n\n\n\nimpl<F: Future + Unpin> Future for PollOnce<F> {\n\n type Output = Poll<F::Output>;\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n Poll::Ready(self.future.poll_unpin(cx))\n\n }\n\n}\n", "file_path": "futures-util/src/async_await/poll.rs", "rank": 8, "score": 358228.8452845872 }, { "content": "/// Creates a new future wrapping around a function returning [`Poll`].\n\n///\n\n/// Polling the returned future delegates to the wrapped function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future::poll_fn;\n\n/// use futures::task::{Context, Poll};\n\n///\n\n/// fn read_line(_cx: &mut Context<'_>) -> Poll<String> {\n\n/// Poll::Ready(\"Hello, World!\".into())\n\n/// }\n\n///\n\n/// let read_future = poll_fn(read_line);\n\n/// assert_eq!(read_future.await, \"Hello, World!\".to_owned());\n\n/// # });\n\n/// ```\n\npub fn poll_fn<T, F>(f: F) -> PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<T>\n\n{\n\n PollFn { f }\n\n}\n\n\n\nimpl<F> fmt::Debug for PollFn<F> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"PollFn\").finish()\n\n }\n\n}\n\n\n\nimpl<T, F> Future for PollFn<F>\n\n where F: FnMut(&mut Context<'_>) -> Poll<T>,\n\n{\n\n type Output = T;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n (&mut 
self.f)(cx)\n\n }\n\n}\n", "file_path": "futures-util/src/future/poll_fn.rs", "rank": 9, "score": 321921.7650932353 }, { "content": "/// Creates a new stream wrapping a function returning `Poll<Option<T>>`.\n\n///\n\n/// Polling the returned stream calls the wrapped function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::stream::poll_fn;\n\n/// use futures::task::Poll;\n\n///\n\n/// let mut counter = 1usize;\n\n///\n\n/// let read_stream = poll_fn(move |_| -> Poll<Option<String>> {\n\n/// if counter == 0 { return Poll::Ready(None); }\n\n/// counter -= 1;\n\n/// Poll::Ready(Some(\"Hello, World!\".to_owned()))\n\n/// });\n\n/// ```\n\npub fn poll_fn<T, F>(f: F) -> PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<Option<T>>,\n\n{\n\n PollFn { f }\n\n}\n\n\n\nimpl<T, F> Stream for PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<Option<T>>,\n\n{\n\n type Item = T;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {\n\n (&mut self.f)(cx)\n\n }\n\n}\n", "file_path": "futures-util/src/stream/poll_fn.rs", "rank": 10, "score": 318508.5437324672 }, { "content": "/// Run a future to completion on the current thread.\n\n///\n\n/// This function will block the caller until the given future has completed.\n\n///\n\n/// Use a [`LocalPool`](LocalPool) if you need finer-grained control over\n\n/// spawned tasks.\n\npub fn block_on<F: Future>(f: F) -> F::Output {\n\n pin_mut!(f);\n\n run_executor(|cx| f.as_mut().poll(cx))\n\n}\n\n\n", "file_path": "futures-executor/src/local_pool.rs", "rank": 11, "score": 316147.55633290124 }, { "content": "// Set up and run a basic single-threaded spawner loop, invoking `f` on each\n\n// turn.\n\nfn run_executor<T, F: FnMut(&mut Context<'_>) -> Poll<T>>(mut f: F) -> T {\n\n let _enter = enter().expect(\n\n \"cannot execute `LocalPool` executor from within \\\n\n another executor\",\n\n );\n\n\n\n CURRENT_THREAD_NOTIFY.with(|thread_notify| {\n\n let waker = 
waker_ref(thread_notify);\n\n let mut cx = Context::from_waker(&waker);\n\n loop {\n\n if let Poll::Ready(t) = f(&mut cx) {\n\n return t;\n\n }\n\n // Consume the wakeup that occurred while executing `f`, if any.\n\n let unparked = thread_notify.unparked.swap(false, Ordering::Acquire);\n\n if !unparked {\n\n // No wakeup occurred. It may occur now, right before parking,\n\n // but in that case the token made available by `unpark()`\n\n // is guaranteed to still be available and `park()` is a no-op.\n\n thread::park();\n\n // When the thread is unparked, `unparked` will have been set\n\n // and needs to be unset before the next call to `f` to avoid\n\n // a redundant loop iteration.\n\n thread_notify.unparked.store(false, Ordering::Release);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "futures-executor/src/local_pool.rs", "rank": 12, "score": 306993.8347833206 }, { "content": "/// Creates a `Stream` from a seed and a closure returning a `Future`.\n\n///\n\n/// This function is the dual for the `Stream::fold()` adapter: while\n\n/// `Stream::fold()` reduces a `Stream` to one single value, `unfold()` creates a\n\n/// `Stream` from a seed value.\n\n///\n\n/// `unfold()` will call the provided closure with the provided seed, then wait\n\n/// for the returned `Future` to complete with `(a, b)`. 
It will then yield the\n\n/// value `a`, and use `b` as the next internal state.\n\n///\n\n/// If the closure returns `None` instead of `Some(Future)`, then the `unfold()`\n\n/// will stop producing items and return `Poll::Ready(None)` in future\n\n/// calls to `poll()`.\n\n///\n\n/// This function can typically be used when wanting to go from the \"world of\n\n/// futures\" to the \"world of streams\": the provided closure can build a\n\n/// `Future` using other library functions working on futures, and `unfold()`\n\n/// will turn it into a `Stream` by repeating the operation.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::unfold(0, |state| async move {\n\n/// if state <= 2 {\n\n/// let next_state = state + 1;\n\n/// let yielded = state * 2;\n\n/// Some((yielded, next_state))\n\n/// } else {\n\n/// None\n\n/// }\n\n/// });\n\n///\n\n/// let result = stream.collect::<Vec<i32>>().await;\n\n/// assert_eq!(result, vec![0, 2, 4]);\n\n/// # });\n\n/// ```\n\npub fn unfold<T, F, Fut, Item>(init: T, f: F) -> Unfold<T, F, Fut>\n\n where F: FnMut(T) -> Fut,\n\n Fut: Future<Output = Option<(Item, T)>>,\n\n{\n\n Unfold {\n\n f,\n\n state: Some(init),\n\n fut: None,\n\n }\n\n}\n\n\n\n/// Stream for the [`unfold`] function.\n\n#[pin_project]\n\n#[must_use = \"streams do nothing unless polled\"]\n\npub struct Unfold<T, F, Fut> {\n\n f: F,\n\n state: Option<T>,\n\n #[pin]\n\n fut: Option<Fut>,\n\n}\n", "file_path": "futures-util/src/stream/unfold.rs", "rank": 13, "score": 294987.905800213 }, { "content": "fn poll_executor<T, F: FnMut(&mut Context<'_>) -> T>(mut f: F) -> T {\n\n let _enter = enter().expect(\n\n \"cannot execute `LocalPool` executor from within \\\n\n another executor\",\n\n );\n\n\n\n CURRENT_THREAD_NOTIFY.with(|thread_notify| {\n\n let waker = waker_ref(thread_notify);\n\n let mut cx = Context::from_waker(&waker);\n\n f(&mut cx)\n\n 
})\n\n}\n\n\n\nimpl LocalPool {\n\n /// Create a new, empty pool of tasks.\n\n pub fn new() -> Self {\n\n Self {\n\n pool: FuturesUnordered::new(),\n\n incoming: Default::default(),\n\n }\n", "file_path": "futures-executor/src/local_pool.rs", "rank": 14, "score": 292483.3494449776 }, { "content": "/// Creates a new stream that repeats elements of type `A` endlessly by\n\n/// applying the provided closure, the repeater, `F: FnMut() -> A`.\n\n///\n\n/// The `repeat_with()` function calls the repeater over and over again.\n\n///\n\n/// Infinite stream like `repeat_with()` are often used with adapters like\n\n/// [`stream.take()`], in order to make them finite.\n\n///\n\n/// If the element type of the stream you need implements [`Clone`], and\n\n/// it is OK to keep the source element in memory, you should instead use\n\n/// the [`stream.repeat()`] function.\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// // let's assume we have some value of a type that is not `Clone`\n\n/// // or which don't want to have in memory just yet because it is expensive:\n\n/// #[derive(PartialEq, Debug)]\n\n/// struct Expensive;\n\n///\n\n/// // a particular value forever:\n\n/// let mut things = stream::repeat_with(|| Expensive);\n\n///\n\n/// assert_eq!(Some(Expensive), things.next().await);\n\n/// assert_eq!(Some(Expensive), things.next().await);\n\n/// assert_eq!(Some(Expensive), things.next().await);\n\n/// # });\n\n/// ```\n\n///\n\n/// Using mutation and going finite:\n\n///\n\n/// ```rust\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// // From the zeroth to the third power of two:\n\n/// let mut curr = 1;\n\n/// let mut pow2 = stream::repeat_with(|| { let tmp = curr; curr *= 2; tmp })\n\n/// .take(4);\n\n///\n\n/// assert_eq!(Some(1), pow2.next().await);\n\n/// assert_eq!(Some(2), 
pow2.next().await);\n\n/// assert_eq!(Some(4), pow2.next().await);\n\n/// assert_eq!(Some(8), pow2.next().await);\n\n///\n\n/// // ... and now we're done\n\n/// assert_eq!(None, pow2.next().await);\n\n/// # });\n\n/// ```\n\npub fn repeat_with<A, F: FnMut() -> A>(repeater: F) -> RepeatWith<F> {\n\n RepeatWith { repeater }\n\n}\n", "file_path": "futures-util/src/stream/repeat_with.rs", "rank": 15, "score": 282799.4140726735 }, { "content": "/// Creates a `TryStream` from a seed and a closure returning a `TryFuture`.\n\n///\n\n/// This function is the dual for the `TryStream::try_fold()` adapter: while\n\n/// `TryStream::try_fold()` reduces a `TryStream` to one single value,\n\n/// `try_unfold()` creates a `TryStream` from a seed value.\n\n///\n\n/// `try_unfold()` will call the provided closure with the provided seed, then\n\n/// wait for the returned `TryFuture` to complete with `(a, b)`. It will then\n\n/// yield the value `a`, and use `b` as the next internal state.\n\n///\n\n/// If the closure returns `None` instead of `Some(TryFuture)`, then the\n\n/// `try_unfold()` will stop producing items and return `Poll::Ready(None)` in\n\n/// future calls to `poll()`.\n\n///\n\n/// In case of error generated by the returned `TryFuture`, the error will be\n\n/// returned by the `TryStream`. 
The `TryStream` will then yield\n\n/// `Poll::Ready(None)` in future calls to `poll()`.\n\n///\n\n/// This function can typically be used when wanting to go from the \"world of\n\n/// futures\" to the \"world of streams\": the provided closure can build a\n\n/// `TryFuture` using other library functions working on futures, and\n\n/// `try_unfold()` will turn it into a `TryStream` by repeating the operation.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # #[derive(Debug, PartialEq)]\n\n/// # struct SomeError;\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, TryStreamExt};\n\n///\n\n/// let stream = stream::try_unfold(0, |state| async move {\n\n/// if state < 0 {\n\n/// return Err(SomeError);\n\n/// }\n\n///\n\n/// if state <= 2 {\n\n/// let next_state = state + 1;\n\n/// let yielded = state * 2;\n\n/// Ok(Some((yielded, next_state)))\n\n/// } else {\n\n/// Ok(None)\n\n/// }\n\n/// });\n\n///\n\n/// let result: Result<Vec<i32>, _> = stream.try_collect().await;\n\n/// assert_eq!(result, Ok(vec![0, 2, 4]));\n\n/// # });\n\n/// ```\n\npub fn try_unfold<T, F, Fut, Item>(init: T, f: F) -> TryUnfold<T, F, Fut>\n\nwhere\n\n F: FnMut(T) -> Fut,\n\n Fut: TryFuture<Ok = Option<(Item, T)>>,\n\n{\n\n TryUnfold {\n\n f,\n\n state: Some(init),\n\n fut: None,\n\n }\n\n}\n\n\n\n/// Stream for the [`try_unfold`] function.\n\n#[pin_project]\n\n#[must_use = \"streams do nothing unless polled\"]\n\npub struct TryUnfold<T, F, Fut> {\n\n f: F,\n\n state: Option<T>,\n\n #[pin]\n\n fut: Option<Fut>,\n", "file_path": "futures-util/src/stream/try_stream/try_unfold.rs", "rank": 16, "score": 282790.0714685435 }, { "content": "/// Waits for either one of two differently-typed futures to complete.\n\n///\n\n/// This function will return a new future which awaits for either one of both\n\n/// futures to complete. 
The returned future will finish with both the value\n\n/// resolved and a future representing the completion of the other work.\n\n///\n\n/// Note that this function consumes the receiving futures and returns a\n\n/// wrapped version of them.\n\n///\n\n/// Also note that if both this and the second future have the same\n\n/// output type you can use the `Either::factor_first` method to\n\n/// conveniently extract out the value at the end.\n\n///\n\n/// # Examples\n\n///\n\n/// A simple example\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future::{self, Either};\n\n/// use futures::pin_mut;\n\n///\n\n/// // These two futures have different types even though their outputs have the same type\n\n/// let future1 = async { 1 };\n\n/// let future2 = async { 2 };\n\n///\n\n/// // 'select' requires Future + Unpin bounds\n\n/// pin_mut!(future1);\n\n/// pin_mut!(future2);\n\n///\n\n/// let value = match future::select(future1, future2).await {\n\n/// Either::Left((value1, _)) => value1, // `value1` is resolved from `future1`\n\n/// // `_` represents `future2`\n\n/// Either::Right((value2, _)) => value2, // `value2` is resolved from `future2`\n\n/// // `_` represents `future1`\n\n/// };\n\n///\n\n/// assert!(value == 1 || value == 2);\n\n/// # });\n\n/// ```\n\n///\n\n/// A more complex example\n\n///\n\n/// ```\n\n/// use futures::future::{self, Either, Future, FutureExt};\n\n///\n\n/// // A poor-man's join implemented on top of select\n\n///\n\n/// fn join<A, B>(a: A, b: B) -> impl Future<Output=(A::Output, B::Output)>\n\n/// where A: Future + Unpin,\n\n/// B: Future + Unpin,\n\n/// {\n\n/// future::select(a, b).then(|either| {\n\n/// match either {\n\n/// Either::Left((x, b)) => b.map(move |y| (x, y)).left_future(),\n\n/// Either::Right((y, a)) => a.map(move |x| (x, y)).right_future(),\n\n/// }\n\n/// })\n\n/// }\n\n/// ```\n\npub fn select<A, B>(future1: A, future2: B) -> Select<A, B>\n\n where A: Future + Unpin, B: Future + 
Unpin\n\n{\n\n Select { inner: Some((future1, future2)) }\n\n}\n\n\n\nimpl<A, B> Future for Select<A, B>\n\nwhere\n\n A: Future + Unpin,\n\n B: Future + Unpin,\n\n{\n\n type Output = Either<(A::Output, B), (B::Output, A)>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let (mut a, mut b) = self.inner.take().expect(\"cannot poll Select twice\");\n\n match a.poll_unpin(cx) {\n\n Poll::Ready(x) => Poll::Ready(Either::Left((x, b))),\n\n Poll::Pending => match b.poll_unpin(cx) {\n\n Poll::Ready(x) => Poll::Ready(Either::Right((x, a))),\n\n Poll::Pending => {\n", "file_path": "futures-util/src/future/select.rs", "rank": 17, "score": 261068.31604390798 }, { "content": "#[doc(hidden)]\n\npub fn shuffle<T>(slice: &mut [T]) {\n\n for i in (1..slice.len()).rev() {\n\n slice.swap(i, gen_index(i + 1));\n\n }\n\n}\n\n\n", "file_path": "futures-util/src/async_await/random.rs", "rank": 18, "score": 257319.8175797143 }, { "content": "/// Creates a stream of a single element.\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::once(async { 17 });\n\n/// let collected = stream.collect::<Vec<i32>>().await;\n\n/// assert_eq!(collected, vec![17]);\n\n/// # });\n\n/// ```\n\npub fn once<Fut: Future>(future: Fut) -> Once<Fut> {\n\n Once::new(future)\n\n}\n\n\n\n/// A stream which emits single element and then EOF.\n\n#[pin_project]\n\n#[derive(Debug)]\n\n#[must_use = \"streams do nothing unless polled\"]\n\npub struct Once<Fut> {\n\n #[pin]\n\n future: Option<Fut>\n\n}\n\n\n\nimpl<Fut> Once<Fut> {\n\n pub(crate) fn new(future: Fut) -> Self {\n\n Self { future: Some(future) }\n\n }\n\n}\n\n\n\nimpl<Fut: Future> Stream for Once<Fut> {\n", "file_path": "futures-util/src/stream/once.rs", "rank": 19, "score": 251459.8036446523 }, { "content": "/// Waits for either one of two differently-typed futures to complete.\n\n///\n\n/// This function will return a new 
future which awaits for either one of both\n\n/// futures to complete. The returned future will finish with both the value\n\n/// resolved and a future representing the completion of the other work.\n\n///\n\n/// Note that this function consumes the receiving futures and returns a\n\n/// wrapped version of them.\n\n///\n\n/// Also note that if both this and the second future have the same\n\n/// success/error type you can use the `Either::factor_first` method to\n\n/// conveniently extract out the value at the end.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::future::{self, Either, Future, FutureExt, TryFuture, TryFutureExt};\n\n///\n\n/// // A poor-man's try_join implemented on top of select\n\n///\n\n/// fn try_join<A, B, E>(a: A, b: B) -> impl TryFuture<Ok=(A::Ok, B::Ok), Error=E>\n\n/// where A: TryFuture<Error = E> + Unpin + 'static,\n\n/// B: TryFuture<Error = E> + Unpin + 'static,\n\n/// E: 'static,\n\n/// {\n\n/// future::try_select(a, b).then(|res| -> Box<dyn Future<Output = Result<_, _>> + Unpin> {\n\n/// match res {\n\n/// Ok(Either::Left((x, b))) => Box::new(b.map_ok(move |y| (x, y))),\n\n/// Ok(Either::Right((y, a))) => Box::new(a.map_ok(move |x| (x, y))),\n\n/// Err(Either::Left((e, _))) => Box::new(future::err(e)),\n\n/// Err(Either::Right((e, _))) => Box::new(future::err(e)),\n\n/// }\n\n/// })\n\n/// }\n\n/// ```\n\npub fn try_select<A, B>(future1: A, future2: B) -> TrySelect<A, B>\n\n where A: TryFuture + Unpin, B: TryFuture + Unpin\n\n{\n\n TrySelect { inner: Some((future1, future2)) }\n\n}\n\n\n\nimpl<A: Unpin, B: Unpin> Future for TrySelect<A, B>\n\n where A: TryFuture, B: TryFuture\n\n{\n\n #[allow(clippy::type_complexity)]\n\n type Output = Result<\n\n Either<(A::Ok, B), (B::Ok, A)>,\n\n Either<(A::Error, B), (B::Error, A)>,\n\n >;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let (mut a, mut b) = self.inner.take().expect(\"cannot poll Select twice\");\n\n match 
a.try_poll_unpin(cx) {\n\n Poll::Ready(Err(x)) => Poll::Ready(Err(Either::Left((x, b)))),\n\n Poll::Ready(Ok(x)) => Poll::Ready(Ok(Either::Left((x, b)))),\n", "file_path": "futures-util/src/future/try_select.rs", "rank": 20, "score": 251384.261030588 }, { "content": "/// Creates a new future that allows delayed execution of a closure.\n\n///\n\n/// The provided closure is only run once the future is polled.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = future::lazy(|_| 1);\n\n/// assert_eq!(a.await, 1);\n\n///\n\n/// let b = future::lazy(|_| -> i32 {\n\n/// panic!(\"oh no!\")\n\n/// });\n\n/// drop(b); // closure is never run\n\n/// # });\n\n/// ```\n\npub fn lazy<F, R>(f: F) -> Lazy<F>\n\n where F: FnOnce(&mut Context<'_>) -> R,\n\n{\n\n Lazy { f: Some(f) }\n\n}\n\n\n\nimpl<F, R> FusedFuture for Lazy<F>\n\n where F: FnOnce(&mut Context<'_>) -> R,\n\n{\n\n fn is_terminated(&self) -> bool { self.f.is_none() }\n\n}\n\n\n\nimpl<F, R> Future for Lazy<F>\n\n where F: FnOnce(&mut Context<'_>) -> R,\n\n{\n\n type Output = R;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<R> {\n\n Poll::Ready((self.f.take().expect(\"Lazy polled after completion\"))(cx))\n\n }\n\n}\n", "file_path": "futures-util/src/future/lazy.rs", "rank": 21, "score": 248696.30254626676 }, { "content": "/// Creates a new `Abortable` future and a `AbortHandle` which can be used to stop it.\n\n///\n\n/// This function is a convenient (but less flexible) alternative to calling\n\n/// `AbortHandle::new` and `Abortable::new` manually.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\npub fn abortable<Fut>(future: Fut) -> (Abortable<Fut>, AbortHandle)\n\n where Fut: Future\n\n{\n\n let (handle, reg) = AbortHandle::new_pair();\n\n (\n\n Abortable::new(future, reg),\n\n handle,\n\n 
)\n\n}\n\n\n\n/// Indicator that the `Abortable` future was aborted.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct Aborted;\n\n\n\nimpl fmt::Display for Aborted {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"`Abortable` future has been aborted\")\n\n }\n\n}\n\n\n", "file_path": "futures-util/src/future/abortable.rs", "rank": 22, "score": 241899.17452717264 }, { "content": "fn iter_pin_mut<T>(slice: Pin<&mut [T]>) -> impl Iterator<Item = Pin<&mut T>> {\n\n // Safety: `std` _could_ make this unsound if it were to decide Pin's\n\n // invariants aren't required to transmit through slices. Otherwise this has\n\n // the same safety as a normal field pin projection.\n\n unsafe { slice.get_unchecked_mut() }\n\n .iter_mut()\n\n .map(|t| unsafe { Pin::new_unchecked(t) })\n\n}\n\n\n\n/// Future for the [`join_all`] function.\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct JoinAll<F>\n\nwhere\n\n F: Future,\n\n{\n\n elems: Pin<Box<[MaybeDone<F>]>>,\n\n}\n\n\n\nimpl<F> fmt::Debug for JoinAll<F>\n\nwhere\n\n F: Future + fmt::Debug,\n\n F::Output: fmt::Debug,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"JoinAll\")\n\n .field(\"elems\", &self.elems)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "futures-util/src/future/join_all.rs", "rank": 23, "score": 241629.32803484198 }, { "content": "fn iter_pin_mut<T>(slice: Pin<&mut [T]>) -> impl Iterator<Item = Pin<&mut T>> {\n\n // Safety: `std` _could_ make this unsound if it were to decide Pin's\n\n // invariants aren't required to transmit through slices. 
Otherwise this has\n\n // the same safety as a normal field pin projection.\n\n unsafe { slice.get_unchecked_mut() }\n\n .iter_mut()\n\n .map(|t| unsafe { Pin::new_unchecked(t) })\n\n}\n\n\n", "file_path": "futures-util/src/future/try_join_all.rs", "rank": 24, "score": 239857.5360439446 }, { "content": "/// Wraps a future into a `MaybeDone`\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n/// use futures::pin_mut;\n\n///\n\n/// let future = future::maybe_done(async { 5 });\n\n/// pin_mut!(future);\n\n/// assert_eq!(future.as_mut().take_output(), None);\n\n/// let () = future.as_mut().await;\n\n/// assert_eq!(future.as_mut().take_output(), Some(5));\n\n/// assert_eq!(future.as_mut().take_output(), None);\n\n/// # });\n\n/// ```\n\npub fn maybe_done<Fut: Future>(future: Fut) -> MaybeDone<Fut> {\n\n MaybeDone::Future(future)\n\n}\n\n\n\nimpl<Fut: Future> MaybeDone<Fut> {\n\n /// Returns an [`Option`] containing a mutable reference to the output of the future.\n\n /// The output of this method will be [`Some`] if and only if the inner\n\n /// future has been completed and [`take_output`](MaybeDone::take_output)\n\n /// has not yet been called.\n\n #[inline]\n\n pub fn output_mut(self: Pin<&mut Self>) -> Option<&mut Fut::Output> {\n\n match self.project() {\n\n MaybeDoneProj::Done(res) => Some(res),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Attempt to take the output of a `MaybeDone` without driving it\n\n /// towards completion.\n\n #[inline]\n", "file_path": "futures-util/src/future/maybe_done.rs", "rank": 25, "score": 238612.0603798718 }, { "content": "type Never = <fn() -> ! 
as MyTrait>::Output;\n\n\n\n\n", "file_path": "futures/tests/try_join.rs", "rank": 26, "score": 233582.414178059 }, { "content": "/// Wraps a future into a `TryMaybeDone`\n\npub fn try_maybe_done<Fut: TryFuture>(future: Fut) -> TryMaybeDone<Fut> {\n\n TryMaybeDone::Future(future)\n\n}\n\n\n\nimpl<Fut: TryFuture> TryMaybeDone<Fut> {\n\n /// Returns an [`Option`] containing a mutable reference to the output of the future.\n\n /// The output of this method will be [`Some`] if and only if the inner\n\n /// future has completed successfully and [`take_output`](TryMaybeDone::take_output)\n\n /// has not yet been called.\n\n #[inline]\n\n pub fn output_mut(self: Pin<&mut Self>) -> Option<&mut Fut::Ok> {\n\n match self.project() {\n\n TryMaybeDoneProj::Done(res) => Some(res),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Attempt to take the output of a `TryMaybeDone` without driving it\n\n /// towards completion.\n\n #[inline]\n", "file_path": "futures-util/src/future/try_maybe_done.rs", "rank": 27, "score": 230764.35836289648 }, { "content": "/// Get a reference to a singleton instance of [`NoopSpawner`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::task::SpawnExt;\n\n/// use futures_test::task::noop_spawner_mut;\n\n///\n\n/// let spawner = noop_spawner_mut();\n\n/// spawner.spawn(async { }).unwrap();\n\n/// ```\n\npub fn noop_spawner_mut() -> &'static mut NoopSpawner {\n\n Box::leak(Box::new(NoopSpawner::new()))\n\n}\n", "file_path": "futures-test/src/task/noop_spawner.rs", "rank": 28, "score": 227276.82831352737 }, { "content": "/// Get a reference to a singleton instance of [`PanicSpawner`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures::task::SpawnExt;\n\n/// use futures_test::task::panic_spawner_mut;\n\n///\n\n/// let spawner = panic_spawner_mut();\n\n/// spawner.spawn(async { })?; // Will panic\n\n/// # Ok::<(), Box<dyn std::error::Error>>(())\n\n/// ```\n\npub fn panic_spawner_mut() -> &'static mut PanicSpawner {\n\n 
Box::leak(Box::new(PanicSpawner::new()))\n\n}\n", "file_path": "futures-test/src/task/panic_spawner.rs", "rank": 29, "score": 227276.02105447356 }, { "content": "fn with_context<T, R, F>(compat: &mut Compat<T>, f: F) -> R\n\nwhere\n\n T: Unpin,\n\n F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> R,\n\n{\n\n let current = Current::new();\n\n let waker = current.as_waker();\n\n let mut cx = Context::from_waker(&waker);\n\n f(Pin::new(&mut compat.inner), &mut cx)\n\n}\n\n\n", "file_path": "futures-util/src/compat/compat03as01.rs", "rank": 30, "score": 219546.77611478505 }, { "content": "type SendMsg<Fut> = Result<<Fut as Future>::Output, Box<(dyn Any + Send + 'static)>>;\n\n\n\n/// A future which sends its output to the corresponding `RemoteHandle`.\n\n/// Created by [`remote_handle`](crate::future::FutureExt::remote_handle).\n\n#[pin_project]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"channel\")))]\n\npub struct Remote<Fut: Future> {\n\n tx: Option<Sender<SendMsg<Fut>>>,\n\n keep_running: Arc<AtomicBool>,\n\n #[pin]\n\n future: CatchUnwind<AssertUnwindSafe<Fut>>,\n\n}\n\n\n\nimpl<Fut: Future + fmt::Debug> fmt::Debug for Remote<Fut> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_tuple(\"Remote\")\n\n .field(&self.future)\n\n .finish()\n\n }\n", "file_path": "futures-util/src/future/future/remote_handle.rs", "rank": 31, "score": 215248.67967254837 }, { "content": "/// Create a sink from a function which processes one item at a time.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::sink::{self, SinkExt};\n\n///\n\n/// let unfold = sink::unfold(0, |mut sum, i: i32| {\n\n/// async move {\n\n/// sum += i;\n\n/// eprintln!(\"{}\", i);\n\n/// Ok::<_, futures::never::Never>(sum)\n\n/// }\n\n/// });\n\n/// futures::pin_mut!(unfold);\n\n/// unfold.send(5).await?;\n\n/// # Ok::<(), futures::never::Never>(()) 
}).unwrap();\n\n/// ```\n\npub fn unfold<T, F, R>(init: T, function: F) -> Unfold<T, F, R> {\n\n Unfold {\n\n state: Some(init),\n\n function,\n\n future: None,\n\n }\n\n}\n\n\n\nimpl<T, F, R, Item, E> Sink<Item> for Unfold<T, F, R>\n\nwhere\n\n F: FnMut(T, Item) -> R,\n\n R: Future<Output = Result<T, E>>,\n\n{\n\n type Error = E;\n\n\n\n fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.poll_flush(cx)\n\n }\n\n\n\n fn start_send(self: Pin<&mut Self>, item: Item) -> Result<(), Self::Error> {\n", "file_path": "futures-util/src/sink/unfold.rs", "rank": 32, "score": 213250.766860026 }, { "content": "struct StateFn<S, F> {\n\n state: S,\n\n f: F,\n\n}\n\n\n\n/// Stream for the [`scan`](super::StreamExt::scan) method.\n\n#[pin_project]\n\n#[must_use = \"streams do nothing unless polled\"]\n\npub struct Scan<St: Stream, S, Fut, F> {\n\n #[pin]\n\n stream: St,\n\n state_f: Option<StateFn<S, F>>,\n\n #[pin]\n\n future: Option<Fut>,\n\n}\n\n\n\nimpl<St, S, Fut, F> fmt::Debug for Scan<St, S, Fut, F>\n\nwhere\n\n St: Stream + fmt::Debug,\n\n St::Item: fmt::Debug,\n", "file_path": "futures-util/src/stream/stream/scan.rs", "rank": 33, "score": 211344.46022070106 }, { "content": "#[doc(hidden)]\n\npub fn pending_once() -> PendingOnce {\n\n PendingOnce { is_ready: false }\n\n}\n\n\n\n#[allow(missing_debug_implementations)]\n\n#[doc(hidden)]\n\npub struct PendingOnce {\n\n is_ready: bool,\n\n}\n\n\n\nimpl Future for PendingOnce {\n\n type Output = ();\n\n fn poll(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.is_ready {\n\n Poll::Ready(())\n\n } else {\n\n self.is_ready = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n", "file_path": "futures-util/src/async_await/pending.rs", "rank": 34, "score": 206835.26207548444 }, { "content": "#[cfg(feature = \"sink\")]\n\nfn with_sink_context<T, Item, R, F>(compat: &mut CompatSink<T, Item>, f: F) -> R\n\nwhere\n\n T: Unpin,\n\n F: FnOnce(Pin<&mut T>, &mut 
Context<'_>) -> R,\n\n{\n\n let current = Current::new();\n\n let waker = current.as_waker();\n\n let mut cx = Context::from_waker(&waker);\n\n f(Pin::new(&mut compat.inner), &mut cx)\n\n}\n\n\n\n#[cfg(feature = \"io-compat\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"io-compat\")))]\n\nmod io {\n\n use super::*;\n\n use futures_io::{AsyncRead as AsyncRead03, AsyncWrite as AsyncWrite03};\n\n use tokio_io::{AsyncRead as AsyncRead01, AsyncWrite as AsyncWrite01};\n\n\n\n fn poll_03_to_io<T>(x: task03::Poll<Result<T, std::io::Error>>)\n\n -> Result<T, std::io::Error>\n", "file_path": "futures-util/src/compat/compat03as01.rs", "rank": 35, "score": 205485.8236086947 }, { "content": "#[doc(hidden)]\n\npub fn assert_is_unpin_stream<S: Stream + Unpin>(_: &mut S) {}\n\n\n\n/// Assert that the next poll to the provided stream will return\n\n/// [`Poll::Pending`](futures_core::task::Poll::Pending).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::stream;\n\n/// use futures_test::future::FutureTestExt;\n\n/// use futures_test::{\n\n/// assert_stream_pending, assert_stream_next, assert_stream_done,\n\n/// };\n\n/// use futures::pin_mut;\n\n///\n\n/// let stream = stream::once((async { 5 }).pending_once());\n\n/// pin_mut!(stream);\n\n///\n\n/// assert_stream_pending!(stream);\n\n/// assert_stream_next!(stream, 5);\n", "file_path": "futures-test/src/assert.rs", "rank": 36, "score": 204039.67000047528 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn assert_unpin<T: Unpin>(_: &T) {}\n\n\n", "file_path": "futures-util/src/async_await/mod.rs", "rank": 37, "score": 187051.96754967116 }, { "content": "#[test]\n\nfn poll_and_pending() {\n\n use futures::{pending, pin_mut, poll};\n\n use futures::executor::block_on;\n\n use futures::task::Poll;\n\n\n\n let pending_once = async { pending!() };\n\n block_on(async {\n\n pin_mut!(pending_once);\n\n assert_eq!(Poll::Pending, poll!(&mut pending_once));\n\n assert_eq!(Poll::Ready(()), poll!(&mut pending_once));\n\n 
});\n\n}\n\n\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 38, "score": 185158.99237253715 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn assert_fused_future<T: Future + FusedFuture>(_: &T) {}\n\n\n", "file_path": "futures-util/src/async_await/mod.rs", "rank": 39, "score": 182529.12169533529 }, { "content": "#[test]\n\nfn poll() {\n\n use futures::{\n\n executor::block_on,\n\n future::FutureExt,\n\n poll,\n\n };\n\n\n\n block_on(async {\n\n let _ = poll!(async {}.boxed(),);\n\n })\n\n}\n\n\n", "file_path": "futures/tests/macro_comma_support.rs", "rank": 40, "score": 180975.2114869089 }, { "content": "/// Creates a future which copies all the bytes from one object to another.\n\n///\n\n/// The returned future will copy all the bytes read from this `AsyncRead` into the\n\n/// `writer` specified. This future will only complete once the `reader` has hit\n\n/// EOF and all bytes have been written to and flushed from the `writer`\n\n/// provided.\n\n///\n\n/// On success the number of bytes is returned.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncWriteExt, Cursor};\n\n///\n\n/// let reader = Cursor::new([1, 2, 3, 4]);\n\n/// let mut writer = Cursor::new(vec![0u8; 5]);\n\n///\n\n/// let bytes = io::copy(reader, &mut writer).await?;\n\n/// writer.close().await?;\n\n///\n\n/// assert_eq!(bytes, 4);\n\n/// assert_eq!(writer.into_inner(), [1, 2, 3, 4, 0]);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn copy<R, W>(reader: R, writer: &mut W) -> Copy<'_, R, W>\n\nwhere\n\n R: AsyncRead,\n\n W: AsyncWrite + Unpin + ?Sized,\n\n{\n\n Copy {\n\n inner: copy_buf(BufReader::new(reader), writer),\n\n }\n\n}\n\n\n\n/// Future for the [`copy()`] function.\n\n#[pin_project]\n\n#[derive(Debug)]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct Copy<'a, R, W: ?Sized> {\n\n #[pin]\n\n inner: CopyBuf<'a, 
BufReader<R>, W>,\n\n}\n\n\n\nimpl<R: AsyncRead, W: AsyncWrite + Unpin + ?Sized> Future for Copy<'_, R, W> {\n\n type Output = io::Result<u64>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n self.project().inner.poll(cx)\n\n }\n\n}\n", "file_path": "futures-util/src/io/copy.rs", "rank": 41, "score": 179538.34945486023 }, { "content": "#[test]\n\nfn select_can_be_used_as_expression() {\n\n use futures::select;\n\n use futures::executor::block_on;\n\n use futures::future;\n\n\n\n block_on(async {\n\n let res = select! {\n\n x = future::ready(7) => x,\n\n y = future::ready(3) => y + 1,\n\n };\n\n assert!(res == 7 || res == 4);\n\n });\n\n}\n\n\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 42, "score": 178748.15857348728 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn assert_fused_stream<T: Stream + FusedStream>(_: &T) {}\n", "file_path": "futures-util/src/async_await/mod.rs", "rank": 43, "score": 176472.79709636013 }, { "content": "struct Inner<Fut: Future> {\n\n future_or_output: UnsafeCell<FutureOrOutput<Fut>>,\n\n notifier: Arc<Notifier>,\n\n}\n\n\n", "file_path": "futures-util/src/future/future/shared.rs", "rank": 44, "score": 176139.6108407608 }, { "content": "#[test]\n\nfn select_with_complete_can_be_used_as_expression() {\n\n use futures::select;\n\n use futures::executor::block_on;\n\n use futures::future;\n\n\n\n block_on(async {\n\n let res = select! 
{\n\n x = future::pending::<i32>() => x,\n\n y = future::pending::<i32>() => y + 1,\n\n default => 99,\n\n complete => 237,\n\n };\n\n assert_eq!(res, 237);\n\n });\n\n}\n\n\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 45, "score": 175717.1126946963 }, { "content": "#[test]\n\nfn select_with_default_can_be_used_as_expression() {\n\n use futures::select;\n\n use futures::executor::block_on;\n\n use futures::future::{FutureExt, poll_fn};\n\n use futures::task::{Context, Poll};\n\n\n\n fn poll_always_pending<T>(_cx: &mut Context<'_>) -> Poll<T> {\n\n Poll::Pending\n\n }\n\n\n\n block_on(async {\n\n let res = select! {\n\n x = poll_fn(poll_always_pending::<i32>).fuse() => x,\n\n y = poll_fn(poll_always_pending::<i32>).fuse() => y + 1,\n\n default => 99,\n\n };\n\n assert_eq!(res, 99);\n\n });\n\n}\n\n\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 46, "score": 175717.1126946963 }, { "content": "fn run<F: Future + Send + 'static>(future: F) {\n\n let tp = ThreadPool::new().unwrap();\n\n tp.spawn(future.map(drop)).unwrap();\n\n}\n\n\n", "file_path": "futures/tests/eventual.rs", "rank": 47, "score": 174340.16108513693 }, { "content": "/// Creates a future which copies all the bytes from one object to another.\n\n///\n\n/// The returned future will copy all the bytes read from this `AsyncBufRead` into the\n\n/// `writer` specified. 
This future will only complete once the `reader` has hit\n\n/// EOF and all bytes have been written to and flushed from the `writer`\n\n/// provided.\n\n///\n\n/// On success the number of bytes is returned.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncWriteExt, Cursor};\n\n///\n\n/// let reader = Cursor::new([1, 2, 3, 4]);\n\n/// let mut writer = Cursor::new(vec![0u8; 5]);\n\n///\n\n/// let bytes = io::copy_buf(reader, &mut writer).await?;\n\n/// writer.close().await?;\n\n///\n\n/// assert_eq!(bytes, 4);\n\n/// assert_eq!(writer.into_inner(), [1, 2, 3, 4, 0]);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn copy_buf<R, W>(reader: R, writer: &mut W) -> CopyBuf<'_, R, W>\n\nwhere\n\n R: AsyncBufRead,\n\n W: AsyncWrite + Unpin + ?Sized,\n\n{\n\n CopyBuf {\n\n reader,\n\n writer,\n\n amt: 0,\n\n }\n\n}\n\n\n\n/// Future for the [`copy_buf()`] function.\n\n#[pin_project]\n\n#[derive(Debug)]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct CopyBuf<'a, R, W: ?Sized> {\n\n #[pin]\n\n reader: R,\n\n writer: &'a mut W,\n", "file_path": "futures-util/src/io/copy_buf.rs", "rank": 48, "score": 173110.69641062964 }, { "content": "#[bench]\n\nfn oneshots(b: &mut Bencher) {\n\n const NUM: usize = 10_000;\n\n\n\n b.iter(|| {\n\n let mut txs = VecDeque::with_capacity(NUM);\n\n let mut rxs = FuturesUnordered::new();\n\n\n\n for _ in 0..NUM {\n\n let (tx, rx) = oneshot::channel();\n\n txs.push_back(tx);\n\n rxs.push(rx);\n\n }\n\n\n\n thread::spawn(move || {\n\n while let Some(tx) = txs.pop_front() {\n\n let _ = tx.send(\"hello\");\n\n }\n\n });\n\n\n\n block_on(future::poll_fn(move |cx| {\n\n loop {\n\n if let Poll::Ready(None) = rxs.poll_next_unpin(cx) {\n\n break\n\n }\n\n }\n\n Poll::Ready(())\n\n }))\n\n });\n\n}\n", "file_path": "futures-util/benches/futures_unordered.rs", "rank": 49, "score": 164179.96387339773 }, { "content": 
"/// Constructs a new handle to an empty reader.\n\n///\n\n/// All reads from the returned reader will return `Poll::Ready(Ok(0))`.\n\n///\n\n/// # Examples\n\n///\n\n/// A slightly sad example of not reading anything into a buffer:\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncReadExt};\n\n///\n\n/// let mut buffer = String::new();\n\n/// let mut reader = io::empty();\n\n/// reader.read_to_string(&mut buffer).await?;\n\n/// assert!(buffer.is_empty());\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn empty() -> Empty {\n\n Empty { _priv: () }\n\n}\n\n\n\nimpl AsyncRead for Empty {\n\n #[inline]\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n _: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Poll::Ready(Ok(0))\n\n }\n\n\n\n #[cfg(feature = \"read-initializer\")]\n\n #[inline]\n\n unsafe fn initializer(&self) -> Initializer {\n\n Initializer::nop()\n\n }\n\n}\n", "file_path": "futures-util/src/io/empty.rs", "rank": 50, "score": 164163.90009872048 }, { "content": "/// Creates an instance of a writer which will successfully consume all data.\n\n///\n\n/// All calls to `poll_write` on the returned instance will return `Poll::Ready(Ok(buf.len()))`\n\n/// and the contents of the buffer will not be inspected.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncWriteExt};\n\n///\n\n/// let buffer = vec![1, 2, 3, 5, 8];\n\n/// let mut writer = io::sink();\n\n/// let num_bytes = writer.write(&buffer).await?;\n\n/// assert_eq!(num_bytes, 5);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn sink() -> Sink {\n\n Sink { _priv: () }\n\n}\n\n\n\nimpl AsyncWrite for Sink {\n\n #[inline]\n\n fn poll_write(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Poll::Ready(Ok(buf.len()))\n\n }\n\n\n\n 
#[inline]\n\n fn poll_write_vectored(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n bufs: &[IoSlice<'_>],\n\n ) -> Poll<io::Result<usize>> {\n", "file_path": "futures-util/src/io/sink.rs", "rank": 51, "score": 164163.26066012288 }, { "content": "#[inline]\n\npub fn noop_waker() -> Waker {\n\n unsafe {\n\n Waker::from_raw(noop_raw_waker())\n\n }\n\n}\n\n\n\n/// Get a static reference to a [`Waker`] which\n\n/// does nothing when `wake()` is called on it.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::task::noop_waker_ref;\n\n/// let waker = noop_waker_ref();\n\n/// waker.wake_by_ref();\n\n/// ```\n", "file_path": "futures-task/src/noop_waker.rs", "rank": 52, "score": 161950.83380054083 }, { "content": "#[bench]\n\nfn contended(b: &mut Bencher) {\n\n let pool = LocalPool::new();\n\n let mut exec = pool.executor();\n\n let waker = notify_noop();\n\n let mut map = task::LocalMap::new();\n\n let mut waker = task::Context::new(&mut map, &waker, &mut exec);\n\n\n\n b.iter(|| {\n\n let (x, y) = BiLock::new(1);\n\n\n\n let mut x = LockStream::new(x);\n\n let mut y = LockStream::new(y);\n\n\n\n for _ in 0..1000 {\n\n let x_guard = match x.poll_next(&mut waker) {\n\n Ok(Poll::Ready(Some(guard))) => guard,\n\n _ => panic!(),\n\n };\n\n\n\n // Try poll second lock while first lock still holds the lock\n", "file_path": "futures-util/benches_disabled/bilock.rs", "rank": 53, "score": 161072.47082300618 }, { "content": "/// Create a new [`Waker`](futures_core::task::Waker) which will\n\n/// panic when `wake()` is called on it. 
The [`Waker`] can be converted\n\n/// into a [`Waker`] which will behave the same way.\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures_test::task::panic_waker;\n\n///\n\n/// let waker = panic_waker();\n\n/// waker.wake(); // Will panic\n\n/// ```\n\npub fn panic_waker() -> Waker {\n\n unsafe { Waker::from_raw(raw_panic_waker()) }\n\n}\n\n\n", "file_path": "futures-test/src/task/panic_waker.rs", "rank": 54, "score": 159872.12005880015 }, { "content": "#[bench]\n\nfn lock_unlock(b: &mut Bencher) {\n\n let pool = LocalPool::new();\n\n let mut exec = pool.executor();\n\n let waker = notify_noop();\n\n let mut map = task::LocalMap::new();\n\n let mut waker = task::Context::new(&mut map, &waker, &mut exec);\n\n\n\n b.iter(|| {\n\n let (x, y) = BiLock::new(1);\n\n\n\n let mut x = LockStream::new(x);\n\n let mut y = LockStream::new(y);\n\n\n\n for _ in 0..1000 {\n\n let x_guard = match x.poll_next(&mut waker) {\n\n Ok(Poll::Ready(Some(guard))) => guard,\n\n _ => panic!(),\n\n };\n\n\n\n x.release_lock(x_guard);\n", "file_path": "futures-util/benches_disabled/bilock.rs", "rank": 55, "score": 158883.90124814896 }, { "content": "#[bench]\n\nfn unbounded_100_tx(b: &mut Bencher) {\n\n let mut cx = noop_context();\n\n b.iter(|| {\n\n let (tx, mut rx) = mpsc::unbounded();\n\n\n\n let tx: Vec<_> = (0..100).map(|_| tx.clone()).collect();\n\n\n\n // 1000 send/recv operations total, result should be divided by 1000\n\n for _ in 0..10 {\n\n for (i, x) in tx.iter().enumerate() {\n\n assert_eq!(Poll::Pending, rx.poll_next_unpin(&mut cx));\n\n\n\n UnboundedSender::unbounded_send(x, i).unwrap();\n\n\n\n assert_eq!(Poll::Ready(Some(i)), rx.poll_next_unpin(&mut cx));\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "futures-channel/benches/sync_mpsc.rs", "rank": 56, "score": 158883.90124814896 }, { "content": "#[bench]\n\nfn unbounded_1_tx(b: &mut Bencher) {\n\n let mut cx = noop_context();\n\n b.iter(|| {\n\n let (tx, mut rx) = mpsc::unbounded();\n\n\n\n // 1000 
iterations to avoid measuring overhead of initialization\n\n // Result should be divided by 1000\n\n for i in 0..1000 {\n\n\n\n // Poll, not ready, park\n\n assert_eq!(Poll::Pending, rx.poll_next_unpin(&mut cx));\n\n\n\n UnboundedSender::unbounded_send(&tx, i).unwrap();\n\n\n\n // Now poll ready\n\n assert_eq!(Poll::Ready(Some(i)), rx.poll_next_unpin(&mut cx));\n\n }\n\n })\n\n}\n\n\n\n/// 100 producers, single consumer\n", "file_path": "futures-channel/benches/sync_mpsc.rs", "rank": 57, "score": 158883.90124814896 }, { "content": "#[bench]\n\nfn bounded_100_tx(b: &mut Bencher) {\n\n let mut cx = noop_context();\n\n b.iter(|| {\n\n // Each sender can send one item after specified capacity\n\n let (tx, mut rx) = mpsc::channel(0);\n\n\n\n let mut tx: Vec<_> = (0..100).map(|_| {\n\n TestSender {\n\n tx: tx.clone(),\n\n last: 0\n\n }\n\n }).collect();\n\n\n\n for i in 0..10 {\n\n for x in &mut tx {\n\n // Send an item\n\n assert_eq!(Poll::Ready(Some(i + 1)), x.poll_next_unpin(&mut cx));\n\n // Then block\n\n assert_eq!(Poll::Pending, x.poll_next_unpin(&mut cx));\n\n // Recv the item\n\n assert_eq!(Poll::Ready(Some(i + 1)), rx.poll_next_unpin(&mut cx));\n\n }\n\n }\n\n })\n\n}\n", "file_path": "futures-channel/benches/sync_mpsc.rs", "rank": 58, "score": 158883.90124814896 }, { "content": "#[bench]\n\nfn unbounded_uncontended(b: &mut Bencher) {\n\n let mut cx = noop_context();\n\n b.iter(|| {\n\n let (tx, mut rx) = mpsc::unbounded();\n\n\n\n for i in 0..1000 {\n\n UnboundedSender::unbounded_send(&tx, i).expect(\"send\");\n\n // No need to create a task, because poll is not going to park.\n\n assert_eq!(Poll::Ready(Some(i)), rx.poll_next_unpin(&mut cx));\n\n }\n\n })\n\n}\n\n\n\n\n", "file_path": "futures-channel/benches/sync_mpsc.rs", "rank": 59, "score": 158883.90124814896 }, { "content": "#[bench]\n\nfn bounded_1_tx(b: &mut Bencher) {\n\n let mut cx = noop_context();\n\n b.iter(|| {\n\n let (tx, mut rx) = mpsc::channel(0);\n\n\n\n let mut tx = TestSender { tx, last: 0 
};\n\n\n\n for i in 0..1000 {\n\n assert_eq!(Poll::Ready(Some(i + 1)), tx.poll_next_unpin(&mut cx));\n\n assert_eq!(Poll::Pending, tx.poll_next_unpin(&mut cx));\n\n assert_eq!(Poll::Ready(Some(i + 1)), rx.poll_next_unpin(&mut cx));\n\n }\n\n })\n\n}\n\n\n\n/// 100 producers, single consumer\n", "file_path": "futures-channel/benches/sync_mpsc.rs", "rank": 60, "score": 158883.90124814896 }, { "content": "/// Create a new [`Context`](core::task::Context) where the\n\n/// [waker](core::task::Context::waker) will ignore any uses.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::future::Future;\n\n/// use futures::task::Poll;\n\n/// use futures_test::task::noop_context;\n\n/// use futures::pin_mut;\n\n///\n\n/// let future = async { 5 };\n\n/// pin_mut!(future);\n\n///\n\n/// assert_eq!(future.poll(&mut noop_context()), Poll::Ready(5));\n\n/// ```\n\npub fn noop_context() -> Context<'static> {\n\n Context::from_waker(noop_waker_ref())\n\n}\n", "file_path": "futures-test/src/task/context.rs", "rank": 61, "score": 155694.1919896988 }, { "content": "/// Create a new [`Context`](core::task::Context) where the\n\n/// [waker](core::task::Context::waker) will panic if used.\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures_test::task::panic_context;\n\n///\n\n/// let cx = panic_context();\n\n/// cx.waker().wake_by_ref(); // Will panic\n\n/// ```\n\npub fn panic_context() -> Context<'static> {\n\n Context::from_waker(panic_waker_ref())\n\n}\n\n\n", "file_path": "futures-test/src/task/context.rs", "rank": 62, "score": 155681.1538800401 }, { "content": "/// Creates a future which never resolves, representing a computation that never\n\n/// finishes.\n\n///\n\n/// The returned future will forever return [`Poll::Pending`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```ignore\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let future = future::pending();\n\n/// let () = future.await;\n\n/// 
unreachable!();\n\n/// # });\n\n/// ```\n\npub fn pending<T>() -> Pending<T> {\n\n Pending {\n\n _data: marker::PhantomData,\n\n }\n\n}\n\n\n\nimpl<T> Future for Pending<T> {\n\n type Output = T;\n\n\n\n fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<T> {\n\n Poll::Pending\n\n }\n\n}\n\n\n\nimpl<T> Unpin for Pending<T> {\n\n}\n\n\n\nimpl<T> Clone for Pending<T> {\n\n fn clone(&self) -> Self {\n\n pending()\n\n }\n\n}\n", "file_path": "futures-util/src/future/pending.rs", "rank": 63, "score": 155249.4248984942 }, { "content": "#[bench]\n\nfn thread_yield_multi_thread(b: &mut Bencher) {\n\n use std::sync::mpsc;\n\n use std::thread;\n\n\n\n const NUM: usize = 1_000;\n\n\n\n let (tx, rx) = mpsc::sync_channel::<Waker>(10_000);\n\n\n\n struct Yield {\n\n rem: usize,\n\n tx: mpsc::SyncSender<Waker>,\n\n }\n\n impl Unpin for Yield {}\n\n\n\n impl Future for Yield {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.rem == 0 {\n\n Poll::Ready(())\n", "file_path": "futures-executor/benches/thread_notify.rs", "rank": 64, "score": 154773.9165877638 }, { "content": "#[inline]\n\n#[cfg(feature = \"std\")]\n\npub fn noop_waker_ref() -> &'static Waker {\n\n static NOOP_WAKER_INSTANCE: Lazy<Waker> = Lazy::new(noop_waker);\n\n &*NOOP_WAKER_INSTANCE\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n #[cfg(feature = \"std\")]\n\n fn issue_2091_cross_thread_segfault() {\n\n let waker = std::thread::spawn(super::noop_waker_ref).join().unwrap();\n\n waker.wake_by_ref();\n\n }\n\n}\n", "file_path": "futures-task/src/noop_waker.rs", "rank": 65, "score": 153654.20512430646 }, { "content": "fn lock_and_then<T, U, E, F>(\n\n lock: &BiLock<T>,\n\n cx: &mut Context<'_>,\n\n f: F\n\n) -> Poll<Result<U, E>>\n\n where F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> Poll<Result<U, E>>\n\n{\n\n let mut l = ready!(lock.poll_lock(cx));\n\n f(l.as_pin_mut(), cx)\n\n}\n\n\n\npub(super) fn split<T: AsyncRead + 
AsyncWrite>(t: T) -> (ReadHalf<T>, WriteHalf<T>) {\n\n let (a, b) = BiLock::new(t);\n\n (ReadHalf { handle: a }, WriteHalf { handle: b })\n\n}\n\n\n\nimpl<T: Unpin> ReadHalf<T> {\n\n /// Attempts to put the two \"halves\" of a split `AsyncRead + AsyncWrite` back\n\n /// together. Succeeds only if the `ReadHalf<T>` and `WriteHalf<T>` are\n\n /// a matching pair originating from the same call to `AsyncReadExt::split`.\n", "file_path": "futures-util/src/io/split.rs", "rank": 66, "score": 153171.8301201807 }, { "content": "/// Create a sink that will just discard all items given to it.\n\n///\n\n/// Similar to [`io::Sink`](::std::io::Sink).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::sink::{self, SinkExt};\n\n///\n\n/// let mut drain = sink::drain();\n\n/// drain.send(5).await?;\n\n/// # Ok::<(), futures::never::Never>(()) }).unwrap();\n\n/// ```\n\npub fn drain<T>() -> Drain<T> {\n\n Drain { marker: PhantomData }\n\n}\n\n\n\nimpl<T> Unpin for Drain<T> {}\n\n\n\nimpl<T> Sink<T> for Drain<T> {\n\n type Error = Never;\n\n\n\n fn poll_ready(\n\n self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n\n ) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn start_send(\n\n self: Pin<&mut Self>,\n\n _item: T,\n\n ) -> Result<(), Self::Error> {\n", "file_path": "futures-util/src/sink/drain.rs", "rank": 67, "score": 152030.25450556725 }, { "content": "/// Creates an instance of a reader that infinitely repeats one byte.\n\n///\n\n/// All reads from this reader will succeed by filling the specified buffer with\n\n/// the given byte.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncReadExt};\n\n///\n\n/// let mut buffer = [0; 3];\n\n/// let mut reader = io::repeat(0b101);\n\n/// reader.read_exact(&mut buffer).await.unwrap();\n\n/// assert_eq!(buffer, [0b101, 0b101, 0b101]);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) 
}).unwrap();\n\n/// ```\n\npub fn repeat(byte: u8) -> Repeat {\n\n Repeat { byte }\n\n}\n\n\n\nimpl AsyncRead for Repeat {\n\n #[inline]\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n for slot in &mut *buf {\n\n *slot = self.byte;\n\n }\n\n Poll::Ready(Ok(buf.len()))\n\n }\n\n\n\n #[inline]\n\n fn poll_read_vectored(\n\n mut self: Pin<&mut Self>,\n", "file_path": "futures-util/src/io/repeat.rs", "rank": 68, "score": 152029.85783341964 }, { "content": "/// Marks the current thread as being within the dynamic extent of an\n\n/// executor.\n\n///\n\n/// Executor implementations should call this function before beginning to\n\n/// execute a tasks, and drop the returned [`Enter`](Enter) value after\n\n/// completing task execution:\n\n///\n\n/// ```\n\n/// use futures::executor::enter;\n\n///\n\n/// let enter = enter().expect(\"...\");\n\n/// /* run task */\n\n/// drop(enter);\n\n/// ```\n\n///\n\n/// Doing so ensures that executors aren't\n\n/// accidentally invoked in a nested fashion.\n\n///\n\n/// # Error\n\n///\n\n/// Returns an error if the current thread is already marked, in which case the\n\n/// caller should panic with a tailored error message.\n\npub fn enter() -> Result<Enter, EnterError> {\n\n ENTERED.with(|c| {\n\n if c.get() {\n\n Err(EnterError { _priv: () })\n\n } else {\n\n c.set(true);\n\n\n\n Ok(Enter { _priv: () })\n\n }\n\n })\n\n}\n\n\n\nimpl fmt::Debug for Enter {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Enter\").finish()\n\n }\n\n}\n\n\n\nimpl Drop for Enter {\n\n fn drop(&mut self) {\n\n ENTERED.with(|c| {\n\n assert!(c.get());\n\n c.set(false);\n\n });\n\n }\n\n}\n", "file_path": "futures-executor/src/enter.rs", "rank": 69, "score": 152014.84519906988 }, { "content": "/// Creates a stream which contains no elements.\n\n///\n\n/// The returned stream will always return `Ready(None)` when polled.\n\npub fn empty<T>() -> 
Empty<T> {\n\n Empty {\n\n _phantom: PhantomData\n\n }\n\n}\n\n\n\nimpl<T> Unpin for Empty<T> {}\n\n\n\nimpl<T> FusedStream for Empty<T> {\n\n fn is_terminated(&self) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl<T> Stream for Empty<T> {\n\n type Item = T;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Ready(None)\n\n }\n", "file_path": "futures-util/src/stream/empty.rs", "rank": 70, "score": 152008.55636676896 }, { "content": "/// Creates a stream which never returns any elements.\n\n///\n\n/// The returned stream will always return `Pending` when polled.\n\npub fn pending<T>() -> Pending<T> {\n\n Pending { _data: marker::PhantomData }\n\n}\n\n\n\nimpl<T> Unpin for Pending<T> {}\n\n\n\nimpl<T> FusedStream for Pending<T> {\n\n fn is_terminated(&self) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl<T> Stream for Pending<T> {\n\n type Item = T;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Pending\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (0, Some(0))\n\n }\n\n}\n\n\n\nimpl<T> Clone for Pending<T> {\n\n fn clone(&self) -> Self {\n\n pending()\n\n }\n\n}\n", "file_path": "futures-util/src/stream/pending.rs", "rank": 71, "score": 152008.55636676896 }, { "content": "/// Get a global reference to a\n\n/// [`Waker`](futures_core::task::Waker) referencing a singleton\n\n/// instance of a [`Waker`] which panics when woken.\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures_test::task::panic_waker_ref;\n\n///\n\n/// let waker = panic_waker_ref();\n\n/// waker.wake_by_ref(); // Will panic\n\n/// ```\n\npub fn panic_waker_ref() -> &'static Waker {\n\n static PANIC_WAKER_INSTANCE: Lazy<Waker> = Lazy::new(panic_waker);\n\n &*PANIC_WAKER_INSTANCE\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n #[should_panic(expected = \"should not be woken\")]\n\n fn issue_2091_cross_thread_segfault() {\n\n let waker = 
std::thread::spawn(super::panic_waker_ref).join().unwrap();\n\n waker.wake_by_ref();\n\n }\n\n}\n", "file_path": "futures-test/src/task/panic_waker.rs", "rank": 72, "score": 151740.76907778293 }, { "content": "#[bench]\n\nfn thread_yield_single_thread_one_wait(b: &mut Bencher) {\n\n const NUM: usize = 10_000;\n\n\n\n struct Yield {\n\n rem: usize,\n\n }\n\n\n\n impl Future for Yield {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.rem == 0 {\n\n Poll::Ready(())\n\n } else {\n\n self.rem -= 1;\n\n cx.waker().wake_by_ref();\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n\n\n b.iter(|| {\n\n let y = Yield { rem: NUM };\n\n block_on(y);\n\n });\n\n}\n\n\n", "file_path": "futures-executor/benches/thread_notify.rs", "rank": 73, "score": 150985.3201415792 }, { "content": "#[bench]\n\nfn thread_yield_single_thread_many_wait(b: &mut Bencher) {\n\n const NUM: usize = 10_000;\n\n\n\n struct Yield {\n\n rem: usize,\n\n }\n\n\n\n impl Future for Yield {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.rem == 0 {\n\n Poll::Ready(())\n\n } else {\n\n self.rem -= 1;\n\n cx.waker().wake_by_ref();\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n\n\n b.iter(|| {\n\n for _ in 0..NUM {\n\n let y = Yield { rem: 1 };\n\n block_on(y);\n\n }\n\n });\n\n}\n\n\n", "file_path": "futures-executor/benches/thread_notify.rs", "rank": 74, "score": 150985.3201415792 }, { "content": "/// Create a new [`Waker`] that counts the number of times it's awoken.\n\n///\n\n/// [`Waker`]: futures_core::task::Waker\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures_test::task::new_count_waker;\n\n///\n\n/// let (waker, count) = new_count_waker();\n\n///\n\n/// assert_eq!(count, 0);\n\n///\n\n/// waker.wake_by_ref();\n\n/// waker.wake();\n\n///\n\n/// assert_eq!(count, 2);\n\n/// ```\n\npub fn new_count_waker() -> (Waker, AwokenCount) {\n\n let inner = Arc::new(WakerInner 
{ count: AtomicUsize::new(0) });\n\n (task::waker(inner.clone()), AwokenCount { inner })\n\n}\n", "file_path": "futures-test/src/task/wake_counter.rs", "rank": 75, "score": 149888.4572833134 }, { "content": "#[test]\n\nfn stream_poll_fn() {\n\n let mut counter = 5usize;\n\n\n\n let read_stream = poll_fn(move |_| -> Poll<Option<usize>, std::io::Error> {\n\n if counter == 0 {\n\n return Ok(Poll::Ready(None));\n\n }\n\n counter -= 1;\n\n Ok(Poll::Ready(Some(counter)))\n\n });\n\n\n\n assert_eq!(block_on_stream(read_stream).count(), 5);\n\n}\n\n\n", "file_path": "futures/tests_disabled/stream.rs", "rank": 76, "score": 149158.68673571074 }, { "content": "/// Creates a future which represents a collection of the outputs of the futures\n\n/// given.\n\n///\n\n/// The returned future will drive execution for all of its underlying futures,\n\n/// collecting the results into a destination `Vec<T>` in the same order as they\n\n/// were provided.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\n///\n\n/// # See Also\n\n///\n\n/// This is purposefully a very simple API for basic use-cases. In a lot of\n\n/// cases you will want to use the more powerful\n\n/// [`FuturesOrdered`][crate::stream::FuturesOrdered] APIs, or, if order does\n\n/// not matter, [`FuturesUnordered`][crate::stream::FuturesUnordered].\n\n///\n\n/// Some examples for additional functionality provided by these are:\n\n///\n\n/// * Adding new futures to the set even after it has been started.\n\n///\n\n/// * Only polling the specific futures that have been woken. 
In cases where\n\n/// you have a lot of futures this will result in much more efficient polling.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future::join_all;\n\n///\n\n/// async fn foo(i: u32) -> u32 { i }\n\n///\n\n/// let futures = vec![foo(1), foo(2), foo(3)];\n\n///\n\n/// assert_eq!(join_all(futures).await, [1, 2, 3]);\n\n/// # });\n\n/// ```\n\npub fn join_all<I>(i: I) -> JoinAll<I::Item>\n\nwhere\n\n I: IntoIterator,\n\n I::Item: Future,\n\n{\n\n let elems: Box<[_]> = i.into_iter().map(MaybeDone::Future).collect();\n\n JoinAll { elems: elems.into() }\n\n}\n\n\n\nimpl<F> Future for JoinAll<F>\n\nwhere\n\n F: Future,\n\n{\n\n type Output = Vec<F::Output>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let mut all_done = true;\n\n\n\n for elem in iter_pin_mut(self.elems.as_mut()) {\n\n if elem.poll(cx).is_pending() {\n", "file_path": "futures-util/src/future/join_all.rs", "rank": 77, "score": 148676.23750286078 }, { "content": "fn poll_03_to_01<T, E>(x: task03::Poll<Result<T, E>>)\n\n -> Result<Async01<T>, E>\n\n{\n\n match x? 
{\n\n task03::Poll::Ready(t) => Ok(Async01::Ready(t)),\n\n task03::Poll::Pending => Ok(Async01::NotReady),\n\n }\n\n}\n\n\n\nimpl<Fut> Future01 for Compat<Fut>\n\nwhere\n\n Fut: TryFuture03 + Unpin,\n\n{\n\n type Item = Fut::Ok;\n\n type Error = Fut::Error;\n\n\n\n fn poll(&mut self) -> Poll01<Self::Item, Self::Error> {\n\n with_context(self, |inner, cx| poll_03_to_01(inner.try_poll(cx)))\n\n }\n\n}\n", "file_path": "futures-util/src/compat/compat03as01.rs", "rank": 78, "score": 148607.6195249428 }, { "content": "#[proc_macro_hack]\n\npub fn select_internal(input: TokenStream) -> TokenStream {\n\n crate::select::select(input)\n\n}\n\n\n\n/// The `select_biased!` macro.\n", "file_path": "futures-macro/src/lib.rs", "rank": 79, "score": 148068.88436374022 }, { "content": "#[proc_macro_hack]\n\npub fn join_internal(input: TokenStream) -> TokenStream {\n\n crate::join::join(input)\n\n}\n\n\n\n/// The `try_join!` macro.\n", "file_path": "futures-macro/src/lib.rs", "rank": 80, "score": 148068.88436374022 }, { "content": "/// Creates a future that is immediately ready with a value.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = future::ready(1);\n\n/// assert_eq!(a.await, 1);\n\n/// # });\n\n/// ```\n\npub fn ready<T>(t: T) -> Ready<T> {\n\n Ready(Some(t))\n\n}\n\n\n", "file_path": "futures-util/src/future/ready.rs", "rank": 81, "score": 147212.2880065789 }, { "content": "/// Creates a new future which will select over a list of futures.\n\n///\n\n/// The returned future will wait for any future within `iter` to be ready. Upon\n\n/// completion the item resolved will be returned, along with the index of the\n\n/// future that was ready and the list of all the remaining futures.\n\n///\n\n/// There are no guarantees provided on the order of the list with the remaining\n\n/// futures. 
They might be swapped around, reversed, or completely random.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\n///\n\n/// # Panics\n\n///\n\n/// This function will panic if the iterator specified contains no items.\n\npub fn select_all<I>(iter: I) -> SelectAll<I::Item>\n\n where I: IntoIterator,\n\n I::Item: Future + Unpin,\n\n{\n\n let ret = SelectAll {\n\n inner: iter.into_iter().collect()\n\n };\n\n assert!(!ret.inner.is_empty());\n\n ret\n\n}\n\n\n\nimpl<Fut: Future + Unpin> Future for SelectAll<Fut> {\n\n type Output = (Fut::Output, usize, Vec<Fut>);\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let item = self.inner.iter_mut().enumerate().find_map(|(i, f)| {\n\n match f.poll_unpin(cx) {\n\n Poll::Pending => None,\n\n Poll::Ready(e) => Some((i, e)),\n\n }\n", "file_path": "futures-util/src/future/select_all.rs", "rank": 82, "score": 146614.46650146638 }, { "content": "#[proc_macro_hack]\n\npub fn try_join_internal(input: TokenStream) -> TokenStream {\n\n crate::join::try_join(input)\n\n}\n\n\n\n/// The `select!` macro.\n", "file_path": "futures-macro/src/lib.rs", "rank": 83, "score": 146216.8972090021 }, { "content": "#[proc_macro_hack]\n\npub fn select_biased_internal(input: TokenStream) -> TokenStream {\n\n crate::select::select_biased(input)\n\n}\n", "file_path": "futures-macro/src/lib.rs", "rank": 84, "score": 146216.8972090021 }, { "content": "/// Converts an `Iterator` into a `Stream` which is always ready\n\n/// to yield the next value.\n\n///\n\n/// Iterators in Rust don't express the ability to block, so this adapter\n\n/// simply always calls `iter.next()` and returns that.\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::iter(vec![17, 19]);\n\n/// assert_eq!(vec![17, 19], 
stream.collect::<Vec<i32>>().await);\n\n/// # });\n\n/// ```\n\npub fn iter<I>(i: I) -> Iter<I::IntoIter>\n\n where I: IntoIterator,\n\n{\n\n Iter {\n\n iter: i.into_iter(),\n\n }\n\n}\n\n\n\nimpl<I> Stream for Iter<I>\n\n where I: Iterator,\n\n{\n\n type Item = I::Item;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<I::Item>> {\n\n Poll::Ready(self.iter.next())\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.iter.size_hint()\n\n }\n\n}\n", "file_path": "futures-util/src/stream/iter.rs", "rank": 85, "score": 145505.8642894662 }, { "content": "#[test]\n\nfn finished_future() {\n\n use std::marker::Unpin;\n\n use futures::channel::oneshot;\n\n use futures::future::{self, Future, FutureExt};\n\n use futures::stream::{FuturesUnordered, StreamExt};\n\n use futures_test::task::noop_context;\n\n\n\n let (_a_tx, a_rx) = oneshot::channel::<i32>();\n\n let (b_tx, b_rx) = oneshot::channel::<i32>();\n\n let (c_tx, c_rx) = oneshot::channel::<i32>();\n\n\n\n let mut stream = vec![\n\n Box::new(a_rx) as Box<dyn Future<Output = Result<_, _>> + Unpin>,\n\n Box::new(future::select(b_rx, c_rx).map(|e| e.factor_first().0)) as _,\n\n ]\n\n .into_iter()\n\n .collect::<FuturesUnordered<_>>();\n\n\n\n let cx = &mut noop_context();\n\n for _ in 0..10 {\n\n assert!(stream.poll_next_unpin(cx).is_pending());\n\n }\n\n\n\n b_tx.send(12).unwrap();\n\n c_tx.send(3).unwrap();\n\n assert!(stream.poll_next_unpin(cx).is_ready());\n\n assert!(stream.poll_next_unpin(cx).is_pending());\n\n assert!(stream.poll_next_unpin(cx).is_pending());\n\n}\n\n\n", "file_path": "futures/tests/futures_unordered.rs", "rank": 86, "score": 145037.72020039865 }, { "content": "/// Creates a new one-shot channel for sending a single value across asynchronous tasks.\n\n///\n\n/// The channel works for a spsc (single-producer, single-consumer) scheme.\n\n///\n\n/// This function is similar to Rust's channel constructor found in the standard\n\n/// library. 
Two halves are returned, the first of which is a `Sender` handle,\n\n/// used to signal the end of a computation and provide its value. The second\n\n/// half is a `Receiver` which implements the `Future` trait, resolving to the\n\n/// value that was given to the `Sender` handle.\n\n///\n\n/// Each half can be separately owned and sent across tasks.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::channel::oneshot;\n\n/// use std::{thread, time::Duration};\n\n///\n\n/// let (sender, receiver) = oneshot::channel::<i32>();\n\n///\n\n/// thread::spawn(|| {\n\n/// println!(\"THREAD: sleeping zzz...\");\n\n/// thread::sleep(Duration::from_millis(1000));\n\n/// println!(\"THREAD: i'm awake! sending.\");\n\n/// sender.send(3).unwrap();\n\n/// });\n\n///\n\n/// println!(\"MAIN: doing some useful stuff\");\n\n///\n\n/// futures::executor::block_on(async {\n\n/// println!(\"MAIN: waiting for msg...\");\n\n/// println!(\"MAIN: got: {:?}\", receiver.await)\n\n/// });\n\n/// ```\n\npub fn channel<T>() -> (Sender<T>, Receiver<T>) {\n\n let inner = Arc::new(Inner::new());\n\n let receiver = Receiver {\n\n inner: inner.clone(),\n\n };\n\n let sender = Sender {\n\n inner,\n\n };\n\n (sender, receiver)\n\n}\n\n\n\nimpl<T> Inner<T> {\n\n fn new() -> Self {\n\n Self {\n\n complete: AtomicBool::new(false),\n\n data: Lock::new(None),\n\n rx_task: Lock::new(None),\n\n tx_task: Lock::new(None),\n\n }\n\n }\n", "file_path": "futures-channel/src/oneshot.rs", "rank": 87, "score": 143944.34136583653 }, { "content": "/// Creates a [`Waker`] from an `Arc<impl ArcWake>`.\n\n///\n\n/// The returned [`Waker`] will call\n\n/// [`ArcWake.wake()`](ArcWake::wake) if awoken.\n\npub fn waker<W>(wake: Arc<W>) -> Waker\n\nwhere\n\n W: ArcWake + 'static,\n\n{\n\n let ptr = Arc::into_raw(wake) as *const ();\n\n\n\n unsafe {\n\n Waker::from_raw(RawWaker::new(ptr, waker_vtable::<W>()))\n\n }\n\n}\n\n\n\n// FIXME: panics on Arc::clone / refcount changes could wreak havoc on the\n\n// code here. 
We should guard against this by aborting.\n\n\n\n#[allow(clippy::redundant_clone)] // The clone here isn't actually redundant.\n\nunsafe fn increase_refcount<T: ArcWake>(data: *const ()) {\n\n // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop\n\n let arc = mem::ManuallyDrop::new(Arc::<T>::from_raw(data as *const T));\n\n // Now increase refcount, but don't drop new refcount either\n\n let _arc_clone: mem::ManuallyDrop<_> = arc.clone();\n", "file_path": "futures-task/src/waker.rs", "rank": 88, "score": 143928.70865514126 }, { "content": "/// Convert a list of streams into a `Stream` of results from the streams.\n\n///\n\n/// This essentially takes a list of streams (e.g. a vector, an iterator, etc.)\n\n/// and bundles them together into a single stream.\n\n/// The stream will yield items as they become available on the underlying\n\n/// streams internally, in the order they become available.\n\n///\n\n/// Note that the returned set can also be used to dynamically push more\n\n/// futures into the set as they become available.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\npub fn select_all<I>(streams: I) -> SelectAll<I::Item>\n\n where I: IntoIterator,\n\n I::Item: Stream + Unpin\n\n{\n\n let mut set = SelectAll::new();\n\n\n\n for stream in streams {\n\n set.push(stream);\n\n }\n\n\n\n set\n\n}\n\n\n\nimpl<St: Stream + Unpin> FromIterator<St> for SelectAll<St> {\n\n fn from_iter<T: IntoIterator<Item = St>>(iter: T) -> Self {\n\n select_all(iter)\n\n }\n\n}\n\n\n\nimpl<St: Stream + Unpin> Extend<St> for SelectAll<St> {\n\n fn extend<T: IntoIterator<Item = St>>(&mut self, iter: T) {\n\n for st in iter {\n\n self.push(st)\n\n }\n\n }\n\n}\n", "file_path": "futures-util/src/stream/select_all.rs", "rank": 89, "score": 143399.6549442038 }, { "content": "#[test]\n\nfn drop_in_poll() {\n\n use futures::executor::block_on;\n\n use 
futures::future::{self, FutureExt, LocalFutureObj};\n\n use std::cell::RefCell;\n\n use std::rc::Rc;\n\n\n\n let slot1 = Rc::new(RefCell::new(None));\n\n let slot2 = slot1.clone();\n\n\n\n let future1 = future::lazy(move |_| {\n\n slot2.replace(None); // Drop future\n\n 1\n\n })\n\n .shared();\n\n\n\n let future2 = LocalFutureObj::new(Box::new(future1.clone()));\n\n slot1.replace(Some(future2));\n\n\n\n assert_eq!(block_on(future1), 1);\n\n}\n\n\n", "file_path": "futures/tests/shared.rs", "rank": 90, "score": 143181.94851593496 }, { "content": "/// Creates a future which represents either a collection of the results of the\n\n/// futures given or an error.\n\n///\n\n/// The returned future will drive execution for all of its underlying futures,\n\n/// collecting the results into a destination `Vec<T>` in the same order as they\n\n/// were provided.\n\n///\n\n/// If any future returns an error then all other futures will be canceled and\n\n/// an error will be returned immediately. If all futures complete successfully,\n\n/// however, then the returned future will succeed with a `Vec` of all the\n\n/// successful results.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future::{self, try_join_all};\n\n///\n\n/// let futures = vec![\n\n/// future::ok::<u32, u32>(1),\n\n/// future::ok::<u32, u32>(2),\n\n/// future::ok::<u32, u32>(3),\n\n/// ];\n\n///\n\n/// assert_eq!(try_join_all(futures).await, Ok(vec![1, 2, 3]));\n\n///\n\n/// let futures = vec![\n\n/// future::ok::<u32, u32>(1),\n\n/// future::err::<u32, u32>(2),\n\n/// future::ok::<u32, u32>(3),\n\n/// ];\n\n///\n\n/// assert_eq!(try_join_all(futures).await, Err(2));\n\n/// # });\n\n/// ```\n\npub fn try_join_all<I>(i: I) -> TryJoinAll<I::Item>\n\nwhere\n\n I: IntoIterator,\n\n I::Item: TryFuture,\n\n{\n\n 
let elems: Box<[_]> = i.into_iter().map(TryMaybeDone::Future).collect();\n\n TryJoinAll {\n\n elems: elems.into(),\n\n }\n\n}\n\n\n\nimpl<F> Future for TryJoinAll<F>\n\nwhere\n\n F: TryFuture,\n\n{\n\n type Output = Result<Vec<F::Ok>, F::Error>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let mut state = FinalState::AllDone;\n\n\n", "file_path": "futures-util/src/future/try_join_all.rs", "rank": 91, "score": 142782.42877739196 }, { "content": "#[test]\n\nfn futures_not_moved_after_poll() {\n\n use futures::future;\n\n use futures::stream::FuturesUnordered;\n\n use futures_test::future::FutureTestExt;\n\n use futures_test::{assert_stream_done, assert_stream_next};\n\n\n\n // Future that will be ready after being polled twice,\n\n // asserting that it does not move.\n\n let fut = future::ready(()).pending_once().assert_unmoved();\n\n let mut stream = vec![fut; 3].into_iter().collect::<FuturesUnordered<_>>();\n\n assert_stream_next!(stream, ());\n\n assert_stream_next!(stream, ());\n\n assert_stream_next!(stream, ());\n\n assert_stream_done!(stream);\n\n}\n\n\n", "file_path": "futures/tests/futures_unordered.rs", "rank": 92, "score": 142700.82035473455 }, { "content": "/// Create a stream which produces the same item repeatedly.\n\n///\n\n/// The stream never terminates. 
Note that you likely want to avoid\n\n/// usage of `collect` or such on the returned stream as it will exhaust\n\n/// available memory as it tries to just fill up all RAM.\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::repeat(9);\n\n/// assert_eq!(vec![9, 9, 9], stream.take(3).collect::<Vec<i32>>().await);\n\n/// # });\n\n/// ```\n\npub fn repeat<T>(item: T) -> Repeat<T>\n\n where T: Clone\n\n{\n\n Repeat { item }\n\n}\n\n\n\nimpl<T> Unpin for Repeat<T> {}\n\n\n\nimpl<T> Stream for Repeat<T>\n\n where T: Clone\n\n{\n\n type Item = T;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Ready(Some(self.item.clone()))\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (usize::max_value(), None)\n\n }\n\n}\n\n\n\nimpl<T> FusedStream for Repeat<T>\n\n where T: Clone,\n\n{\n\n fn is_terminated(&self) -> bool {\n\n false\n\n }\n\n}\n", "file_path": "futures-util/src/stream/repeat.rs", "rank": 93, "score": 142022.14422686122 }, { "content": "struct Guard<'a> { buf: &'a mut Vec<u8>, len: usize }\n\n\n\nimpl Drop for Guard<'_> {\n\n fn drop(&mut self) {\n\n unsafe { self.buf.set_len(self.len); }\n\n }\n\n}\n\n\n\n// This uses an adaptive system to extend the vector when it fills. We want to\n\n// avoid paying to allocate and zero a huge chunk of memory if the reader only\n\n// has 4 bytes while still making large reads if the reader does have a ton\n\n// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every\n\n// time is 4,500 times (!) 
slower than this if the reader has a very small\n\n// amount of data to return.\n\n//\n\n// Because we're extending the buffer with uninitialized data for trusted\n\n// readers, we need to make sure to truncate that if any of this panics.\n\npub(super) fn read_to_end_internal<R: AsyncRead + ?Sized>(\n\n mut rd: Pin<&mut R>,\n\n cx: &mut Context<'_>,\n", "file_path": "futures-util/src/io/read_to_end.rs", "rank": 94, "score": 141570.94345510745 }, { "content": "#[test]\n\nfn iter_mut_cancel() {\n\n use futures::channel::oneshot;\n\n use futures::executor::block_on_stream;\n\n use futures::stream::FuturesUnordered;\n\n\n\n let (a_tx, a_rx) = oneshot::channel::<i32>();\n\n let (b_tx, b_rx) = oneshot::channel::<i32>();\n\n let (c_tx, c_rx) = oneshot::channel::<i32>();\n\n\n\n let mut stream = vec![a_rx, b_rx, c_rx]\n\n .into_iter()\n\n .collect::<FuturesUnordered<_>>();\n\n\n\n for rx in stream.iter_mut() {\n\n rx.close();\n\n }\n\n\n\n let mut iter = block_on_stream(stream);\n\n\n\n assert!(a_tx.is_canceled());\n\n assert!(b_tx.is_canceled());\n\n assert!(c_tx.is_canceled());\n\n\n\n assert_eq!(iter.next(), Some(Err(futures::channel::oneshot::Canceled)));\n\n assert_eq!(iter.next(), Some(Err(futures::channel::oneshot::Canceled)));\n\n assert_eq!(iter.next(), Some(Err(futures::channel::oneshot::Canceled)));\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "futures/tests/futures_unordered.rs", "rank": 95, "score": 141321.73536137908 }, { "content": "#[test]\n\nfn iter_mut_len() {\n\n use futures::future;\n\n use futures::stream::FuturesUnordered;\n\n\n\n let mut stream = vec![\n\n future::pending::<()>(),\n\n future::pending::<()>(),\n\n future::pending::<()>(),\n\n ]\n\n .into_iter()\n\n .collect::<FuturesUnordered<_>>();\n\n\n\n let mut iter_mut = stream.iter_mut();\n\n assert_eq!(iter_mut.len(), 3);\n\n assert!(iter_mut.next().is_some());\n\n assert_eq!(iter_mut.len(), 2);\n\n assert!(iter_mut.next().is_some());\n\n assert_eq!(iter_mut.len(), 1);\n\n 
assert!(iter_mut.next().is_some());\n\n assert_eq!(iter_mut.len(), 0);\n\n assert!(iter_mut.next().is_none());\n\n}\n\n\n", "file_path": "futures/tests/futures_unordered.rs", "rank": 96, "score": 141321.73536137908 }, { "content": "pub trait FnMut1<A>: FnOnce1<A> {\n\n fn call_mut(&mut self, arg: A) -> Self::Output;\n\n}\n\n\n\nimpl<T, A, R> FnMut1<A> for T\n\nwhere\n\n T: FnMut(A) -> R\n\n{\n\n fn call_mut(&mut self, arg: A) -> R {\n\n self(arg)\n\n }\n\n}\n\n\n\n// Not used, but present for completeness\n", "file_path": "futures-util/src/fns.rs", "rank": 97, "score": 141016.79338387164 }, { "content": "/// Creates a new future which will select the first successful future over a list of futures.\n\n///\n\n/// The returned future will wait for any future within `iter` to be ready and Ok. Unlike\n\n/// `select_all`, this will only return the first successful completion, or the last\n\n/// failure. This is useful in contexts where any success is desired and failures\n\n/// are ignored, unless all the futures fail.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\n///\n\n/// # Panics\n\n///\n\n/// This function will panic if the iterator specified contains no items.\n\npub fn select_ok<I>(iter: I) -> SelectOk<I::Item>\n\n where I: IntoIterator,\n\n I::Item: TryFuture + Unpin,\n\n{\n\n let ret = SelectOk {\n\n inner: iter.into_iter().collect()\n\n };\n\n assert!(!ret.inner.is_empty(), \"iterator provided to select_ok was empty\");\n\n ret\n\n}\n\n\n\nimpl<Fut: TryFuture + Unpin> Future for SelectOk<Fut> {\n\n type Output = Result<(Fut::Ok, Vec<Fut>), Fut::Error>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n // loop until we've either exhausted all errors, a success was hit, or nothing is ready\n\n loop {\n\n let item = self.inner.iter_mut().enumerate().find_map(|(i, f)| {\n\n match f.try_poll_unpin(cx) {\n\n Poll::Pending 
=> None,\n", "file_path": "futures-util/src/future/select_ok.rs", "rank": 98, "score": 140961.13724733092 }, { "content": "#[test]\n\nfn with_propagates_poll_ready() {\n\n use futures::channel::mpsc;\n\n use futures::executor::block_on;\n\n use futures::future;\n\n use futures::sink::{Sink, SinkExt};\n\n use futures::task::Poll;\n\n use std::pin::Pin;\n\n\n\n use flag_cx::flag_cx;\n\n use sassert_next::sassert_next;\n\n\n\n let (tx, mut rx) = mpsc::channel::<i32>(0);\n\n let mut tx = tx.with(|item: i32| future::ok::<i32, mpsc::SendError>(item + 10));\n\n\n\n block_on(future::lazy(|_| {\n\n flag_cx(|flag, cx| {\n\n let mut tx = Pin::new(&mut tx);\n\n\n\n // Should be ready for the first item.\n\n assert_eq!(tx.as_mut().poll_ready(cx), Poll::Ready(Ok(())));\n", "file_path": "futures/tests/sink.rs", "rank": 99, "score": 140681.83101184908 } ]
Rust
src/revaultd/config.rs
KRD1/revault-gui
5eb9b2fee0c78d63cc5f4b7bb93d1f99c4bb8fb0
use bitcoin::{util::bip32, Network}; use serde::Deserialize; use std::{ net::SocketAddr, path::{Path, PathBuf}, }; #[derive(Debug, Clone, Deserialize)] pub struct BitcoindConfig { pub network: Network, pub cookie_path: PathBuf, pub addr: SocketAddr, pub poll_interval_secs: Option<u64>, } #[derive(Debug, Clone, Deserialize)] pub struct WatchtowerConfig { pub host: String, pub noise_key: String, } #[derive(Debug, Clone, Deserialize)] pub struct StakeholderConfig { pub xpub: bip32::ExtendedPubKey, pub watchtowers: Vec<WatchtowerConfig>, pub emergency_address: String, } #[derive(Debug, Clone, Deserialize)] pub struct CosignerConfig { pub host: String, pub noise_key: String, } #[derive(Debug, Clone, Deserialize)] pub struct ManagerConfig { pub xpub: bip32::ExtendedPubKey, pub cosigners: Vec<CosignerConfig>, } #[derive(Debug, Clone, Deserialize)] pub struct ScriptsConfig { pub deposit_descriptor: String, pub unvault_descriptor: String, pub cpfp_descriptor: String, } #[derive(Debug, Clone, Deserialize)] pub struct Config { pub bitcoind_config: BitcoindConfig, pub stakeholder_config: Option<StakeholderConfig>, pub manager_config: Option<ManagerConfig>, pub scripts_config: ScriptsConfig, pub coordinator_host: String, pub coordinator_noise_key: String, pub coordinator_poll_seconds: Option<u64>, pub data_dir: Option<PathBuf>, pub daemon: Option<bool>, pub log_level: Option<String>, } impl Config { pub fn from_file(path: &Path) -> Result<Self, ConfigError> { let config = std::fs::read(path) .map_err(|e| match e.kind() { std::io::ErrorKind::NotFound => ConfigError::NotFound, _ => ConfigError::ReadingFile(format!("Reading configuration file: {}", e)), }) .and_then(|file_content| { toml::from_slice::<Config>(&file_content).map_err(|e| { ConfigError::ReadingFile(format!("Parsing configuration file: {}", e)) }) })?; Ok(config) } pub fn socket_path(&self) -> Result<PathBuf, ConfigError> { let mut path = if let Some(ref datadir) = self.data_dir { datadir.clone() } else { 
default_datadir().map_err(|_| { ConfigError::Unexpected("Could not locate the default datadir.".to_owned()) })? }; path.push(&self.bitcoind_config.network.to_string()); path.push("revaultd_rpc"); Ok(path) } pub fn default_path() -> Result<PathBuf, ConfigError> { let mut datadir = default_datadir().map_err(|_| { ConfigError::Unexpected("Could not locate the default datadir.".to_owned()) })?; datadir.push("revault.toml"); Ok(datadir) } } pub fn default_datadir() -> Result<PathBuf, ()> { #[cfg(target_os = "linux")] let configs_dir = dirs::home_dir(); #[cfg(not(target_os = "linux"))] let configs_dir = dirs::config_dir(); if let Some(mut path) = configs_dir { #[cfg(target_os = "linux")] path.push(".revault"); #[cfg(not(target_os = "linux"))] path.push("Revault"); return Ok(path); } Err(()) } #[derive(PartialEq, Eq, Debug, Clone)] pub enum ConfigError { NotFound, ReadingFile(String), Unexpected(String), } impl std::fmt::Display for ConfigError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Self::NotFound => write!(f, "Revaultd Configuration error: not found"), Self::ReadingFile(e) => { write!(f, "Revaultd Configuration error while reading file: {}", e) } Self::Unexpected(e) => write!(f, "Revaultd Configuration error unexpected: {}", e), } } } impl std::error::Error for ConfigError {}
use bitcoin::{util::bip32, Network}; use serde::Deserialize; use std::{ net::SocketAddr, path::{Path, PathBuf}, }; #[derive(Debug, Clone, Deserialize)] pub struct BitcoindConfig { pub network: Network, pub cookie_path: PathBuf, pub addr: SocketAddr, pub poll_interval_secs: Option<u64>, } #[derive(Debug, Clone, Deserialize)] pub struct WatchtowerConfig { pub host: String, pub noise_key: String, } #[derive(Debug, Clone, Deserialize)] pub struct StakeholderConfig { pub xpub: bip32::ExtendedPubKey, pub watchtowers: Vec<WatchtowerConf
figuration file: {}", e)) }) })?; Ok(config) } pub fn socket_path(&self) -> Result<PathBuf, ConfigError> { let mut path = if let Some(ref datadir) = self.data_dir { datadir.clone() } else { default_datadir().map_err(|_| { ConfigError::Unexpected("Could not locate the default datadir.".to_owned()) })? }; path.push(&self.bitcoind_config.network.to_string()); path.push("revaultd_rpc"); Ok(path) } pub fn default_path() -> Result<PathBuf, ConfigError> { let mut datadir = default_datadir().map_err(|_| { ConfigError::Unexpected("Could not locate the default datadir.".to_owned()) })?; datadir.push("revault.toml"); Ok(datadir) } } pub fn default_datadir() -> Result<PathBuf, ()> { #[cfg(target_os = "linux")] let configs_dir = dirs::home_dir(); #[cfg(not(target_os = "linux"))] let configs_dir = dirs::config_dir(); if let Some(mut path) = configs_dir { #[cfg(target_os = "linux")] path.push(".revault"); #[cfg(not(target_os = "linux"))] path.push("Revault"); return Ok(path); } Err(()) } #[derive(PartialEq, Eq, Debug, Clone)] pub enum ConfigError { NotFound, ReadingFile(String), Unexpected(String), } impl std::fmt::Display for ConfigError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Self::NotFound => write!(f, "Revaultd Configuration error: not found"), Self::ReadingFile(e) => { write!(f, "Revaultd Configuration error while reading file: {}", e) } Self::Unexpected(e) => write!(f, "Revaultd Configuration error unexpected: {}", e), } } } impl std::error::Error for ConfigError {}
ig>, pub emergency_address: String, } #[derive(Debug, Clone, Deserialize)] pub struct CosignerConfig { pub host: String, pub noise_key: String, } #[derive(Debug, Clone, Deserialize)] pub struct ManagerConfig { pub xpub: bip32::ExtendedPubKey, pub cosigners: Vec<CosignerConfig>, } #[derive(Debug, Clone, Deserialize)] pub struct ScriptsConfig { pub deposit_descriptor: String, pub unvault_descriptor: String, pub cpfp_descriptor: String, } #[derive(Debug, Clone, Deserialize)] pub struct Config { pub bitcoind_config: BitcoindConfig, pub stakeholder_config: Option<StakeholderConfig>, pub manager_config: Option<ManagerConfig>, pub scripts_config: ScriptsConfig, pub coordinator_host: String, pub coordinator_noise_key: String, pub coordinator_poll_seconds: Option<u64>, pub data_dir: Option<PathBuf>, pub daemon: Option<bool>, pub log_level: Option<String>, } impl Config { pub fn from_file(path: &Path) -> Result<Self, ConfigError> { let config = std::fs::read(path) .map_err(|e| match e.kind() { std::io::ErrorKind::NotFound => ConfigError::NotFound, _ => ConfigError::ReadingFile(format!("Reading configuration file: {}", e)), }) .and_then(|file_content| { toml::from_slice::<Config>(&file_content).map_err(|e| { ConfigError::ReadingFile(format!("Parsing con
random
[ { "content": "pub fn clipboard<'a, T: 'a + Clone>(\n\n state: &'a mut button::State,\n\n message: T,\n\n) -> button::Button<'a, T> {\n\n button::Button::new(state, clipboard_icon().size(15))\n\n .on_press(message)\n\n .style(ClipboardButtonStyle {})\n\n}\n\n\n", "file_path": "src/ui/component/button.rs", "rank": 0, "score": 105135.60283832636 }, { "content": "pub fn white_card_button<'a, T: 'a + Clone>(\n\n state: &'a mut button::State,\n\n content: Container<'a, T>,\n\n) -> button::Button<'a, T> {\n\n button::Button::new(state, content.padding(10)).style(WhiteCardButtonStyle {})\n\n}\n\n\n", "file_path": "src/ui/component/button.rs", "rank": 1, "score": 101275.1758960774 }, { "content": "pub fn network_icon() -> Text {\n\n icon('\\u{F3ED}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 2, "score": 89261.40931231715 }, { "content": "#[derive(Debug, Clone, Deserialize, Serialize)]\n\nstruct Request {}\n\n\n\n/// getinfo\n\n\n\n/// getinfo response\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct GetInfoResponse {\n\n pub blockheight: u64,\n\n pub network: String,\n\n pub sync: f64,\n\n pub version: String,\n\n}\n\n\n\n/// list_vaults\n\n\n\n/// listvaults response\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct ListVaultsResponse {\n\n pub vaults: Vec<Vault>,\n\n}\n", "file_path": "src/revaultd/mod.rs", "rank": 4, "score": 67446.87888686499 }, { "content": "#[derive(Debug)]\n\nstruct StakeholderOverview {\n\n ack_fund_button: iced::button::State,\n\n delegate_fund_button: iced::button::State,\n\n}\n\n\n\nimpl StakeholderOverview {\n\n pub fn new() -> Self {\n\n Self {\n\n ack_fund_button: iced::button::State::new(),\n\n delegate_fund_button: iced::button::State::new(),\n\n }\n\n }\n\n\n\n pub fn view(\n\n &mut self,\n\n ctx: &Context,\n\n overview: &HashMap<VaultStatus, (u64, u64)>,\n\n ) -> Element<Message> {\n\n let (nb_total_vaults, total_amount) =\n\n overview.iter().fold((0, 0), |acc, (status, (nb, amount))| {\n", "file_path": 
"src/app/view/home.rs", "rank": 5, "score": 64940.52396682986 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ManagerSendInput {\n\n vault: model::Vault,\n\n selected: bool,\n\n}\n\n\n\nimpl ManagerSendInput {\n\n fn new(vault: model::Vault) -> Self {\n\n Self {\n\n vault,\n\n selected: false,\n\n }\n\n }\n\n\n\n pub fn view(&mut self, ctx: &Context) -> Element<InputMessage> {\n\n manager_send_input_view(\n\n ctx,\n\n &self.vault.outpoint(),\n\n &self.vault.amount,\n\n self.selected,\n\n )\n", "file_path": "src/app/state/manager.rs", "rank": 6, "score": 63813.837218595974 }, { "content": "struct AlertBadgeStyle;\n\nimpl container::StyleSheet for AlertBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::WARNING_LIGHT.into(),\n\n text_color: color::WARNING.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 7, "score": 63809.42339825586 }, { "content": "struct BlockBadgeStyle;\n\nimpl container::StyleSheet for BlockBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::PRIMARY_LIGHT.into(),\n\n text_color: color::PRIMARY.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 8, "score": 63809.42339825586 }, { "content": "struct InactiveBadgeStyle;\n\nimpl container::StyleSheet for InactiveBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::BACKGROUND.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n", "file_path": "src/ui/component/badge.rs", "rank": 9, "score": 63809.42339825586 }, { "content": "#[derive(Debug)]\n\nstruct ManagerSendOutput {\n\n address: String,\n\n amount: String,\n\n\n\n warning_address: bool,\n\n warning_amount: bool,\n\n\n\n view: ManagerSendOutputView,\n\n}\n\n\n\nimpl ManagerSendOutput {\n\n 
fn new() -> Self {\n\n Self {\n\n address: \"\".to_string(),\n\n amount: \"\".to_string(),\n\n warning_address: false,\n\n warning_amount: false,\n\n view: ManagerSendOutputView::new(),\n\n }\n\n }\n", "file_path": "src/app/state/manager.rs", "rank": 10, "score": 63809.42339825586 }, { "content": "struct PersonBadgeStyle;\n\nimpl container::StyleSheet for PersonBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::FOREGROUND.into(),\n\n text_color: color::CANCEL.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 11, "score": 63809.42339825586 }, { "content": "struct SuccessBadgeStyle;\n\nimpl container::StyleSheet for SuccessBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::SUCCESS_LIGHT.into(),\n\n text_color: color::SUCCESS.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 12, "score": 63809.42339825586 }, { "content": "struct ShieldBadgeStyle;\n\nimpl container::StyleSheet for ShieldBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::FOREGROUND.into(),\n\n text_color: color::CANCEL.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 13, "score": 63809.42339825586 }, { "content": "struct ClipboardButtonStyle {}\n\nimpl button::StyleSheet for ClipboardButtonStyle {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n shadow_offset: Vector::default(),\n\n background: Color::TRANSPARENT.into(),\n\n border_radius: 10.0,\n\n border_width: 0.0,\n\n border_color: Color::TRANSPARENT,\n\n text_color: Color::BLACK,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/button.rs", "rank": 14, "score": 63809.42339825586 }, { "content": "struct 
WarningBadgeStyle;\n\nimpl container::StyleSheet for WarningBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::WARNING_LIGHT.into(),\n\n text_color: color::WARNING.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 15, "score": 63809.42339825586 }, { "content": "fn config_path_from_args(args: Vec<String>) -> Result<Option<PathBuf>, Box<dyn Error>> {\n\n if args.len() == 1 {\n\n return Ok(None);\n\n }\n\n\n\n if args.len() != 3 || args[1] != \"--conf\" {\n\n println!(\"Usage: '--conf <configuration file path>'\");\n\n return Err(format!(\"Unknown arguments '{:?}'.\", args).into());\n\n }\n\n\n\n Ok(Some(PathBuf::from(args[2].to_owned())))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 62802.38357683431 }, { "content": "struct ShieldSuccessBadgeStyle;\n\nimpl container::StyleSheet for ShieldSuccessBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::FOREGROUND.into(),\n\n text_color: color::SUCCESS.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 17, "score": 62747.22337756843 }, { "content": "struct ShieldNotifBadgeStyle;\n\nimpl container::StyleSheet for ShieldNotifBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::FOREGROUND.into(),\n\n text_color: color::CANCEL.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 18, "score": 62747.22337756843 }, { "content": "struct TxDepositBadgeStyle;\n\nimpl container::StyleSheet for TxDepositBadgeStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 40.0,\n\n background: color::INFO_LIGHT.into(),\n\n text_color: color::INFO.into(),\n\n 
..container::Style::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 19, "score": 62747.22337756843 }, { "content": "struct WhiteCardButtonStyle {}\n\nimpl button::StyleSheet for WhiteCardButtonStyle {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n border_radius: 10.0,\n\n background: color::FOREGROUND.into(),\n\n ..button::Style::default()\n\n }\n\n }\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n border_radius: 10.0,\n\n background: color::FOREGROUND.into(),\n\n border_color: color::SECONDARY,\n\n border_width: 1.0,\n\n ..button::Style::default()\n\n }\n\n }\n\n}\n", "file_path": "src/ui/component/button.rs", "rank": 20, "score": 62747.22337756843 }, { "content": "pub trait State {\n\n fn view(&mut self, ctx: &Context) -> Element<Message>;\n\n fn update(&mut self, message: Message) -> Command<Message>;\n\n fn subscription(&self) -> Subscription<Message> {\n\n Subscription::none()\n\n }\n\n fn load(&self) -> Command<Message> {\n\n Command::none()\n\n }\n\n}\n", "file_path": "src/app/state/mod.rs", "rank": 21, "score": 56889.361476226186 }, { "content": "pub trait VaultView {\n\n fn new() -> Self;\n\n fn view(&mut self, ctx: &Context, vault: &Vault) -> Element<Message>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct VaultListItemView {\n\n state: iced::button::State,\n\n}\n\n\n\nimpl VaultView for VaultListItemView {\n\n fn new() -> Self {\n\n VaultListItemView {\n\n state: iced::button::State::new(),\n\n }\n\n }\n\n\n\n fn view(&mut self, ctx: &Context, vault: &Vault) -> iced::Element<Message> {\n\n let updated_at = NaiveDateTime::from_timestamp(vault.updated_at, 0);\n\n button::white_card_button(\n", "file_path": "src/app/view/vault.rs", "rank": 22, "score": 55775.83936609038 }, { "content": "pub trait SettingsBox {\n\n fn title(&self) -> &'static str;\n\n fn description(&self) -> &'static str;\n\n fn body<'a>(&self, config: &Config) -> Column<'a, Message>;\n\n fn display<'a>(&self, config: 
&Config) -> Container<'a, Message> {\n\n card::simple(Container::new(\n\n Column::new()\n\n .push(\n\n Row::new()\n\n .push(\n\n Container::new(\n\n Row::new()\n\n .push(\n\n Column::new()\n\n .push(text::bold(text::simple(self.title())))\n\n .push(text::small(self.description())),\n\n )\n\n .spacing(20),\n\n )\n\n .width(Length::Fill),\n", "file_path": "src/app/view/settings/boxes.rs", "rank": 23, "score": 54730.1470189339 }, { "content": "pub fn home_icon() -> Text {\n\n icon('\\u{F3DC}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 24, "score": 54297.41173225416 }, { "content": "pub fn send_icon() -> Text {\n\n icon('\\u{F144}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 25, "score": 54297.41173225416 }, { "content": "pub fn vaults_icon() -> Text {\n\n icon('\\u{F1C7}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 26, "score": 54297.41173225416 }, { "content": "pub fn dot_icon() -> Text {\n\n icon('\\u{F287}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 27, "score": 54297.41173225416 }, { "content": "pub fn plus_icon() -> Text {\n\n icon('\\u{F4D7}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 28, "score": 54297.41173225416 }, { "content": "pub fn done_icon() -> Text {\n\n icon('\\u{F26B}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 29, "score": 54297.41173225416 }, { "content": "pub fn turnback_icon() -> Text {\n\n icon('\\u{F131}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 30, "score": 54297.41173225416 }, { "content": "#[allow(dead_code)]\n\npub fn manager_icon() -> Text {\n\n icon('\\u{F4B4}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 31, "score": 54297.41173225416 }, { "content": "#[allow(dead_code)]\n\npub fn withdrawal_icon() -> Text {\n\n icon('\\u{F144}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 32, "score": 54297.41173225416 }, { "content": "pub fn shield_icon() -> Text {\n\n icon('\\u{F517}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 33, "score": 54297.41173225416 
}, { "content": "pub fn block_icon() -> Text {\n\n icon('\\u{F1C8}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 34, "score": 54297.41173225416 }, { "content": "#[allow(dead_code)]\n\npub fn stakeholder_icon() -> Text {\n\n icon('\\u{F4AE}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 35, "score": 54297.41173225416 }, { "content": "pub fn warning_icon() -> Text {\n\n icon('\\u{F31B}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 36, "score": 54297.41173225416 }, { "content": "pub fn tooltip_icon() -> Text {\n\n icon('\\u{F410}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 37, "score": 54297.41173225416 }, { "content": "#[allow(dead_code)]\n\npub fn history_icon() -> Text {\n\n icon('\\u{F292}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 38, "score": 54297.41173225416 }, { "content": "pub fn todo_icon() -> Text {\n\n icon('\\u{F28A}')\n\n}\n", "file_path": "src/ui/icon.rs", "rank": 39, "score": 54297.41173225416 }, { "content": "pub fn clipboard_icon() -> Text {\n\n icon('\\u{F28E}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 40, "score": 54297.41173225416 }, { "content": "#[allow(dead_code)]\n\npub fn arrow_up_icon() -> Text {\n\n icon('\\u{F148}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 41, "score": 54297.41173225416 }, { "content": "pub fn deposit_icon() -> Text {\n\n icon('\\u{F123}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 42, "score": 54297.41173225416 }, { "content": "pub fn settings_icon() -> Text {\n\n icon('\\u{F3C5}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 43, "score": 54297.41173225416 }, { "content": "pub fn shield_check_icon() -> Text {\n\n icon('\\u{F509}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 44, "score": 53251.71938509768 }, { "content": "pub fn shield_notif_icon() -> Text {\n\n icon('\\u{F50A}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 45, "score": 53251.71938509768 }, { "content": "pub fn person_check_icon() -> Text {\n\n 
icon('\\u{F4AF}')\n\n}\n\n\n", "file_path": "src/ui/icon.rs", "rank": 46, "score": 53251.71938509768 }, { "content": "pub fn revault_colored_logo() -> Svg {\n\n let h = Handle::from_memory(LOGO.to_vec());\n\n Svg::new(h)\n\n}\n", "file_path": "src/ui/component/image.rs", "rank": 47, "score": 52267.84225269772 }, { "content": "pub fn manager_send_input_view<'a>(\n\n ctx: &Context,\n\n outpoint: &str,\n\n amount: &u64,\n\n selected: bool,\n\n) -> Element<'a, InputMessage> {\n\n let checkbox = Checkbox::new(selected, \"\", InputMessage::Selected).text_size(10);\n\n let row = Row::new()\n\n .push(checkbox)\n\n .push(\n\n Container::new(\n\n Row::new()\n\n .push(text::bold(text::simple(&format!(\n\n \"{}\",\n\n ctx.converter.converts(*amount)\n\n ))))\n\n .push(text::small(&ctx.converter.unit.to_string()))\n\n .align_items(Align::Center),\n\n )\n\n .width(Length::Fill),\n", "file_path": "src/app/view/manager.rs", "rank": 48, "score": 52267.84225269772 }, { "content": "/// scroll is a wrapper for Scrollable in order to fix a bug from iced 0.3.0\n\n/// scroll add padding to the content in order to give space to the scroll bar.\n\n/// TODO: remove it once https://github.com/hecrj/iced/issues/793 is fixed\n\npub fn scroll<'a, T: 'a>(\n\n state: &'a mut scrollable::State,\n\n content: Container<'a, T>,\n\n) -> Scrollable<'a, T> {\n\n Scrollable::new(state).push(Container::new(content).padding(10))\n\n}\n\n\n", "file_path": "src/ui/component/mod.rs", "rank": 49, "score": 51878.04352251995 }, { "content": "pub fn dashboard<'a, T: 'a>(\n\n header: Container<'a, T>,\n\n sidebar: Container<'a, T>,\n\n main: Container<'a, T>,\n\n) -> Container<'a, T> {\n\n Container::new(\n\n Column::new()\n\n .push(header)\n\n .push(\n\n Row::new()\n\n .push(sidebar.width(Length::Shrink).height(Length::Fill))\n\n .push(main.width(Length::Fill).height(Length::Fill)),\n\n )\n\n .width(iced::Length::Fill)\n\n .height(iced::Length::Fill),\n\n )\n\n}\n\n\n", "file_path": "src/app/view/layout.rs", 
"rank": 50, "score": 51878.04352251995 }, { "content": "pub fn bold(t: Text) -> Text {\n\n t.font(font::BOLD)\n\n}\n\n\n", "file_path": "src/ui/component/text.rs", "rank": 51, "score": 50832.35117536347 }, { "content": "pub fn danger(t: Text) -> Text {\n\n t.color(color::PRIMARY)\n\n}\n", "file_path": "src/ui/component/text.rs", "rank": 52, "score": 50832.35117536347 }, { "content": "pub fn success(t: Text) -> Text {\n\n t.color(color::SUCCESS)\n\n}\n\n\n", "file_path": "src/ui/component/text.rs", "rank": 53, "score": 50832.35117536347 }, { "content": "pub fn simple(content: &str) -> Text {\n\n Text::new(content).font(font::REGULAR).size(20)\n\n}\n\n\n", "file_path": "src/ui/component/text.rs", "rank": 54, "score": 49848.47404296351 }, { "content": "pub fn small(content: &str) -> Text {\n\n Text::new(content).font(font::REGULAR).size(15)\n\n}\n\n\n", "file_path": "src/ui/component/text.rs", "rank": 55, "score": 49848.47404296351 }, { "content": "pub fn spend_tx_with_feerate_view<'a, T: 'a>(\n\n ctx: &Context,\n\n inputs: &[model::Vault],\n\n psbt: &Psbt,\n\n feerate: Option<&u32>,\n\n) -> Container<'a, T> {\n\n let mut total_fees = 0;\n\n let mut col_input = Column::new()\n\n .push(text::bold(text::simple(\"Inputs\")))\n\n .spacing(10);\n\n for input in inputs {\n\n total_fees += input.amount;\n\n col_input = col_input.push(card::simple(Container::new(\n\n Row::new()\n\n .push(Container::new(text::small(&input.address.to_string())).width(Length::Fill))\n\n .push(\n\n Container::new(text::bold(text::small(&format!(\n\n \"{}\",\n\n ctx.converter.converts(input.amount),\n\n ))))\n", "file_path": "src/app/view/manager.rs", "rank": 56, "score": 48921.08825256779 }, { "content": "pub fn charging_connect_view() -> Element<'static, Message> {\n\n layout::cover(component::text::paragraph(\"Connecting to daemon...\"))\n\n}\n\n\n", "file_path": "src/app/view/charging.rs", "rank": 57, "score": 48045.46423226457 }, { "content": "pub fn charging_starting_daemon_view() -> 
Element<'static, Message> {\n\n layout::cover(component::text::paragraph(\"Starting daemon...\"))\n\n}\n\n\n", "file_path": "src/app/view/charging.rs", "rank": 58, "score": 47217.38603559503 }, { "content": "pub fn separation<'a, T: 'a>() -> Container<'a, T> {\n\n Container::new(Column::new().push(iced::Text::new(\" \")))\n\n .style(SepStyle)\n\n .height(Length::Units(1))\n\n}\n\n\n\npub struct SepStyle;\n\nimpl container::StyleSheet for SepStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: color::SECONDARY.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct ContainerBackgroundStyle;\n\nimpl container::StyleSheet for ContainerBackgroundStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n", "file_path": "src/ui/component/mod.rs", "rank": 59, "score": 47149.7678278189 }, { "content": "pub fn shield<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = shield_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(ShieldBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 60, "score": 47149.7678278189 }, { "content": "pub fn block<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = block_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(BlockBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 61, "score": 47149.7678278189 }, { "content": "pub fn large_logo<T>() -> Container<'static, T> {\n\n Container::new(\n\n revault_colored_logo()\n\n .width(Length::Units(300))\n\n .height(Length::Fill),\n\n )\n\n}\n\n\n", "file_path": "src/app/view/layout.rs", "rank": 62, "score": 47043.109814050695 }, { "content": "pub fn run(config: Config) -> Result<(), iced::Error> {\n\n 
App::run(Settings::with_flags(config))\n\n}\n\n\n\nimpl App {\n\n #[allow(unreachable_patterns)]\n\n pub fn load_state(&mut self, role: Role, menu: Menu) -> Command<Message> {\n\n self.context.role = role;\n\n self.context.menu = menu;\n\n let revaultd = self.revaultd.clone().unwrap();\n\n self.state = match self.context.role {\n\n Role::Manager => match self.context.menu {\n\n Menu::Deposit => DepositState::new(revaultd).into(),\n\n Menu::Home => ManagerHomeState::new(revaultd).into(),\n\n Menu::Vaults => VaultsState::new(revaultd).into(),\n\n Menu::Network => ManagerNetworkState::new(revaultd).into(),\n\n Menu::Send => ManagerSendState::new(revaultd).into(),\n\n // Manager cannot delegate funds, the user is redirected to the home.\n\n Menu::DelegateFunds => ManagerHomeState::new(revaultd).into(),\n\n Menu::Settings => SettingsState::new(revaultd.config.clone()).into(),\n", "file_path": "src/app/mod.rs", "rank": 63, "score": 47024.719944990306 }, { "content": "pub fn shield_success<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = shield_check_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(ShieldSuccessBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 64, "score": 46222.382037423165 }, { "content": "pub fn person_check<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = person_check_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(PersonBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 65, "score": 46222.382037423165 }, { "content": "pub fn vault_canceled<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = turnback_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n 
.height(Length::Units(40))\n\n .style(AlertBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 66, "score": 46222.382037423165 }, { "content": "pub fn shield_notif<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = shield_notif_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(ShieldNotifBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 67, "score": 46222.382037423165 }, { "content": "pub fn vault_spending<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = send_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(WarningBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 68, "score": 46222.382037423165 }, { "content": "pub fn vault_spent<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = send_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(SuccessBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 69, "score": 46222.382037423165 }, { "content": "pub fn vault_unconfirmed<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = deposit_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(WarningBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 70, "score": 46222.382037423165 }, { "content": "pub fn vault_canceling<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = turnback_icon().width(Length::Units(20));\n\n 
Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(WarningBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 71, "score": 46222.382037423165 }, { "content": "pub fn tx_deposit<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = deposit_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(TxDepositBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 72, "score": 46222.382037423165 }, { "content": "pub fn vault_unvaulting<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = send_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(WarningBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 73, "score": 46222.382037423165 }, { "content": "pub fn pending_spent_tx<'a, T: 'a>() -> Container<'a, T> {\n\n let icon = send_icon().width(Length::Units(20));\n\n Container::new(icon)\n\n .width(Length::Units(40))\n\n .height(Length::Units(40))\n\n .style(InactiveBadgeStyle)\n\n .align_x(iced::Align::Center)\n\n .align_y(iced::Align::Center)\n\n}\n\n\n", "file_path": "src/ui/component/badge.rs", "rank": 74, "score": 45346.75801711995 }, { "content": "pub fn paragraph<'a, T: 'a>(s: &str) -> Container<'a, T> {\n\n Container::new(Text::new(s).font(font::REGULAR))\n\n}\n\n\n", "file_path": "src/ui/component/text.rs", "rank": 75, "score": 44841.57516188397 }, { "content": "pub fn charging_syncing_view(progress: &f64) -> Element<'static, Message> {\n\n layout::cover(component::text::paragraph(&format!(\n\n \"Syncing... 
{}%\",\n\n progress\n\n )))\n\n}\n\n\n", "file_path": "src/app/view/charging.rs", "rank": 76, "score": 44536.71515308706 }, { "content": "pub fn charging_error_view(error: &str) -> Element<'static, Message> {\n\n layout::cover(component::text::paragraph(&format!(\"Error: {}\", error)))\n\n}\n", "file_path": "src/app/view/charging.rs", "rank": 77, "score": 44536.71515308706 }, { "content": "pub fn cover<'a, T: 'a>(content: Container<'a, T>) -> Element<'a, T> {\n\n Column::new()\n\n .push(large_logo())\n\n .push(content)\n\n .width(iced::Length::Fill)\n\n .height(iced::Length::Fill)\n\n .padding(50)\n\n .spacing(50)\n\n .align_items(iced::Align::Center)\n\n .into()\n\n}\n\n\n", "file_path": "src/app/view/layout.rs", "rank": 78, "score": 41992.05927179993 }, { "content": "pub fn main_section<'a, T: 'a>(menu: Container<'a, T>) -> Container<'a, T> {\n\n Container::new(menu.max_width(1500))\n\n .padding(20)\n\n .style(MainSectionStyle)\n\n .align_x(iced::Align::Center)\n\n .width(Length::Fill)\n\n .height(Length::Fill)\n\n}\n\n\n\npub struct MainSectionStyle;\n\nimpl container::StyleSheet for MainSectionStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: color::BACKGROUND.into(),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n", "file_path": "src/app/view/layout.rs", "rank": 79, "score": 41207.75669686943 }, { "content": "pub fn navbar<'a, T: 'a>(notification: Option<Container<'a, T>>) -> Container<'a, T> {\n\n let svg = revault_colored_logo()\n\n .width(Length::Units(100))\n\n .height(Length::Fill);\n\n let mut content = Row::new()\n\n .push(Column::new().width(Length::Units(10)))\n\n .push(\n\n Container::new(svg)\n\n .padding(5)\n\n .center_x()\n\n .width(Length::Shrink),\n\n );\n\n\n\n if let Some(n) = notification {\n\n content = content.push(Container::new(n).width(Length::Fill));\n\n }\n\n Container::new(content)\n\n .width(Length::Fill)\n\n .padding(10)\n\n .style(NavbarStyle)\n", "file_path": "src/ui/component/mod.rs", 
"rank": 80, "score": 40704.99090862818 }, { "content": "pub fn navbar_warning<'a, T: 'a>(warning: Option<&Error>) -> Option<Container<'a, T>> {\n\n if let Some(e) = warning {\n\n return Some(card::alert_warning(Container::new(text::simple(&format!(\n\n \"{}\",\n\n e\n\n )))));\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/app/view/layout.rs", "rank": 81, "score": 40463.84789362461 }, { "content": "pub fn sidebar_menu<'a, T: 'a>(items: Vec<Container<'a, T>>) -> Container<'a, T> {\n\n let mut col = Column::new().padding(15).spacing(15);\n\n for i in items {\n\n col = col.push(i)\n\n }\n\n Container::new(col).style(SidebarMenuStyle)\n\n}\n\n\n", "file_path": "src/app/view/layout.rs", "rank": 82, "score": 39961.08210538335 }, { "content": "use iced::{scrollable, Column, Container, Element, Length, Row};\n\n\n\nuse crate::{\n\n app::{\n\n error::Error,\n\n message::Message,\n\n view::{layout, sidebar::Sidebar, Context},\n\n },\n\n ui::{\n\n color,\n\n component::{badge, card, navbar, scroll, text},\n\n icon::dot_icon,\n\n },\n\n};\n\n\n\n#[derive(Debug)]\n\npub struct ManagerNetworkView {\n\n sidebar: Sidebar,\n\n scroll: scrollable::State,\n\n}\n", "file_path": "src/app/view/network.rs", "rank": 83, "score": 38612.232791290415 }, { "content": " Container::new(\n\n Column::new()\n\n .push(bitcoin_core_card(blockheight))\n\n .spacing(20),\n\n ),\n\n ))),\n\n )\n\n .into()\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StakeholderNetworkView {\n\n sidebar: Sidebar,\n\n scroll: scrollable::State,\n\n}\n\n\n\nimpl StakeholderNetworkView {\n\n pub fn new() -> Self {\n\n StakeholderNetworkView {\n", "file_path": "src/app/view/network.rs", "rank": 84, "score": 38610.8956942117 }, { "content": "\n\nimpl ManagerNetworkView {\n\n pub fn new() -> Self {\n\n ManagerNetworkView {\n\n scroll: scrollable::State::new(),\n\n sidebar: Sidebar::new(),\n\n }\n\n }\n\n\n\n pub fn view<'a>(\n\n &'a mut self,\n\n ctx: &Context,\n\n warning: Option<&Error>,\n\n blockheight: Option<&u64>,\n\n 
) -> Element<'a, Message> {\n\n layout::dashboard(\n\n navbar(layout::navbar_warning(warning)),\n\n self.sidebar.view(ctx),\n\n layout::main_section(Container::new(scroll(\n\n &mut self.scroll,\n", "file_path": "src/app/view/network.rs", "rank": 85, "score": 38609.73335293361 }, { "content": " .push(\n\n Column::new()\n\n .push(text::bold(text::simple(\"Block Height\")))\n\n .push(text::simple(&b.to_string())),\n\n )\n\n .spacing(10),\n\n );\n\n }\n\n card::simple(Container::new(col))\n\n}\n", "file_path": "src/app/view/network.rs", "rank": 86, "score": 38605.656613707215 }, { "content": " scroll: scrollable::State::new(),\n\n sidebar: Sidebar::new(),\n\n }\n\n }\n\n\n\n pub fn view<'a>(\n\n &'a mut self,\n\n ctx: &Context,\n\n warning: Option<&Error>,\n\n blockheight: Option<&u64>,\n\n ) -> Element<'a, Message> {\n\n layout::dashboard(\n\n navbar(layout::navbar_warning(warning)),\n\n self.sidebar.view(ctx),\n\n layout::main_section(Container::new(scroll(\n\n &mut self.scroll,\n\n Container::new(\n\n Column::new()\n\n .push(bitcoin_core_card(blockheight))\n\n .spacing(20),\n\n ),\n\n ))),\n\n )\n\n .into()\n\n }\n\n}\n\n\n", "file_path": "src/app/view/network.rs", "rank": 87, "score": 38604.734780521496 }, { "content": "pub fn button_content<'a, T: 'a>(icon: Option<iced::Text>, text: &str) -> Container<'a, T> {\n\n match icon {\n\n None => Container::new(text::simple(text)).padding(5),\n\n Some(i) => Container::new(\n\n Row::new()\n\n .push(i)\n\n .push(text::simple(text))\n\n .spacing(10)\n\n .align_items(iced::Align::Center),\n\n )\n\n .padding(5),\n\n }\n\n}\n\n\n", "file_path": "src/ui/component/button.rs", "rank": 88, "score": 38119.23583475775 }, { "content": "pub fn sidebar<'a, T: 'a>(menu: Container<'a, T>, footer: Container<'a, T>) -> Container<'a, T> {\n\n Container::new(\n\n Column::new()\n\n .padding(10)\n\n .push(menu.height(Length::Fill))\n\n .push(footer.height(Length::Shrink)),\n\n )\n\n .style(SidebarStyle)\n\n}\n\n\n\npub struct 
SidebarStyle;\n\nimpl container::StyleSheet for SidebarStyle {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: color::FOREGROUND.into(),\n\n border_width: 1.0,\n\n border_color: color::SECONDARY,\n\n ..container::Style::default()\n\n }\n\n }\n", "file_path": "src/app/view/layout.rs", "rank": 89, "score": 37965.58605389633 }, { "content": "fn bitcoin_core_card<'a, T: 'a>(blockheight: Option<&u64>) -> Container<'a, T> {\n\n let mut col = Column::new()\n\n .push(\n\n Row::new()\n\n .push(Container::new(text::bold(text::simple(\"Bitcoin Core\"))).width(Length::Fill))\n\n .push(\n\n Container::new(\n\n Row::new()\n\n .push(dot_icon().size(5).color(color::SUCCESS))\n\n .push(text::small(\"Running\").color(color::SUCCESS))\n\n .align_items(iced::Align::Center),\n\n )\n\n .width(Length::Shrink),\n\n ),\n\n )\n\n .spacing(10);\n\n if let Some(b) = blockheight {\n\n col = col.push(\n\n Row::new()\n\n .push(badge::block())\n", "file_path": "src/app/view/network.rs", "rank": 90, "score": 29418.97512189877 }, { "content": "use serde::Deserialize;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse crate::revaultd::config::default_datadir;\n\n\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct Config {\n\n /// Path to revaultd configuration file.\n\n pub revaultd_config_path: PathBuf,\n\n /// Path to revaultd binary.\n\n pub revaultd_path: Option<PathBuf>,\n\n /// log level, can be \"info\", \"debug\", \"trace\".\n\n pub log_level: Option<String>,\n\n /// Use iced debug feature if true.\n\n pub debug: Option<bool>,\n\n}\n\n\n\nimpl Config {\n\n pub fn from_file(path: &Path) -> Result<Self, ConfigError> {\n\n let config = std::fs::read(path)\n", "file_path": "src/app/config.rs", "rank": 94, "score": 21.104089279138257 }, { "content": "use bitcoin::{util::psbt::PartiallySignedTransaction, Transaction};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// getdepositaddress response\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct DepositAddress 
{\n\n pub address: bitcoin::Address,\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct Vault {\n\n /// Address of the vault deposit\n\n pub address: String,\n\n /// Amount of the vault in satoshis\n\n pub amount: u64,\n\n /// derivation_index is the index used to create scriptPubKey of the deposit address\n\n pub derivation_index: u32,\n\n /// Timestamp of the deposit transaction reception time.\n\n pub received_at: i64,\n\n /// Status of the vault\n", "file_path": "src/revaultd/model.rs", "rank": 95, "score": 19.539061967367086 }, { "content": "pub mod error;\n\nuse error::Error;\n\n\n\n#[cfg(windows)]\n\nuse uds_windows::UnixStream;\n\n\n\n#[cfg(not(windows))]\n\nuse std::os::unix::net::UnixStream;\n\n\n\nuse std::fmt::Debug;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::Duration;\n\n\n\nuse serde::de::DeserializeOwned;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{to_writer, Deserializer};\n\n\n\nuse tracing::debug;\n\n\n\n/// A handle to a remote JSONRPC server\n", "file_path": "src/revaultd/client/mod.rs", "rank": 96, "score": 17.21173898537394 }, { "content": " #[serde(with = \"bitcoin_psbt\")]\n\n pub psbt: PartiallySignedTransaction,\n\n pub deposit_outpoints: Vec<String>,\n\n}\n\n\n\nmod bitcoin_transaction {\n\n use bitcoin::{consensus::encode, hashes::hex::FromHex, Transaction};\n\n use serde::{self, Deserialize, Deserializer};\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<Transaction, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let s = String::deserialize(deserializer)?;\n\n let bytes = Vec::from_hex(&s).map_err(serde::de::Error::custom)?;\n\n encode::deserialize::<Transaction>(&bytes).map_err(serde::de::Error::custom)\n\n }\n\n}\n\n\n\nmod bitcoin_psbt {\n", "file_path": "src/revaultd/model.rs", "rank": 97, "score": 16.538604492493516 }, { "content": "use serde_json::json;\n\nuse std::collections::HashMap;\n\nuse std::fmt::Debug;\n\nuse std::path::Path;\n\nuse 
std::process::Command;\n\n\n\nuse bitcoin::{base64, consensus, util::psbt::PartiallySignedTransaction as Psbt};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::{Deserialize, Serialize};\n\nuse tracing::{debug, error, info, span, Level};\n\n\n\nmod client;\n\npub mod config;\n\npub mod model;\n\n\n\nuse client::Client;\n\nuse config::Config;\n\nuse model::{\n\n DepositAddress, RevocationTransactions, SpendTransaction, SpendTx, SpendTxStatus,\n\n UnvaultTransaction, Vault, VaultStatus, VaultTransactions,\n", "file_path": "src/revaultd/mod.rs", "rank": 98, "score": 16.511222133277233 }, { "content": "use crate::revaultd::{config::ConfigError, RevaultDError};\n\nuse std::convert::From;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Error {\n\n ConfigError(ConfigError),\n\n RevaultDError(RevaultDError),\n\n UnexpectedError(String),\n\n}\n\n\n\nimpl std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n Self::ConfigError(e) => write!(f, \"Config error: {}\", e),\n\n Self::RevaultDError(e) => write!(f, \"RevaultD error: {}\", e),\n\n Self::UnexpectedError(e) => write!(f, \"Unexpected error: {}\", e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/app/error.rs", "rank": 99, "score": 16.14208964198735 } ]
Rust
src/components/boid.rs
Luke-Draper/Boids
5ddfe3ecc3e34721ed3c6df7dc92cff41fd1e015
use super::player_control::PlayerControl; use super::velocity::Velocity; use amethyst::{ assets::AssetLoaderSystemData, core::{math::Vector3, transform::Transform}, ecs::{prelude::EntityBuilder, Component, VecStorage, World}, prelude::*, renderer::{ rendy::mesh::{Normal, Position, Tangent, TexCoord}, shape::Shape, Material, MaterialDefaults, Mesh, }, }; use serde::{Deserialize, Serialize}; pub enum WingFlapStage { Up, Middle, Down, Ground, } #[derive(Debug, Deserialize, Serialize)] pub enum BoidSpecies { Test, Sparrow, Robin, Cardinal, Bluejay, Eagle, Duck, Goose, Falcon, } pub struct Boid { pub flap_stage: WingFlapStage, pub flap_time: f64, pub step_time: f64, pub hunger: f32, pub flock_id: u8, } impl Component for Boid { type Storage = VecStorage<Self>; } pub fn initialize_boid_default(world: &mut World) { initialize_boid( world, Vector3::new(0.0, 0.0, 0.0), Velocity { velocity: 0.0, direction: Vector3::new(0.0, 0.0, 0.0), }, 0.0, 0.0, ) } pub fn initialize_boid( world: &mut World, position: Vector3<f32>, velocity: Velocity, direction: f32, pitch: f32, ) { setup_boid(world, position, velocity, direction, pitch).build(); } pub fn initialize_player_boid_default(world: &mut World) { initialize_player_boid( world, Vector3::new(0.0, 0.0, 0.0), Velocity { velocity: 0.0, direction: Vector3::new(0.0, 0.0, 0.0), }, 0.0, 0.0, ) } pub fn initialize_player_boid( world: &mut World, position: Vector3<f32>, velocity: Velocity, direction: f32, pitch: f32, ) { setup_boid(world, position, velocity, direction, pitch) .with(PlayerControl {}) .build(); } fn setup_boid( world: &mut World, position: Vector3<f32>, velocity: Velocity, direction: f32, pitch: f32, ) -> EntityBuilder<'_> { let mut trans = Transform::default(); trans .set_translation_xyz(position[0], position[1], position[2]) .set_rotation_euler(direction, pitch, 0.0); let mesh = world.exec(|loader: AssetLoaderSystemData<'_, Mesh>| { loader.load_from_data( Shape::Cone(100) .generate::<(Vec<Position>, Vec<Normal>, 
Vec<Tangent>, Vec<TexCoord>)>(None) .into(), (), ) }); let material_defaults = world.read_resource::<MaterialDefaults>().0.clone(); let material = world.exec(|loader: AssetLoaderSystemData<'_, Material>| { loader.load_from_data( Material { ..material_defaults }, (), ) }); world .create_entity() .with(mesh) .with(material) .with(trans) .with(velocity) .with(Boid { flap_stage: WingFlapStage::Up, flap_time: 0.0, step_time: 0.0, hunger: 0.0, flock_id: 0, }) }
use super::player_control::PlayerControl; use super::velocity::Velocity; use amethyst::{ assets::AssetLoaderSystemData, core::{math:
f64, pub step_time: f64, pub hunger: f32, pub flock_id: u8, } impl Component for Boid { type Storage = VecStorage<Self>; } pub fn initialize_boid_default(world: &mut World) { initialize_boid( world, Vector3::new(0.0, 0.0, 0.0), Velocity { velocity: 0.0, direction: Vector3::new(0.0, 0.0, 0.0), }, 0.0, 0.0, ) } pub fn initialize_boid( world: &mut World, position: Vector3<f32>, velocity: Velocity, direction: f32, pitch: f32, ) { setup_boid(world, position, velocity, direction, pitch).build(); } pub fn initialize_player_boid_default(world: &mut World) { initialize_player_boid( world, Vector3::new(0.0, 0.0, 0.0), Velocity { velocity: 0.0, direction: Vector3::new(0.0, 0.0, 0.0), }, 0.0, 0.0, ) } pub fn initialize_player_boid( world: &mut World, position: Vector3<f32>, velocity: Velocity, direction: f32, pitch: f32, ) { setup_boid(world, position, velocity, direction, pitch) .with(PlayerControl {}) .build(); } fn setup_boid( world: &mut World, position: Vector3<f32>, velocity: Velocity, direction: f32, pitch: f32, ) -> EntityBuilder<'_> { let mut trans = Transform::default(); trans .set_translation_xyz(position[0], position[1], position[2]) .set_rotation_euler(direction, pitch, 0.0); let mesh = world.exec(|loader: AssetLoaderSystemData<'_, Mesh>| { loader.load_from_data( Shape::Cone(100) .generate::<(Vec<Position>, Vec<Normal>, Vec<Tangent>, Vec<TexCoord>)>(None) .into(), (), ) }); let material_defaults = world.read_resource::<MaterialDefaults>().0.clone(); let material = world.exec(|loader: AssetLoaderSystemData<'_, Material>| { loader.load_from_data( Material { ..material_defaults }, (), ) }); world .create_entity() .with(mesh) .with(material) .with(trans) .with(velocity) .with(Boid { flap_stage: WingFlapStage::Up, flap_time: 0.0, step_time: 0.0, hunger: 0.0, flock_id: 0, }) }
:Vector3, transform::Transform}, ecs::{prelude::EntityBuilder, Component, VecStorage, World}, prelude::*, renderer::{ rendy::mesh::{Normal, Position, Tangent, TexCoord}, shape::Shape, Material, MaterialDefaults, Mesh, }, }; use serde::{Deserialize, Serialize}; pub enum WingFlapStage { Up, Middle, Down, Ground, } #[derive(Debug, Deserialize, Serialize)] pub enum BoidSpecies { Test, Sparrow, Robin, Cardinal, Bluejay, Eagle, Duck, Goose, Falcon, } pub struct Boid { pub flap_stage: WingFlapStage, pub flap_time:
random
[]
Rust
crates/nu-parser/src/lite_parse.rs
Amanita-muscaria/nushell
416ba1407b8553f5da4a6f8ad59c64b85fda3fb4
use std::iter::Peekable; use std::str::CharIndices; use nu_source::{Span, Spanned, SpannedItem}; use crate::errors::{ParseError, ParseResult}; type Input<'t> = Peekable<CharIndices<'t>>; #[derive(Debug, Clone)] pub struct LiteCommand { pub name: Spanned<String>, pub args: Vec<Spanned<String>>, } impl LiteCommand { fn new(name: Spanned<String>) -> LiteCommand { LiteCommand { name, args: vec![] } } pub(crate) fn span(&self) -> Span { let start = self.name.span.start(); let end = if let Some(x) = self.args.last() { x.span.end() } else { self.name.span.end() }; Span::new(start, end) } } #[derive(Debug, Clone)] pub struct LitePipeline { pub commands: Vec<LiteCommand>, } #[derive(Debug, Clone)] pub struct LiteBlock { pub block: Vec<LitePipeline>, } impl From<Spanned<String>> for LiteCommand { fn from(v: Spanned<String>) -> LiteCommand { LiteCommand::new(v) } } fn skip_whitespace(src: &mut Input) { while let Some((_, x)) = src.peek() { if x.is_whitespace() { let _ = src.next(); } else { break; } } } enum BlockKind { Paren, CurlyBracket, SquareBracket, } fn bare(src: &mut Input, span_offset: usize) -> ParseResult<Spanned<String>> { skip_whitespace(src); let mut bare = String::new(); let start_offset = if let Some((pos, _)) = src.peek() { *pos } else { 0 }; let mut inside_quote: Option<char> = None; let mut block_level: Vec<BlockKind> = vec![]; while let Some((_, c)) = src.peek() { let c = *c; if inside_quote.is_some() { if Some(c) == inside_quote { inside_quote = None; } } else if c == '\'' || c == '"' || c == '`' { inside_quote = Some(c); } else if c == '[' { block_level.push(BlockKind::SquareBracket); } else if c == ']' { if let Some(BlockKind::SquareBracket) = block_level.last() { let _ = block_level.pop(); } } else if c == '{' { block_level.push(BlockKind::CurlyBracket); } else if c == '}' { if let Some(BlockKind::CurlyBracket) = block_level.last() { let _ = block_level.pop(); } } else if c == '(' { block_level.push(BlockKind::Paren); } else if c == ')' { if let 
Some(BlockKind::Paren) = block_level.last() { let _ = block_level.pop(); } } else if block_level.is_empty() && (c.is_whitespace() || c == '|' || c == ';') { break; } bare.push(c); let _ = src.next(); } let span = Span::new( start_offset + span_offset, start_offset + span_offset + bare.len(), ); if let Some(block) = block_level.last() { return Err(ParseError { cause: nu_errors::ParseError::unexpected_eof( match block { BlockKind::Paren => ")", BlockKind::SquareBracket => "]", BlockKind::CurlyBracket => "}", }, span, ), partial: Some(bare.spanned(span)), }); } if let Some(delimiter) = inside_quote { bare.push(delimiter); let span = Span::new( start_offset + span_offset, start_offset + span_offset + bare.len(), ); return Err(ParseError { cause: nu_errors::ParseError::unexpected_eof(delimiter.to_string(), span), partial: Some(bare.spanned(span)), }); } if bare.is_empty() { return Err(ParseError { cause: nu_errors::ParseError::unexpected_eof("command", span), partial: Some(bare.spanned(span)), }); } Ok(bare.spanned(span)) } fn command(src: &mut Input, span_offset: usize) -> ParseResult<LiteCommand> { let mut cmd = match bare(src, span_offset) { Ok(v) => LiteCommand::new(v), Err(e) => { return Err(ParseError { cause: e.cause, partial: e.partial.map(LiteCommand::new), }); } }; loop { skip_whitespace(src); if let Some((_, c)) = src.peek() { match c { ';' => { break; } '|' => { let _ = src.next(); if let Some((pos, next_c)) = src.peek() { if *next_c == '|' { let span = Span::new(pos - 1 + span_offset, pos + 1 + span_offset); cmd.args.push("||".to_string().spanned(span)); let _ = src.next(); } else { break; } } else { break; } } _ => { match bare(src, span_offset) { Ok(v) => { cmd.args.push(v); } Err(e) => { if let Some(v) = e.partial { cmd.args.push(v); } return Err(ParseError { cause: e.cause, partial: Some(cmd), }); } } } } } else { break; } } Ok(cmd) } fn pipeline(src: &mut Input, span_offset: usize) -> ParseResult<LiteBlock> { let mut block = vec![]; let mut commands = 
vec![]; skip_whitespace(src); while src.peek().is_some() { let cmd = match command(src, span_offset) { Ok(v) => v, Err(e) => { if let Some(partial) = e.partial { commands.push(partial); block.push(LitePipeline { commands }); } return Err(ParseError { cause: e.cause, partial: Some(LiteBlock { block }), }); } }; commands.push(cmd); skip_whitespace(src); if let Some((_, ';')) = src.peek() { let _ = src.next(); if !commands.is_empty() { block.push(LitePipeline { commands }); commands = vec![]; } } } if !commands.is_empty() { block.push(LitePipeline { commands }); } Ok(LiteBlock { block }) } pub fn lite_parse(src: &str, span_offset: usize) -> ParseResult<LiteBlock> { pipeline(&mut src.char_indices().peekable(), span_offset) } #[cfg(test)] mod tests { use super::*; mod bare { use super::*; #[test] fn simple_1() { let input = "foo bar baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 3); } #[test] fn simple_2() { let input = "'foo bar' baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 9); } #[test] fn simple_3() { let input = "'foo\" bar' baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 10); } #[test] fn simple_4() { let input = "[foo bar] baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 9); } #[test] fn simple_5() { let input = "'foo 'bar baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 9); } #[test] fn simple_6() { let input = "''foo baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); 
assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 5); } #[test] fn simple_7() { let input = "'' foo"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 2); } #[test] fn simple_8() { let input = " '' foo"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 1); assert_eq!(result.span.end(), 3); } #[test] fn simple_9() { let input = " 'foo' foo"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 1); assert_eq!(result.span.end(), 6); } #[test] fn ignore_future() { let input = "foo 'bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 3); } #[test] fn invalid_1() { let input = "'foo bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0); assert_eq!(result.is_ok(), false); } #[test] fn invalid_2() { let input = "'bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0); assert_eq!(result.is_ok(), false); } #[test] fn invalid_4() { let input = " 'bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0); assert_eq!(result.is_ok(), false); } } mod lite_parse { use super::*; #[test] fn simple_1() { let result = lite_parse("foo", 0).unwrap(); assert_eq!(result.block.len(), 1); assert_eq!(result.block[0].commands.len(), 1); assert_eq!(result.block[0].commands[0].name.span.start(), 0); assert_eq!(result.block[0].commands[0].name.span.end(), 3); } #[test] fn simple_offset() { let result = lite_parse("foo", 10).unwrap(); assert_eq!(result.block.len(), 1); assert_eq!(result.block[0].commands.len(), 1); assert_eq!(result.block[0].commands[0].name.span.start(), 10); assert_eq!(result.block[0].commands[0].name.span.end(), 13); } #[test] fn 
incomplete_result() { let result = lite_parse("my_command \"foo' --test", 10).unwrap_err(); assert!(matches!(result.cause.reason(), nu_errors::ParseErrorReason::Eof { .. })); let result = result.partial.unwrap(); assert_eq!(result.block.len(), 1); assert_eq!(result.block[0].commands.len(), 1); assert_eq!(result.block[0].commands[0].name.item, "my_command"); assert_eq!(result.block[0].commands[0].args.len(), 1); assert_eq!(result.block[0].commands[0].args[0].item, "\"foo' --test\""); } } }
use std::iter::Peekable; use std::str::CharIndices; use nu_source::{Span, Spanned, SpannedItem}; use crate::errors::{ParseError, ParseResult}; type Input<'t> = Peekable<CharIndices<'t>>; #[derive(Debug, Clone)] pub struct LiteCommand { pub name: Spanned<String>, pub args: Vec<Spanned<String>>, } impl LiteCommand { fn new(name: Spanned<String>) -> LiteCommand { LiteCommand { name, args: vec![] } } pub(crate) fn span(&self) -> Span { let start = self.name.span.start(); let end = if let Some(x) = self.args.last() { x.span.end() } else { self.name.span.end() }; Span::new(start, end) } } #[derive(Debug, Clone)] pub struct LitePipeline { pub commands: Vec<LiteCommand>, } #[derive(Debug, Clone)] pub struct LiteBlock { pub block: Vec<LitePipeline>, } impl From<Spanned<String>> for LiteCommand { fn from(v: Spanned<String>) -> LiteCommand { LiteCommand::new(v) } } fn skip_whitespace(src: &mut Input) { while let Some((_, x)) = src.peek() { if x.is_whitespace() { let _ = src.next(); } else { break; } } } enum BlockKind { Paren, CurlyBracket, SquareBracket, } fn bare(src: &mut Input, span_offset: usize) -> ParseResult<Spanned<String>> { skip_whitespace(src); let mut bare = String::new(); let start_offset = if let Some((pos, _)) = src.peek() { *pos } else { 0 }; let mut inside_quote: Option<char> = None; let mut block_level: Vec<BlockKind> = vec![]; while let Some((_, c)) = src.peek() { let c = *c; if inside_quote.is_some() { if Some(c) == inside_quote { inside_quote = None; } } else if c == '\'' || c == '"' || c == '`' { inside_quote = Some(c); } else if c == '[' { block_level.push(BlockKind::SquareBracket); } else if c == ']' { if let Some(BlockKind::SquareBracket) = block_level.last() { let _ = block_level.pop(); } } else if c == '{' { block_level.push(BlockKind::CurlyBracket); } else if c == '}' { if let Some(BlockKind::CurlyBracket) = block_level.last() { let _ = block_level.pop(); } } else if c == '(' { block_level.push(BlockKind::Paren); } else if c == ')' { if let 
Some(BlockKind::Paren) = block_level.last() { let _ = block_level.pop(); } } else if block_level.is_empty() && (c.is_whitespace() || c == '|' || c == ';') { break; } bare.push(c); let _ = src.next(); } let span = Span::new( start_offset + span_offset, start_offset + span_offset + bare.len(), ); if let Some(block) = block_level.last() { return Err(ParseError { cause: nu_errors::ParseError::unexpected_eof(
, span, ), partial: Some(bare.spanned(span)), }); } if let Some(delimiter) = inside_quote { bare.push(delimiter); let span = Span::new( start_offset + span_offset, start_offset + span_offset + bare.len(), ); return Err(ParseError { cause: nu_errors::ParseError::unexpected_eof(delimiter.to_string(), span), partial: Some(bare.spanned(span)), }); } if bare.is_empty() { return Err(ParseError { cause: nu_errors::ParseError::unexpected_eof("command", span), partial: Some(bare.spanned(span)), }); } Ok(bare.spanned(span)) } fn command(src: &mut Input, span_offset: usize) -> ParseResult<LiteCommand> { let mut cmd = match bare(src, span_offset) { Ok(v) => LiteCommand::new(v), Err(e) => { return Err(ParseError { cause: e.cause, partial: e.partial.map(LiteCommand::new), }); } }; loop { skip_whitespace(src); if let Some((_, c)) = src.peek() { match c { ';' => { break; } '|' => { let _ = src.next(); if let Some((pos, next_c)) = src.peek() { if *next_c == '|' { let span = Span::new(pos - 1 + span_offset, pos + 1 + span_offset); cmd.args.push("||".to_string().spanned(span)); let _ = src.next(); } else { break; } } else { break; } } _ => { match bare(src, span_offset) { Ok(v) => { cmd.args.push(v); } Err(e) => { if let Some(v) = e.partial { cmd.args.push(v); } return Err(ParseError { cause: e.cause, partial: Some(cmd), }); } } } } } else { break; } } Ok(cmd) } fn pipeline(src: &mut Input, span_offset: usize) -> ParseResult<LiteBlock> { let mut block = vec![]; let mut commands = vec![]; skip_whitespace(src); while src.peek().is_some() { let cmd = match command(src, span_offset) { Ok(v) => v, Err(e) => { if let Some(partial) = e.partial { commands.push(partial); block.push(LitePipeline { commands }); } return Err(ParseError { cause: e.cause, partial: Some(LiteBlock { block }), }); } }; commands.push(cmd); skip_whitespace(src); if let Some((_, ';')) = src.peek() { let _ = src.next(); if !commands.is_empty() { block.push(LitePipeline { commands }); commands = vec![]; } } } if 
!commands.is_empty() { block.push(LitePipeline { commands }); } Ok(LiteBlock { block }) } pub fn lite_parse(src: &str, span_offset: usize) -> ParseResult<LiteBlock> { pipeline(&mut src.char_indices().peekable(), span_offset) } #[cfg(test)] mod tests { use super::*; mod bare { use super::*; #[test] fn simple_1() { let input = "foo bar baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 3); } #[test] fn simple_2() { let input = "'foo bar' baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 9); } #[test] fn simple_3() { let input = "'foo\" bar' baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 10); } #[test] fn simple_4() { let input = "[foo bar] baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 9); } #[test] fn simple_5() { let input = "'foo 'bar baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 9); } #[test] fn simple_6() { let input = "''foo baz"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 5); } #[test] fn simple_7() { let input = "'' foo"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 2); } #[test] fn simple_8() { let input = " '' foo"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 1); assert_eq!(result.span.end(), 3); } #[test] fn 
simple_9() { let input = " 'foo' foo"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 1); assert_eq!(result.span.end(), 6); } #[test] fn ignore_future() { let input = "foo 'bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0).unwrap(); assert_eq!(result.span.start(), 0); assert_eq!(result.span.end(), 3); } #[test] fn invalid_1() { let input = "'foo bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0); assert_eq!(result.is_ok(), false); } #[test] fn invalid_2() { let input = "'bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0); assert_eq!(result.is_ok(), false); } #[test] fn invalid_4() { let input = " 'bar"; let input = &mut input.char_indices().peekable(); let result = bare(input, 0); assert_eq!(result.is_ok(), false); } } mod lite_parse { use super::*; #[test] fn simple_1() { let result = lite_parse("foo", 0).unwrap(); assert_eq!(result.block.len(), 1); assert_eq!(result.block[0].commands.len(), 1); assert_eq!(result.block[0].commands[0].name.span.start(), 0); assert_eq!(result.block[0].commands[0].name.span.end(), 3); } #[test] fn simple_offset() { let result = lite_parse("foo", 10).unwrap(); assert_eq!(result.block.len(), 1); assert_eq!(result.block[0].commands.len(), 1); assert_eq!(result.block[0].commands[0].name.span.start(), 10); assert_eq!(result.block[0].commands[0].name.span.end(), 13); } #[test] fn incomplete_result() { let result = lite_parse("my_command \"foo' --test", 10).unwrap_err(); assert!(matches!(result.cause.reason(), nu_errors::ParseErrorReason::Eof { .. })); let result = result.partial.unwrap(); assert_eq!(result.block.len(), 1); assert_eq!(result.block[0].commands.len(), 1); assert_eq!(result.block[0].commands[0].name.item, "my_command"); assert_eq!(result.block[0].commands[0].args.len(), 1); assert_eq!(result.block[0].commands[0].args[0].item, "\"foo' --test\""); } } }
match block { BlockKind::Paren => ")", BlockKind::SquareBracket => "]", BlockKind::CurlyBracket => "}", }
if_condition
[ { "content": "pub fn span_for_spanned_list(mut iter: impl Iterator<Item = Span>) -> Span {\n\n let first = iter.next();\n\n\n\n let first = match first {\n\n None => return Span::unknown(),\n\n Some(first) => first,\n\n };\n\n\n\n let last = iter.last();\n\n\n\n match last {\n\n None => first,\n\n Some(last) => first.until(last),\n\n }\n\n}\n\n\n\n/// A `Span` is metadata which indicates the start and end positions.\n\n///\n\n/// `Span`s are combined with `AnchorLocation`s to form another type of metadata, a `Tag`.\n\n/// A `Span`'s end position must be greater than or equal to its start position.\n", "file_path": "crates/nu-source/src/meta.rs", "rank": 3, "score": 370771.1542456407 }, { "content": "/// Converts a series of commands into a vec of spanned shapes ready for color-highlighting\n\npub fn shapes(commands: &Block) -> Vec<Spanned<FlatShape>> {\n\n let mut output = vec![];\n\n\n\n for pipeline in &commands.block {\n\n for command in &pipeline.list {\n\n match command {\n\n ClassifiedCommand::Internal(internal) => {\n\n output.append(&mut expression_to_flat_shape(&internal.args.head));\n\n\n\n if let Some(positionals) = &internal.args.positional {\n\n for positional_arg in positionals {\n\n output.append(&mut expression_to_flat_shape(positional_arg));\n\n }\n\n }\n\n\n\n if let Some(named) = &internal.args.named {\n\n for (_, named_arg) in named.iter() {\n\n match named_arg {\n\n NamedValue::PresentSwitch(span) => {\n\n output.push(FlatShape::Flag.spanned(*span));\n", "file_path": "crates/nu-parser/src/shapes.rs", "rank": 4, "score": 369105.9677187208 }, { "content": "pub fn to_bson(input: Vec<Value>, name_tag: Tag) -> Vec<ReturnValue> {\n\n let name_span = name_tag.span;\n\n\n\n let to_process_input = match input.len() {\n\n x if x > 1 => {\n\n let tag = input[0].tag.clone();\n\n vec![Value {\n\n value: UntaggedValue::Table(input),\n\n tag,\n\n }]\n\n }\n\n 1 => input,\n\n _ => vec![],\n\n };\n\n\n\n to_process_input\n\n .into_iter()\n\n .map(move |value| 
match value_to_bson_value(&value) {\n\n Ok(bson_value) => {\n\n let value_span = value.tag.span;\n", "file_path": "crates/nu_plugin_to_bson/src/to_bson.rs", "rank": 5, "score": 368347.45963784744 }, { "content": "pub fn to_sqlite(input: Vec<Value>, name_tag: Tag) -> Result<Vec<ReturnValue>, ShellError> {\n\n match sqlite_input_stream_to_bytes(input) {\n\n Ok(out) => Ok(vec![ReturnSuccess::value(out)]),\n\n _ => Err(ShellError::labeled_error(\n\n \"Expected a table with SQLite-compatible structure from pipeline\",\n\n \"requires SQLite-compatible input\",\n\n name_tag,\n\n )),\n\n }\n\n}\n", "file_path": "crates/nu_plugin_to_sqlite/src/to_sqlite.rs", "rank": 6, "score": 346924.98419564904 }, { "content": "fn format(input: &str, start: usize) -> (Vec<FormatCommand>, Option<ParseError>) {\n\n let original_start = start;\n\n let mut output = vec![];\n\n let mut error = None;\n\n\n\n let mut loop_input = input.chars().peekable();\n\n let mut start = start;\n\n let mut end = start;\n\n loop {\n\n let mut before = String::new();\n\n\n\n let mut found_start = false;\n\n while let Some(c) = loop_input.next() {\n\n end += 1;\n\n if c == '{' {\n\n if let Some(x) = loop_input.peek() {\n\n if *x == '{' {\n\n found_start = true;\n\n end += 1;\n\n let _ = loop_input.next();\n", "file_path": "crates/nu-parser/src/parse.rs", "rank": 7, "score": 345009.7867410427 }, { "content": "pub fn column_width<'a>(input: &[Vec<Subline<'a>>]) -> usize {\n\n let mut max = 0;\n\n\n\n for line in input {\n\n let mut total = 0;\n\n\n\n let mut first = true;\n\n for inp in line {\n\n if !first {\n\n // Account for the space\n\n total += 1;\n\n } else {\n\n first = false;\n\n }\n\n\n\n total += inp.width;\n\n }\n\n\n\n if total > max {\n\n max = total;\n\n }\n\n }\n\n\n\n max\n\n}\n\n\n", "file_path": "crates/nu-table/src/wrap.rs", "rank": 9, "score": 328568.09002270736 }, { "content": "pub fn split_sublines(input: &str) -> Vec<Vec<Subline>> {\n\n input\n\n .split_terminator('\\n')\n\n .map(|line| 
{\n\n line.split_terminator(' ')\n\n .map(|x| Subline {\n\n subline: x,\n\n width: UnicodeWidthStr::width(x),\n\n })\n\n .collect::<Vec<_>>()\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "crates/nu-table/src/wrap.rs", "rank": 10, "score": 310507.979394176 }, { "content": "pub fn whole_stream_command(command: impl WholeStreamCommand + 'static) -> Command {\n\n Command(Arc::new(command))\n\n}\n", "file_path": "crates/nu-cli/src/commands/command.rs", "rank": 11, "score": 307036.2739348776 }, { "content": "pub fn from_sqlite(bytes: Vec<u8>, name_tag: Tag) -> Result<Vec<ReturnValue>, ShellError> {\n\n match from_sqlite_bytes_to_value(bytes, name_tag.clone()) {\n\n Ok(x) => match x {\n\n Value {\n\n value: UntaggedValue::Table(list),\n\n ..\n\n } => Ok(list.into_iter().map(ReturnSuccess::value).collect()),\n\n _ => Ok(vec![ReturnSuccess::value(x)]),\n\n },\n\n Err(_) => Err(ShellError::labeled_error(\n\n \"Could not parse as SQLite\",\n\n \"input cannot be parsed as SQLite\",\n\n &name_tag,\n\n )),\n\n }\n\n}\n", "file_path": "crates/nu_plugin_from_sqlite/src/from_sqlite.rs", "rank": 12, "score": 283185.2141542401 }, { "content": "pub fn from_bson(bytes: Vec<u8>, name_tag: Tag) -> Result<Vec<ReturnValue>, ShellError> {\n\n match from_bson_bytes_to_value(bytes, name_tag.clone()) {\n\n Ok(x) => Ok(vec![ReturnSuccess::value(x)]),\n\n Err(_) => Err(ShellError::labeled_error(\n\n \"Could not parse as BSON\",\n\n \"input cannot be parsed as BSON\",\n\n name_tag,\n\n )),\n\n }\n\n}\n", "file_path": "crates/nu_plugin_from_bson/src/from_bson.rs", "rank": 13, "score": 283185.2141542401 }, { "content": "fn format(input: &str) -> Vec<FormatCommand> {\n\n let mut output = vec![];\n\n\n\n let mut loop_input = input.chars();\n\n loop {\n\n let mut before = String::new();\n\n\n\n while let Some(c) = loop_input.next() {\n\n if c == '{' {\n\n break;\n\n }\n\n before.push(c);\n\n }\n\n\n\n if !before.is_empty() {\n\n output.push(FormatCommand::Text(before.to_string()));\n\n 
}\n\n // Look for column as we're now at one\n\n let mut column = String::new();\n\n\n", "file_path": "crates/nu-cli/src/commands/format.rs", "rank": 14, "score": 276777.6827030371 }, { "content": "pub fn format_type<'a>(value: impl Into<&'a UntaggedValue>, width: usize) -> String {\n\n Type::from_value(value.into()).colored_string(width)\n\n}\n\n\n", "file_path": "crates/nu-cli/src/data/value.rs", "rank": 15, "score": 272435.81682486954 }, { "content": "pub fn files_exist_at(files: Vec<impl AsRef<Path>>, path: impl AsRef<Path>) -> bool {\n\n files.iter().all(|f| {\n\n let mut loc = PathBuf::from(path.as_ref());\n\n loc.push(f);\n\n loc.exists()\n\n })\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 16, "score": 269157.30805960833 }, { "content": "pub fn file_contents_binary(full_path: impl AsRef<Path>) -> Vec<u8> {\n\n let mut file = std::fs::File::open(full_path.as_ref()).expect(\"can not open file\");\n\n let mut contents = Vec::new();\n\n file.read_to_end(&mut contents).expect(\"can not read file\");\n\n contents\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 17, "score": 267150.5180393832 }, { "content": "pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {\n\n let first = iter.next();\n\n\n\n let first = match first {\n\n None => return Tag::unknown(),\n\n Some(first) => first,\n\n };\n\n\n\n let last = iter.last();\n\n\n\n match last {\n\n None => first,\n\n Some(last) => first.until(last),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-source/src/meta.rs", "rank": 18, "score": 265494.1409655602 }, { "content": "/// Converts a SpannedExpression into a spanned shape(s) ready for color-highlighting\n\npub fn expression_to_flat_shape(e: &SpannedExpression) -> Vec<Spanned<FlatShape>> {\n\n match &e.expr {\n\n Expression::Block(exprs) => shapes(exprs),\n\n Expression::Invocation(exprs) => shapes(exprs),\n\n Expression::FilePath(_) => vec![FlatShape::Path.spanned(e.span)],\n\n Expression::Garbage => 
vec![FlatShape::Garbage.spanned(e.span)],\n\n Expression::List(exprs) => {\n\n let mut output = vec![];\n\n for expr in exprs.iter() {\n\n output.append(&mut expression_to_flat_shape(expr));\n\n }\n\n output\n\n }\n\n Expression::Path(exprs) => {\n\n let mut output = vec![];\n\n output.append(&mut expression_to_flat_shape(&exprs.head));\n\n for member in exprs.tail.iter() {\n\n if let UnspannedPathMember::String(_) = &member.unspanned {\n\n output.push(FlatShape::StringMember.spanned(member.span));\n\n }\n", "file_path": "crates/nu-parser/src/shapes.rs", "rank": 19, "score": 261839.02502389386 }, { "content": "pub fn compute_stddev(values: &[Value], n: usize, name: &Tag) -> Result<Value, ShellError> {\n\n let variance = variance(values, n, name)?.as_primitive()?;\n\n let sqrt_var = match variance {\n\n Primitive::Decimal(var) => var.sqrt(),\n\n _ => {\n\n return Err(ShellError::labeled_error(\n\n \"Could not take square root of variance\",\n\n \"error occured here\",\n\n name.span,\n\n ))\n\n }\n\n };\n\n match sqrt_var {\n\n Some(stddev) => Ok(UntaggedValue::from(Primitive::Decimal(stddev)).into_value(name)),\n\n None => Err(ShellError::labeled_error(\n\n \"Could not calculate stddev\",\n\n \"error occured here\",\n\n name.span,\n\n )),\n\n }\n", "file_path": "crates/nu-cli/src/commands/math/stddev.rs", "rank": 20, "score": 257617.67922357225 }, { "content": "pub fn compute_variance(values: &[Value], n: usize, name: &Tag) -> Result<Value, ShellError> {\n\n let ss = sum_of_squares(values, name)?;\n\n let n = BigDecimal::from_usize(n).ok_or_else(|| {\n\n ShellError::labeled_error(\n\n \"could not convert to big decimal\",\n\n \"could not convert to big decimal\",\n\n &name.span,\n\n )\n\n })?;\n\n let variance = compute_values(Operator::Divide, &ss, &n.into());\n\n match variance {\n\n Ok(value) => Ok(value.into_value(name)),\n\n Err((_, _)) => Err(ShellError::labeled_error(\n\n \"could not calculate variance of non-integer or unrelated types\",\n\n \"source\",\n\n 
name,\n\n )),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/math/variance.rs", "rank": 21, "score": 257617.67922357225 }, { "content": "/// Easy shorthand function to create a garbage expression at the given span\n\npub fn garbage(span: Span) -> SpannedExpression {\n\n SpannedExpression::new(Expression::Garbage, span)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse.rs", "rank": 22, "score": 256901.43436686706 }, { "content": "pub fn did_find_command(#[allow(unused)] name: &str) -> bool {\n\n #[cfg(not(feature = \"which\"))]\n\n {\n\n // we can't perform this check, so just assume it can be found\n\n true\n\n }\n\n\n\n #[cfg(all(feature = \"which\", unix))]\n\n {\n\n which::which(name).is_ok()\n\n }\n\n\n\n #[cfg(all(feature = \"which\", windows))]\n\n {\n\n if which::which(name).is_ok() {\n\n true\n\n } else {\n\n // Reference: https://ss64.com/nt/syntax-internal.html\n\n let cmd_builtins = [\n\n \"assoc\", \"break\", \"color\", \"copy\", \"date\", \"del\", \"dir\", \"dpath\", \"echo\", \"erase\",\n\n \"for\", \"ftype\", \"md\", \"mkdir\", \"mklink\", \"move\", \"path\", \"ren\", \"rename\", \"rd\",\n\n \"rmdir\", \"set\", \"start\", \"time\", \"title\", \"type\", \"ver\", \"verify\", \"vol\",\n\n ];\n\n\n\n cmd_builtins.contains(&name)\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/classified/external.rs", "rank": 23, "score": 256516.04903183045 }, { "content": "fn action(input: &Value, pattern: &str, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let ends_with = s.ends_with(pattern);\n\n Ok(UntaggedValue::boolean(ends_with).into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n\n \"value is not string\",\n\n got,\n\n tag.into().span,\n\n ))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": 
"crates/nu-cli/src/commands/str_/ends_with.rs", "rank": 24, "score": 255711.49428266208 }, { "content": "fn action(input: &Value, pattern: &str, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let starts_with = s.starts_with(pattern);\n\n Ok(UntaggedValue::boolean(starts_with).into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n\n \"value is not string\",\n\n got,\n\n tag.into().span,\n\n ))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "crates/nu-cli/src/commands/str_/starts_with.rs", "rank": 25, "score": 255692.8676583872 }, { "content": "pub fn test(cmd: impl WholeStreamCommand + 'static) {\n\n let examples = cmd.examples();\n\n let mut base_context = Context::basic().expect(\"could not create basic context\");\n\n\n\n base_context.add_commands(vec![\n\n whole_stream_command(Echo {}),\n\n whole_stream_command(BuildString {}),\n\n whole_stream_command(cmd),\n\n whole_stream_command(StrCollect),\n\n ]);\n\n\n\n for example in examples {\n\n let mut ctx = base_context.clone();\n\n let block = parse_line(example.example, &mut ctx).expect(\"failed to parse example\");\n\n if let Some(expected) = example.result {\n\n let result = block_on(evaluate_block(block, &mut ctx)).expect(\"failed to run example\");\n\n\n\n let errors = ctx.get_errors();\n\n\n\n assert!(\n", "file_path": "crates/nu-cli/src/examples.rs", "rank": 26, "score": 255176.90621054242 }, { "content": "/// A trait that allows structures to define a known way to return a spanned type name\n\npub trait SpannedTypeName {\n\n fn spanned_type_name(&self) -> Spanned<&'static str>;\n\n}\n\n\n\nimpl<T: ShellTypeName + HasSpan> SpannedTypeName for T {\n\n /// Return the type name as a spanned string\n\n fn spanned_type_name(&self) -> Spanned<&'static str> {\n\n 
self.type_name().spanned(self.span())\n\n }\n\n}\n\n\n\nimpl<T: ShellTypeName> SpannedTypeName for Tagged<T> {\n\n /// Return the spanned type name for a Tagged value\n\n fn spanned_type_name(&self) -> Spanned<&'static str> {\n\n self.item.type_name().spanned(self.tag.span)\n\n }\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/type_name.rs", "rank": 27, "score": 251343.11309649632 }, { "content": "/// Helper to create a pretty-print for the type\n\nfn ty(name: impl std::fmt::Display) -> DebugDocBuilder {\n\n b::kind(format!(\"{}\", name))\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/type_shape.rs", "rank": 28, "score": 249591.41263979045 }, { "content": "pub fn from_list(values: &[Value], starting_idx: usize) -> nu_table::Table {\n\n let config = crate::data::config::config(Tag::unknown());\n\n\n\n let header_style = if let Ok(config) = &config {\n\n let header_align = config.get(\"header_align\").map_or(Alignment::Left, |a| {\n\n a.as_string()\n\n .map_or(Alignment::Center, |a| match a.to_lowercase().as_str() {\n\n \"center\" | \"c\" => Alignment::Center,\n\n \"right\" | \"r\" => Alignment::Right,\n\n _ => Alignment::Center,\n\n })\n\n });\n\n\n\n let header_color = match config.get(\"header_color\") {\n\n Some(c) => match c.as_string() {\n\n Ok(color) => str_to_color(color.to_lowercase()).unwrap_or(ansi_term::Color::Green),\n\n _ => ansi_term::Color::Green,\n\n },\n\n _ => ansi_term::Color::Green,\n\n };\n", "file_path": "crates/nu-cli/src/commands/table.rs", "rank": 29, "score": 249193.1361015918 }, { "content": "pub fn get_children(row: &Value) -> Option<&Vec<Value>> {\n\n if let UntaggedValue::Row(r) = &row.value {\n\n if let Some(v) = r.entries.get(\"children\") {\n\n if let UntaggedValue::Table(t) = &v.value {\n\n return Some(t);\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/to_xml.rs", "rank": 30, "score": 247396.70130864353 }, { "content": "pub fn from_bson_bytes_to_value(bytes: Vec<u8>, tag: impl Into<Tag>) -> 
Result<Value, ShellError> {\n\n let mut docs = Vec::new();\n\n let mut b_reader = BytesReader::new(bytes);\n\n while let Ok(v) = decode_document(&mut b_reader) {\n\n docs.push(Bson::Document(v));\n\n }\n\n\n\n convert_bson_value_to_nu_value(&Bson::Array(docs), tag)\n\n}\n\n\n", "file_path": "crates/nu_plugin_from_bson/src/from_bson.rs", "rank": 31, "score": 241187.86349851498 }, { "content": "fn column_names(regex: &Regex) -> Vec<String> {\n\n regex\n\n .capture_names()\n\n .enumerate()\n\n .skip(1)\n\n .map(|(i, name)| {\n\n name.map(String::from)\n\n .unwrap_or_else(|| format!(\"Capture{}\", i))\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Command;\n\n\n\n #[test]\n\n fn examples_work_as_expected() {\n\n use crate::examples::test as test_examples;\n\n\n\n test_examples(Command {})\n\n }\n\n}\n", "file_path": "crates/nu-cli/src/commands/parse/command.rs", "rank": 32, "score": 239174.63864907133 }, { "content": "fn collect_values(input: &[Value]) -> Result<Vec<toml::Value>, ShellError> {\n\n let mut out = vec![];\n\n\n\n for value in input {\n\n out.push(helper(value)?);\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n\nasync fn to_toml(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let registry = registry.clone();\n\n let args = args.evaluate_once(&registry).await?;\n\n let name_tag = args.name_tag();\n\n let name_span = name_tag.span;\n\n let input: Vec<Value> = args.input.collect().await;\n\n\n", "file_path": "crates/nu-cli/src/commands/to_toml.rs", "rank": 33, "score": 238249.56740426394 }, { "content": "fn get_list_of_theme_names() -> Vec<String> {\n\n let asset = get_asset_by_name_as_html_themes(\"228_themes.zip\", \"228_themes.json\");\n\n\n\n // If asset doesn't work, make sure to return the default theme\n\n let html_themes = match asset {\n\n Ok(a) => a,\n\n _ => HtmlThemes::default(),\n\n };\n\n\n\n let theme_names: Vec<String> = html_themes\n\n .themes\n\n .iter()\n\n .map(|n| 
n.name[..].to_string())\n\n .collect();\n\n\n\n theme_names\n\n}\n\n\n\nasync fn to_html(\n\n args: CommandArgs,\n", "file_path": "crates/nu-cli/src/commands/to_html.rs", "rank": 34, "score": 238177.840054043 }, { "content": "fn single_style_span(style: Style, span: Span) -> Vec<Spanned<Style>> {\n\n vec![Spanned::<Style> { span, item: style }]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{Palette, ThemedPalette};\n\n use ansi_term::Color;\n\n use nu_protocol::hir::FlatShape;\n\n use nu_source::{Span, Spanned};\n\n use std::io::Cursor;\n\n\n\n #[test]\n\n fn create_themed_palette() {\n\n let json = r#\"\n\n{\n\n \"open_delimiter\": \"a359cc\",\n\n \"close_delimiter\": \"a359cc\",\n\n \"type\": \"a359cc\",\n\n \"identifier\": \"a359cc\",\n", "file_path": "crates/nu-cli/src/shell/palette.rs", "rank": 35, "score": 237371.77128403244 }, { "content": "pub fn min(data: Vec<Value>) -> Result<Value, ShellError> {\n\n let mut smallest = data\n\n .first()\n\n .ok_or_else(|| ShellError::unexpected(ERR_EMPTY_DATA))?\n\n .value\n\n .clone();\n\n\n\n for value in data.iter() {\n\n if let Ok(greater_than) = compare_values(Operator::LessThan, &value.value, &smallest) {\n\n if greater_than {\n\n smallest = value.value.clone();\n\n }\n\n } else {\n\n return Err(ShellError::unexpected(format!(\n\n \"Could not compare\\nleft: {:?}\\nright: {:?}\",\n\n smallest, value.value\n\n )));\n\n }\n\n }\n\n Ok(Value {\n\n value: smallest,\n\n tag: Tag::unknown(),\n\n })\n\n}\n", "file_path": "crates/nu-cli/src/commands/math/reducers.rs", "rank": 36, "score": 237066.0808450149 }, { "content": "pub fn sum(data: Vec<Value>) -> Result<Value, ShellError> {\n\n let mut acc = UntaggedValue::int(0).into_untagged_value();\n\n for value in data {\n\n match value.value {\n\n UntaggedValue::Primitive(_) => {\n\n acc = match compute_values(Operator::Plus, &acc, &value) {\n\n Ok(v) => v.into_untagged_value(),\n\n Err((left_type, right_type)) => {\n\n return Err(ShellError::coerce_error(\n\n 
left_type.spanned_unknown(),\n\n right_type.spanned_unknown(),\n\n ))\n\n }\n\n };\n\n }\n\n _ => {\n\n return Err(ShellError::labeled_error(\n\n \"Attempted to compute the sum of a value that cannot be summed.\",\n\n \"value appears here\",\n\n value.tag.span,\n\n ))\n\n }\n\n }\n\n }\n\n Ok(acc)\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/math/reducers.rs", "rank": 37, "score": 237066.0808450149 }, { "content": "pub fn max(data: Vec<Value>) -> Result<Value, ShellError> {\n\n let mut biggest = data\n\n .first()\n\n .ok_or_else(|| ShellError::unexpected(ERR_EMPTY_DATA))?\n\n .value\n\n .clone();\n\n\n\n for value in data.iter() {\n\n if let Ok(greater_than) = compare_values(Operator::GreaterThan, &value.value, &biggest) {\n\n if greater_than {\n\n biggest = value.value.clone();\n\n }\n\n } else {\n\n return Err(ShellError::unexpected(format!(\n\n \"Could not compare\\nleft: {:?}\\nright: {:?}\",\n\n biggest, value.value\n\n )));\n\n }\n\n }\n\n Ok(Value {\n\n value: biggest,\n\n tag: Tag::unknown(),\n\n })\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/math/reducers.rs", "rank": 38, "score": 237066.0808450149 }, { "content": "#[test]\n\nfn substrings_the_input_and_returns_the_string_if_end_index_exceeds_length() {\n\n Playground::setup(\"str_test_10\", |dirs, sandbox| {\n\n sandbox.with_files(vec![FileWithContent(\n\n \"sample.toml\",\n\n r#\"\n\n [package]\n\n name = \"nu-arepas\"\n\n \"#,\n\n )]);\n\n\n\n let actual = nu!(\n\n cwd: dirs.test(), pipeline(\n\n r#\"\n\n open sample.toml\n\n | str substring 0,999 package.name\n\n | get package.name\n\n | echo $it\n\n \"#\n\n ));\n\n\n\n assert_eq!(actual.out, \"nu-arepas\");\n\n })\n\n}\n\n\n", "file_path": "crates/nu-cli/tests/commands/str_/mod.rs", "rank": 39, "score": 235631.53523127956 }, { "content": "#[test]\n\nfn substrings_the_input_and_returns_blank_if_start_index_exceeds_length() {\n\n Playground::setup(\"str_test_11\", |dirs, sandbox| {\n\n sandbox.with_files(vec![FileWithContent(\n\n 
\"sample.toml\",\n\n r#\"\n\n [package]\n\n name = \"nu-arepas\"\n\n \"#,\n\n )]);\n\n\n\n let actual = nu!(\n\n cwd: dirs.test(), pipeline(\n\n r#\"\n\n open sample.toml\n\n | str substring 50,999 package.name\n\n | get package.name\n\n | echo $it\n\n \"#\n\n ));\n\n\n\n assert_eq!(actual.out, \"\");\n\n })\n\n}\n\n\n", "file_path": "crates/nu-cli/tests/commands/str_/mod.rs", "rank": 40, "score": 235611.4499625627 }, { "content": "fn json_list(input: &[Value]) -> Result<Vec<serde_json::Value>, ShellError> {\n\n let mut out = vec![];\n\n\n\n for value in input {\n\n out.push(value_to_json_value(value)?);\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n\nasync fn to_json(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let registry = registry.clone();\n\n let name_tag = args.call_info.name_tag.clone();\n\n let (ToJSONArgs { pretty }, input) = args.process(&registry).await?;\n\n let name_span = name_tag.span;\n\n let input: Vec<Value> = input.collect().await;\n\n\n", "file_path": "crates/nu-cli/src/commands/to_json.rs", "rank": 41, "score": 235246.02319164693 }, { "content": "pub fn get_data_by_key(value: &Value, name: Spanned<&str>) -> Option<Value> {\n\n match &value.value {\n\n UntaggedValue::Row(o) => o.get_data_by_key(name),\n\n UntaggedValue::Table(l) => {\n\n let mut out = vec![];\n\n for item in l {\n\n match item {\n\n Value {\n\n value: UntaggedValue::Row(o),\n\n ..\n\n } => match o.get_data_by_key(name) {\n\n Some(v) => out.push(v),\n\n None => out.push(UntaggedValue::nothing().into_untagged_value()),\n\n },\n\n _ => out.push(UntaggedValue::nothing().into_untagged_value()),\n\n }\n\n }\n\n\n\n if !out.is_empty() {\n\n Some(UntaggedValue::Table(out).into_value(name.span))\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 42, "score": 234645.8230872407 }, { "content": "#[cfg(test)]\n\npub fn stddev(values: &[Value], name: &Tag) -> Result<Value, ShellError> {\n\n compute_stddev(values, values.len(), 
name)\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/math/stddev.rs", "rank": 44, "score": 230635.94002532505 }, { "content": "pub fn average(values: &[Value], name: &Tag) -> Result<Value, ShellError> {\n\n let sum = reducer_for(Reduce::Summation);\n\n\n\n let number = BigDecimal::from_usize(values.len()).ok_or_else(|| {\n\n ShellError::labeled_error(\"nothing to average\", \"nothing to average\", &name.span)\n\n })?;\n\n\n\n let total_rows = UntaggedValue::decimal(number);\n\n\n\n let are_bytes = values\n\n .get(0)\n\n .ok_or_else(|| {\n\n ShellError::unexpected(\"Cannot perform aggregate math operation on empty data\")\n\n })?\n\n .is_filesize();\n\n\n\n let total = if are_bytes {\n\n to_byte(&sum(\n\n UntaggedValue::int(0).into_untagged_value(),\n\n values\n", "file_path": "crates/nu-cli/src/commands/math/avg.rs", "rank": 45, "score": 230635.94002532505 }, { "content": "pub fn median(values: &[Value], name: &Tag) -> Result<Value, ShellError> {\n\n let take = if values.len() % 2 == 0 {\n\n Pick::MedianAverage\n\n } else {\n\n Pick::Median\n\n };\n\n\n\n let mut sorted = vec![];\n\n\n\n for item in values {\n\n sorted.push(item.clone());\n\n }\n\n\n\n crate::commands::sort_by::sort(&mut sorted, &[], name, false)?;\n\n\n\n match take {\n\n Pick::Median => {\n\n let idx = (values.len() as f64 / 2.0).floor() as usize;\n\n let out = sorted.get(idx).ok_or_else(|| {\n\n ShellError::labeled_error(\n", "file_path": "crates/nu-cli/src/commands/math/median.rs", "rank": 46, "score": 230635.94002532505 }, { "content": "#[cfg(test)]\n\npub fn variance(values: &[Value], name: &Tag) -> Result<Value, ShellError> {\n\n compute_variance(values, values.len(), name)\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/math/variance.rs", "rank": 47, "score": 230635.94002532505 }, { "content": "pub fn mode(values: &[Value], name: &Tag) -> Result<Value, ShellError> {\n\n let mut frequency_map = std::collections::HashMap::new();\n\n for v in values {\n\n let counter = 
frequency_map.entry(v.value.clone()).or_insert(0);\n\n *counter += 1;\n\n }\n\n\n\n let mut max_freq = -1;\n\n let mut modes = Vec::<Value>::new();\n\n for (value, frequency) in frequency_map.iter() {\n\n match max_freq.cmp(&frequency) {\n\n Ordering::Less => {\n\n max_freq = *frequency;\n\n modes.clear();\n\n modes.push(value.clone().into_value(name));\n\n }\n\n Ordering::Equal => {\n\n modes.push(value.clone().into_value(name));\n\n }\n\n Ordering::Greater => (),\n", "file_path": "crates/nu-cli/src/commands/math/mode.rs", "rank": 48, "score": 230635.94002532505 }, { "content": "pub fn summation(values: &[Value], name: &Tag) -> Result<Value, ShellError> {\n\n let sum = reducer_for(Reduce::Summation);\n\n\n\n let first = values.get(0).ok_or_else(|| {\n\n ShellError::unexpected(\"Cannot perform aggregate math operation on empty data\")\n\n })?;\n\n\n\n match first {\n\n v if v.is_filesize() => to_byte(&sum(\n\n UntaggedValue::int(0).into_untagged_value(),\n\n values\n\n .to_vec()\n\n .iter()\n\n .map(|v| match v {\n\n Value {\n\n value: UntaggedValue::Primitive(Primitive::Filesize(num)),\n\n ..\n\n } => UntaggedValue::int(*num as usize).into_untagged_value(),\n\n other => other.clone(),\n\n })\n", "file_path": "crates/nu-cli/src/commands/math/sum.rs", "rank": 49, "score": 230635.94002532505 }, { "content": "pub fn version(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {\n\n let tag = args.call_info.args.span;\n\n\n\n let mut indexmap = IndexMap::with_capacity(2);\n\n\n\n indexmap.insert(\n\n \"version\".to_string(),\n\n UntaggedValue::string(clap::crate_version!()).into_value(&tag),\n\n );\n\n indexmap.insert(\n\n \"commit_hash\".to_string(),\n\n UntaggedValue::string(GIT_COMMIT_HASH).into_value(&tag),\n\n );\n\n\n\n let value = UntaggedValue::Row(Dictionary::from(indexmap)).into_value(&tag);\n\n Ok(OutputStream::one(value))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Version;\n\n\n\n #[test]\n\n fn 
examples_work_as_expected() {\n\n use crate::examples::test as test_examples;\n\n\n\n test_examples(Version {})\n\n }\n\n}\n", "file_path": "crates/nu-cli/src/commands/version.rs", "rank": 50, "score": 229711.75670332316 }, { "content": "fn action(input: &Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n Ok(UntaggedValue::string(s.to_ascii_lowercase()).into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n\n \"value is not string\",\n\n got,\n\n tag.into().span,\n\n ))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{action, SubCommand};\n", "file_path": "crates/nu-cli/src/commands/str_/downcase.rs", "rank": 51, "score": 228276.54702425376 }, { "content": "fn action(input: &Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let other = s.trim();\n\n let out = match BigInt::from_str(other) {\n\n Ok(v) => UntaggedValue::int(v),\n\n Err(reason) => {\n\n return Err(ShellError::labeled_error(\n\n \"could not parse as an integer\",\n\n reason.to_string(),\n\n tag.into().span,\n\n ))\n\n }\n\n };\n\n Ok(out.into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n", "file_path": "crates/nu-cli/src/commands/str_/to_integer.rs", "rank": 52, "score": 228276.54702425376 }, { "content": "fn action(input: &Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let mut capitalized = String::new();\n\n\n\n for (idx, character) in s.chars().enumerate() {\n\n let out = if idx == 0 {\n\n 
character.to_uppercase().to_string()\n\n } else {\n\n character.to_lowercase().to_string()\n\n };\n\n\n\n capitalized.push_str(&out);\n\n }\n\n\n\n Ok(UntaggedValue::string(capitalized).into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n", "file_path": "crates/nu-cli/src/commands/str_/capitalize.rs", "rank": 53, "score": 228276.54702425376 }, { "content": "fn action(input: &Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n Ok(UntaggedValue::string(s.to_ascii_uppercase()).into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n\n \"value is not string\",\n\n got,\n\n tag.into().span,\n\n ))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{action, SubCommand};\n", "file_path": "crates/nu-cli/src/commands/str_/upcase.rs", "rank": 54, "score": 228276.54702425376 }, { "content": "fn action(input: &Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let other = s.trim();\n\n let out = match BigDecimal::from_str(other) {\n\n Ok(v) => UntaggedValue::decimal(v),\n\n Err(reason) => {\n\n return Err(ShellError::labeled_error(\n\n \"could not parse as decimal\",\n\n reason.to_string(),\n\n tag.into().span,\n\n ))\n\n }\n\n };\n\n Ok(out.into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n", "file_path": "crates/nu-cli/src/commands/str_/to_decimal.rs", "rank": 55, "score": 228276.54702425376 }, { "content": "// NOTE: could this be useful more widely and implemented on Value ?\n\npub fn clone_tagged_value(v: &Value) -> Value {\n\n match &v.value {\n\n 
UntaggedValue::Primitive(Primitive::String(s)) => {\n\n UntaggedValue::Primitive(Primitive::String(s.clone()))\n\n }\n\n UntaggedValue::Primitive(Primitive::Nothing) => {\n\n UntaggedValue::Primitive(Primitive::Nothing)\n\n }\n\n UntaggedValue::Primitive(Primitive::Boolean(b)) => {\n\n UntaggedValue::Primitive(Primitive::Boolean(*b))\n\n }\n\n UntaggedValue::Primitive(Primitive::Decimal(f)) => {\n\n UntaggedValue::Primitive(Primitive::Decimal(f.clone()))\n\n }\n\n UntaggedValue::Primitive(Primitive::Int(i)) => {\n\n UntaggedValue::Primitive(Primitive::Int(i.clone()))\n\n }\n\n UntaggedValue::Primitive(Primitive::Path(x)) => {\n\n UntaggedValue::Primitive(Primitive::Path(x.clone()))\n\n }\n", "file_path": "crates/nu-cli/src/commands/to_delimited_data.rs", "rank": 56, "score": 226080.02145603523 }, { "content": "/// Shortcuts for creating an entry to the output table\n\nfn entry(arg: impl Into<String>, path: Value, builtin: bool, tag: Tag) -> Value {\n\n let mut map = IndexMap::new();\n\n map.insert(\n\n \"arg\".to_string(),\n\n UntaggedValue::Primitive(Primitive::String(arg.into())).into_value(tag.clone()),\n\n );\n\n map.insert(\"path\".to_string(), path);\n\n map.insert(\n\n \"builtin\".to_string(),\n\n UntaggedValue::Primitive(Primitive::Boolean(builtin)).into_value(tag.clone()),\n\n );\n\n\n\n UntaggedValue::row(map).into_value(tag)\n\n}\n\n\n\nmacro_rules! 
entry_builtin {\n\n ($arg:expr, $tag:expr) => {\n\n entry(\n\n $arg.clone(),\n\n UntaggedValue::Primitive(Primitive::String(\"nushell built-in command\".to_string()))\n", "file_path": "crates/nu-cli/src/commands/which_.rs", "rank": 57, "score": 225579.17446072324 }, { "content": "fn compute_average(values: &[Value], name: impl Into<Tag>) -> Result<Value, ShellError> {\n\n let name = name.into();\n\n\n\n let sum = reducer_for(Reduce::Summation);\n\n let number = BigDecimal::from_usize(2).ok_or_else(|| {\n\n ShellError::labeled_error(\n\n \"could not convert to big decimal\",\n\n \"could not convert to big decimal\",\n\n &name,\n\n )\n\n })?;\n\n let total_rows = UntaggedValue::decimal(number);\n\n let total = sum(UntaggedValue::int(0).into_untagged_value(), values.to_vec())?;\n\n\n\n match total {\n\n Value {\n\n value: UntaggedValue::Primitive(Primitive::Filesize(num)),\n\n ..\n\n } => {\n\n let left = UntaggedValue::from(Primitive::Int(num.into()));\n", "file_path": "crates/nu-cli/src/commands/math/median.rs", "rank": 58, "score": 225429.07434545504 }, { "content": "pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n match v {\n\n toml::Value::Boolean(b) => UntaggedValue::boolean(*b).into_value(tag),\n\n toml::Value::Integer(n) => UntaggedValue::int(*n).into_value(tag),\n\n toml::Value::Float(n) => UntaggedValue::decimal(*n).into_value(tag),\n\n toml::Value::String(s) => {\n\n UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(tag)\n\n }\n\n toml::Value::Array(a) => UntaggedValue::Table(\n\n a.iter()\n\n .map(|x| convert_toml_value_to_nu_value(x, &tag))\n\n .collect(),\n\n )\n\n .into_value(tag),\n\n toml::Value::Datetime(dt) => {\n\n UntaggedValue::Primitive(Primitive::String(dt.to_string())).into_value(tag)\n\n }\n\n toml::Value::Table(t) => {\n", "file_path": "crates/nu-cli/src/commands/from_toml.rs", "rank": 59, "score": 224294.1983374097 }, { "content": "/// Prepares a 
list of \"sounds like\" matches for the string you're trying to find\n\npub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<(usize, String)>> {\n\n let field_tried = match &field_tried.unspanned {\n\n UnspannedPathMember::String(string) => string.clone(),\n\n UnspannedPathMember::Int(int) => format!(\"{}\", int),\n\n };\n\n\n\n let possibilities = obj_source.data_descriptors();\n\n\n\n let mut possible_matches: Vec<_> = possibilities\n\n .into_iter()\n\n .map(|x| {\n\n let word = x;\n\n let distance = natural::distance::levenshtein_distance(&word, &field_tried);\n\n\n\n (distance, word)\n\n })\n\n .collect();\n\n\n\n if !possible_matches.is_empty() {\n\n possible_matches.sort();\n\n Some(possible_matches)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "crates/nu-protocol/src/value/column_path.rs", "rank": 60, "score": 222627.13553805064 }, { "content": "fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n let mut spec = TaggedDictBuilder::new(tag);\n\n\n\n spec.insert_untagged(\"name\", UntaggedValue::string(name));\n\n spec.insert_untagged(\"type\", UntaggedValue::string(ty));\n\n spec.insert_untagged(\n\n \"required\",\n\n UntaggedValue::string(if required { \"yes\" } else { \"no\" }),\n\n );\n\n\n\n spec.into_value()\n\n}\n\n\n", "file_path": "crates/nu-cli/src/data/command.rs", "rank": 61, "score": 222476.97052328748 }, { "content": "pub fn from_yaml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n let v: serde_yaml::Value = serde_yaml::from_str(&s).map_err(|x| {\n\n ShellError::labeled_error(\n\n format!(\"Could not load yaml: {}\", x),\n\n \"could not load yaml from text\",\n\n &tag,\n\n )\n\n })?;\n\n Ok(convert_yaml_value_to_nu_value(&v, tag)?)\n\n}\n\n\n\nasync fn from_yaml(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let registry = 
registry.clone();\n\n let args = args.evaluate_once(&registry).await?;\n\n let tag = args.name_tag();\n\n let input = args.input;\n", "file_path": "crates/nu-cli/src/commands/from_yaml.rs", "rank": 62, "score": 221286.9375762593 }, { "content": "pub fn from_json_string_to_value(s: String, tag: impl Into<Tag>) -> serde_hjson::Result<Value> {\n\n let v: serde_hjson::Value = serde_hjson::from_str(&s)?;\n\n Ok(convert_json_value_to_nu_value(&v, tag))\n\n}\n\n\n\nasync fn from_json(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let name_tag = args.call_info.name_tag.clone();\n\n let registry = registry.clone();\n\n\n\n let (FromJSONArgs { objects }, input) = args.process(&registry).await?;\n\n let concat_string = input.collect_string(name_tag.clone()).await?;\n\n\n\n let string_clone: Vec<_> = concat_string.item.lines().map(|x| x.to_string()).collect();\n\n\n\n if objects {\n\n Ok(\n\n futures::stream::iter(string_clone.into_iter().filter_map(move |json_str| {\n", "file_path": "crates/nu-cli/src/commands/from_json.rs", "rank": 63, "score": 221286.9375762593 }, { "content": "pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry) -> ClassifiedBlock {\n\n // FIXME: fake span\n\n let mut block = Block::new(Span::new(0, 0));\n\n\n\n let mut error = None;\n\n for lite_pipeline in &lite_block.block {\n\n let (lite_pipeline, vars, err) = expand_shorthand_forms(lite_pipeline);\n\n if error.is_none() {\n\n error = err;\n\n }\n\n\n\n let (pipeline, err) = classify_pipeline(&lite_pipeline, registry);\n\n\n\n let pipeline = if let Some(vars) = vars {\n\n let span = pipeline.commands.span;\n\n let block = hir::Block {\n\n block: vec![pipeline.commands.clone()],\n\n span,\n\n };\n\n let mut call = hir::Call::new(\n", "file_path": "crates/nu-parser/src/parse.rs", "rank": 64, "score": 220758.76497030986 }, { "content": "fn merge_descriptors(values: &[Value]) -> Vec<Spanned<String>> {\n\n let mut ret: 
Vec<Spanned<String>> = vec![];\n\n let mut seen: IndexSet<String> = indexset! {};\n\n for value in values {\n\n for desc in value.data_descriptors() {\n\n if !seen.contains(&desc[..]) {\n\n seen.insert(desc.clone());\n\n ret.push(desc.spanned(value.tag.span));\n\n }\n\n }\n\n }\n\n ret\n\n}\n\n\n\npub async fn to_delimited_data(\n\n headerless: bool,\n\n sep: char,\n\n format_name: &'static str,\n\n input: InputStream,\n\n name: Tag,\n", "file_path": "crates/nu-cli/src/commands/to_delimited_data.rs", "rank": 65, "score": 220750.3227101054 }, { "content": "fn process_arguments(range: Value, name: impl Into<Tag>) -> Result<(isize, isize), ShellError> {\n\n let name = name.into();\n\n\n\n let search = match &range.value {\n\n UntaggedValue::Table(indexes) => {\n\n if indexes.len() > 2 {\n\n Err(ShellError::labeled_error(\n\n \"could not perform substring\",\n\n \"could not perform substring\",\n\n name.span,\n\n ))\n\n } else {\n\n let idx: Vec<String> = indexes\n\n .iter()\n\n .map(|v| as_string(v).unwrap_or_else(|_| String::from(\"\")))\n\n .collect();\n\n\n\n let start = idx\n\n .get(0)\n\n .ok_or_else(|| {\n", "file_path": "crates/nu-cli/src/commands/str_/substring.rs", "rank": 66, "score": 219595.26020558964 }, { "content": "pub fn serve_plugin(plugin: &mut dyn Plugin) {\n\n let mut args = std::env::args();\n\n if args.len() > 1 {\n\n let input = args.nth(1);\n\n\n\n let input = match input {\n\n Some(arg) => std::fs::read_to_string(arg),\n\n None => {\n\n send_response(ShellError::untagged_runtime_error(\"No input given.\"));\n\n return;\n\n }\n\n };\n\n\n\n if let Ok(input) = input {\n\n let command = serde_json::from_str::<NuCommand>(&input);\n\n match command {\n\n Ok(NuCommand::config) => {\n\n send_response(plugin.config());\n\n return;\n\n }\n", "file_path": "crates/nu-plugin/src/plugin.rs", "rank": 67, "score": 218742.97353336227 }, { "content": "pub fn from_xml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, roxmltree::Error> {\n\n let 
parsed = roxmltree::Document::parse(&s)?;\n\n Ok(from_document_to_value(&parsed, tag))\n\n}\n\n\n\nasync fn from_xml(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let registry = registry.clone();\n\n let args = args.evaluate_once(&registry).await?;\n\n let tag = args.name_tag();\n\n let input = args.input;\n\n\n\n let concat_string = input.collect_string(tag.clone()).await?;\n\n\n\n Ok(\n\n match from_xml_string_to_value(concat_string.item, tag.clone()) {\n\n Ok(x) => match x {\n\n Value {\n", "file_path": "crates/nu-cli/src/commands/from_xml.rs", "rank": 68, "score": 218524.1723113273 }, { "content": "pub fn merge_descriptors(values: &[Value]) -> Vec<String> {\n\n let mut ret: Vec<String> = vec![];\n\n let value_column = \"\".to_string();\n\n for value in values {\n\n let descs = value.data_descriptors();\n\n\n\n if descs.is_empty() {\n\n if !ret.contains(&value_column) {\n\n ret.push(\"\".to_string());\n\n }\n\n } else {\n\n for desc in value.data_descriptors() {\n\n if !ret.contains(&desc) {\n\n ret.push(desc);\n\n }\n\n }\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "crates/nu-protocol/src/value.rs", "rank": 69, "score": 218491.0077123428 }, { "content": "#[allow(unused)]\n\nfn vec_tag<T>(input: Vec<Tagged<T>>) -> Option<Tag> {\n\n let mut iter = input.iter();\n\n let first = iter.next()?.tag.clone();\n\n let last = iter.last();\n\n\n\n Some(match last {\n\n None => first,\n\n Some(last) => first.until(&last.tag),\n\n })\n\n}\n\n\n\npub struct Painter {\n\n original: Vec<u8>,\n\n styles: Vec<Style>,\n\n}\n\n\n\nimpl Painter {\n\n fn new(original: &str) -> Painter {\n\n let bytes: Vec<u8> = original.bytes().collect();\n\n let bytes_count = bytes.len();\n", "file_path": "crates/nu-cli/src/shell/helper.rs", "rank": 70, "score": 217035.3914460041 }, { "content": "fn action(input: &Value, options: &Substring, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n\n\n match &input.value 
{\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let len: isize = s.len().try_into().map_err(|_| {\n\n ShellError::labeled_error(\n\n \"could not perform substring\",\n\n \"could not perform substring\",\n\n tag.span,\n\n )\n\n })?;\n\n\n\n let start: isize = options.0;\n\n let end: isize = options.1;\n\n\n\n if start < len && end >= 0 {\n\n match start.cmp(&end) {\n\n Ordering::Equal => Ok(UntaggedValue::string(\"\").into_value(tag)),\n", "file_path": "crates/nu-cli/src/commands/str_/substring.rs", "rank": 71, "score": 216754.84247433752 }, { "content": "pub fn calculate(values: &[Value], name: &Tag, mf: MathFunction) -> Result<Value, ShellError> {\n\n if values.iter().all(|v| v.is_primitive()) {\n\n mf(&values, &name)\n\n } else {\n\n // If we are not dealing with Primitives, then perhaps we are dealing with a table\n\n // Create a key for each column name\n\n let mut column_values = IndexMap::new();\n\n for value in values {\n\n if let UntaggedValue::Row(row_dict) = &value.value {\n\n for (key, value) in row_dict.entries.iter() {\n\n column_values\n\n .entry(key.clone())\n\n .and_modify(|v: &mut Vec<Value>| v.push(value.clone()))\n\n .or_insert(vec![value.clone()]);\n\n }\n\n }\n\n }\n\n // The mathematical function operates over the columns of the table\n\n let mut column_totals = IndexMap::new();\n\n for (col_name, col_vals) in column_values {\n", "file_path": "crates/nu-cli/src/commands/math/utils.rs", "rank": 72, "score": 215972.43732996413 }, { "content": "pub fn number(number: impl Into<Number>) -> Primitive {\n\n let number = number.into();\n\n\n\n match number {\n\n Number::Int(int) => Primitive::Int(int),\n\n Number::Decimal(decimal) => Primitive::Decimal(decimal),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-cli/src/data/primitive.rs", "rank": 73, "score": 214098.610926818 }, { "content": "pub fn delete_file_at(full_path: impl AsRef<Path>) {\n\n let full_path = full_path.as_ref();\n\n\n\n if 
full_path.exists() {\n\n std::fs::remove_file(full_path).expect(\"can not delete file\");\n\n }\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 74, "score": 213216.09815211192 }, { "content": "pub fn from_toml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, toml::de::Error> {\n\n let v: toml::Value = s.parse::<toml::Value>()?;\n\n Ok(convert_toml_value_to_nu_value(&v, tag))\n\n}\n\n\n\npub async fn from_toml(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let registry = registry.clone();\n\n let args = args.evaluate_once(&registry).await?;\n\n let tag = args.name_tag();\n\n let input = args.input;\n\n\n\n let concat_string = input.collect_string(tag.clone()).await?;\n\n Ok(\n\n match from_toml_string_to_value(concat_string.item, tag.clone()) {\n\n Ok(x) => match x {\n\n Value {\n\n value: UntaggedValue::Table(list),\n", "file_path": "crates/nu-cli/src/commands/from_toml.rs", "rank": 75, "score": 213193.55939101413 }, { "content": "fn action(input: &Value, options: &FindReplace, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n match &input.value {\n\n UntaggedValue::Primitive(Primitive::Line(s))\n\n | UntaggedValue::Primitive(Primitive::String(s)) => {\n\n let find = &options.0;\n\n let replacement = &options.1;\n\n\n\n let regex = Regex::new(find.as_str());\n\n\n\n let out = match regex {\n\n Ok(re) => UntaggedValue::string(re.replace(s, replacement.as_str()).to_owned()),\n\n Err(_) => UntaggedValue::string(s),\n\n };\n\n\n\n Ok(out.into_value(tag))\n\n }\n\n other => {\n\n let got = format!(\"got {}\", other.type_name());\n\n Err(ShellError::labeled_error(\n\n \"value is not string\",\n", "file_path": "crates/nu-cli/src/commands/str_/find_replace.rs", "rank": 76, "score": 211472.93082188477 }, { "content": "fn create_default_command_args(context: &RunnableContextWithoutInput) -> RawCommandArgs {\n\n let span = context.name.span;\n\n RawCommandArgs {\n\n host: 
context.host.clone(),\n\n ctrl_c: context.ctrl_c.clone(),\n\n current_errors: context.current_errors.clone(),\n\n shell_manager: context.shell_manager.clone(),\n\n call_info: UnevaluatedCallInfo {\n\n args: hir::Call {\n\n head: Box::new(SpannedExpression::new(\n\n Expression::Literal(Literal::String(String::new())),\n\n span,\n\n )),\n\n positional: None,\n\n named: None,\n\n span,\n\n external_redirection: ExternalRedirection::Stdout,\n\n },\n\n name_tag: context.name.clone(),\n\n scope: Scope::new(),\n", "file_path": "crates/nu-cli/src/commands/autoview.rs", "rank": 77, "score": 211419.36350814928 }, { "content": "pub fn in_directory(str: impl AsRef<Path>) -> String {\n\n let path = str.as_ref();\n\n let path = if path.is_relative() {\n\n root().join(path)\n\n } else {\n\n path.to_path_buf()\n\n };\n\n\n\n path.display().to_string()\n\n}\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 78, "score": 211403.68302586966 }, { "content": "pub fn shell_os_paths() -> Vec<std::path::PathBuf> {\n\n let mut original_paths = vec![];\n\n\n\n if let Some(paths) = std::env::var_os(\"PATH\") {\n\n original_paths = std::env::split_paths(&paths).collect::<Vec<_>>();\n\n }\n\n\n\n original_paths\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::pipeline;\n\n\n\n #[test]\n\n fn constructs_a_pipeline() {\n\n let actual = pipeline(\n\n r#\"\n\n open los_tres_amigos.txt\n\n | from-csv\n", "file_path": "crates/nu-test-support/src/lib.rs", "rank": 79, "score": 210722.8724866112 }, { "content": "fn primitive_doc(name: impl std::fmt::Debug, ty: impl Into<String>) -> DebugDocBuilder {\n\n b::primitive(format!(\"{:?}\", name)) + b::delimit(\"(\", b::kind(ty.into()), \")\")\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/value/debug.rs", "rank": 80, "score": 209794.83712990273 }, { "content": "pub fn load_plugins(context: &mut Context) -> Result<(), ShellError> {\n\n let opts = glob::MatchOptions {\n\n case_sensitive: false,\n\n require_literal_separator: false,\n\n 
require_literal_leading_dot: false,\n\n };\n\n\n\n for path in search_paths() {\n\n let mut pattern = path.to_path_buf();\n\n\n\n pattern.push(std::path::Path::new(\"nu_plugin_[a-z0-9][a-z0-9]*\"));\n\n\n\n let plugs: Vec<_> = glob::glob_with(&pattern.to_string_lossy(), opts)?\n\n .filter_map(|x| x.ok())\n\n .collect();\n\n\n\n let _failures: Vec<_> = plugs\n\n .par_iter()\n\n .map(|path| {\n\n let bin_name = {\n", "file_path": "crates/nu-cli/src/cli.rs", "rank": 81, "score": 209049.12155629712 }, { "content": "pub fn sort(\n\n vec: &mut [Value],\n\n keys: &[Tagged<String>],\n\n tag: impl Into<Tag>,\n\n insensitive: bool,\n\n) -> Result<(), ShellError> {\n\n let tag = tag.into();\n\n\n\n if vec.is_empty() {\n\n return Err(ShellError::labeled_error(\n\n \"no values to work with\",\n\n \"no values to work with\",\n\n tag,\n\n ));\n\n }\n\n\n\n for sort_arg in keys.iter() {\n\n let match_test = get_data_by_key(&vec[0], sort_arg.borrow_spanned());\n\n if match_test == None {\n\n return Err(ShellError::labeled_error(\n", "file_path": "crates/nu-cli/src/commands/sort_by.rs", "rank": 82, "score": 208267.37667960825 }, { "content": "pub fn group(\n\n column_name: &Option<Tagged<String>>,\n\n values: &Value,\n\n tag: impl Into<Tag>,\n\n) -> Result<Value, ShellError> {\n\n let name = tag.into();\n\n\n\n let grouper = if let Some(column_name) = column_name {\n\n Grouper::ByColumn(Some(column_name.clone()))\n\n } else {\n\n Grouper::ByColumn(None)\n\n };\n\n\n\n match grouper {\n\n Grouper::ByColumn(Some(column_name)) => {\n\n let block = Box::new(move |_, row: &Value| {\n\n match row.get_data_by_key(column_name.borrow_spanned()) {\n\n Some(group_key) => Ok(as_string(&group_key)?),\n\n None => Err(suggestions(column_name.borrow_tagged(), &row)),\n\n }\n", "file_path": "crates/nu-cli/src/commands/group_by.rs", "rank": 83, "score": 208267.37667960825 }, { "content": "pub fn split(\n\n column_name: &Option<Tagged<String>>,\n\n values: &Value,\n\n tag: impl Into<Tag>,\n\n) -> 
Result<Value, ShellError> {\n\n let name = tag.into();\n\n\n\n let grouper = if let Some(column_name) = column_name {\n\n Grouper::ByColumn(Some(column_name.clone()))\n\n } else {\n\n Grouper::ByColumn(None)\n\n };\n\n\n\n match grouper {\n\n Grouper::ByColumn(Some(column_name)) => {\n\n let block = Box::new(move |_, row: &Value| {\n\n match row.get_data_by_key(column_name.borrow_spanned()) {\n\n Some(group_key) => Ok(as_string(&group_key)?),\n\n None => Err(suggestions(column_name.borrow_tagged(), &row)),\n\n }\n", "file_path": "crates/nu-cli/src/commands/split_by.rs", "rank": 84, "score": 208267.37667960825 }, { "content": "pub trait GroupedValue: Debug + Clone {\n\n type Item;\n\n\n\n fn new() -> Self;\n\n fn merge(&mut self, value: Self::Item);\n\n}\n\n\n\nimpl GroupedValue for Vec<(usize, usize)> {\n\n type Item = usize;\n\n\n\n fn new() -> Vec<(usize, usize)> {\n\n vec![]\n\n }\n\n\n\n fn merge(&mut self, new_value: usize) {\n\n match self.last_mut() {\n\n Some(value) if value.1 == new_value - 1 => {\n\n value.1 += 1;\n\n }\n\n\n", "file_path": "crates/nu-protocol/src/type_shape.rs", "rank": 85, "score": 207952.04322900402 }, { "content": "fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {\n\n b::kind(format!(\"{:?}\", name))\n\n}\n", "file_path": "crates/nu-protocol/src/value/debug.rs", "rank": 86, "score": 207618.61286106362 }, { "content": "fn prim(name: impl std::fmt::Debug) -> DebugDocBuilder {\n\n b::primitive(format!(\"{:?}\", name))\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/value/debug.rs", "rank": 87, "score": 207618.61286106362 }, { "content": "#[test]\n\nfn ls_sort_by_type_name_sensitive() {\n\n let actual = nu!(\n\n cwd: \"tests/fixtures/formats\", pipeline(\n\n r#\"\n\n open sample-ls-output.json\n\n | sort-by type name\n\n | select name type\n\n | to json\n\n \"#\n\n ));\n\n\n\n let json_output = r#\"[{\"name\":\"C\",\"type\":\"Dir\"},{\"name\":\"B.txt\",\"type\":\"File\"},{\"name\":\"a.txt\",\"type\":\"File\"}]\"#;\n\n\n\n 
assert_eq!(actual.out, json_output);\n\n}\n\n\n", "file_path": "crates/nu-cli/tests/commands/sort_by.rs", "rank": 88, "score": 207396.244862578 }, { "content": "#[test]\n\nfn ls_sort_by_type_name_insensitive() {\n\n let actual = nu!(\n\n cwd: \"tests/fixtures/formats\", pipeline(\n\n r#\"\n\n open sample-ls-output.json\n\n | sort-by -i type name\n\n | select name type\n\n | to json\n\n \"#\n\n ));\n\n\n\n let json_output = r#\"[{\"name\":\"C\",\"type\":\"Dir\"},{\"name\":\"a.txt\",\"type\":\"File\"},{\"name\":\"B.txt\",\"type\":\"File\"}]\"#;\n\n\n\n assert_eq!(actual.out, json_output);\n\n}\n", "file_path": "crates/nu-cli/tests/commands/sort_by.rs", "rank": 89, "score": 207396.244862578 }, { "content": "pub fn expect_return_value_at(\n\n for_results: Result<Vec<Result<ReturnSuccess, ShellError>>, ShellError>,\n\n at: usize,\n\n) -> Value {\n\n let return_values = for_results\n\n .expect(\"Failed! This seems to be an error getting back the results from the plugin.\");\n\n\n\n for (idx, item) in return_values.iter().enumerate() {\n\n let item = match item {\n\n Ok(return_value) => return_value,\n\n Err(reason) => panic!(format!(\"{}\", reason)),\n\n };\n\n\n\n if idx == at {\n\n if let Some(value) = item.raw_value() {\n\n return value;\n\n } else {\n\n panic!(\"Internal error: could not get raw value in expect_return_value_at\")\n\n }\n\n }\n", "file_path": "crates/nu-plugin/src/test_helpers.rs", "rank": 90, "score": 206851.23575179133 }, { "content": "pub fn file_contents(full_path: impl AsRef<Path>) -> String {\n\n let mut file = std::fs::File::open(full_path.as_ref()).expect(\"can not open file\");\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)\n\n .expect(\"can not read file\");\n\n contents\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 91, "score": 206330.385518127 }, { "content": "pub fn reducer_for(\n\n command: Reduce,\n\n) -> Box<dyn Fn(Value, Vec<Value>) -> Result<Value, ShellError> + Send + Sync + 
'static> {\n\n match command {\n\n Reduce::Summation | Reduce::Default => Box::new(formula(\n\n UntaggedValue::int(0).into_untagged_value(),\n\n Box::new(sum),\n\n )),\n\n Reduce::Minimum => Box::new(|_, values| min(values)),\n\n Reduce::Maximum => Box::new(|_, values| max(values)),\n\n }\n\n}\n\n\n\npub enum Reduce {\n\n Summation,\n\n Minimum,\n\n Maximum,\n\n Default,\n\n}\n\n\n", "file_path": "crates/nu-cli/src/commands/math/reducers.rs", "rank": 92, "score": 205173.10889334773 }, { "content": "/// A trait that allows structures to define a known .type_name() which pretty-prints the type\n\npub trait ShellTypeName {\n\n fn type_name(&self) -> &'static str;\n\n}\n\n\n\nimpl<T: ShellTypeName> ShellTypeName for Spanned<T> {\n\n /// Return the type_name of the spanned item\n\n fn type_name(&self) -> &'static str {\n\n self.item.type_name()\n\n }\n\n}\n\n\n\nimpl<T: ShellTypeName> ShellTypeName for &T {\n\n /// Return the type_name for the borrowed reference\n\n fn type_name(&self) -> &'static str {\n\n (*self).type_name()\n\n }\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/type_name.rs", "rank": 93, "score": 204777.6867292354 }, { "content": "fn args() -> Vec<String> {\n\n // skip (--testbin bin_name args)\n\n std::env::args().skip(2).collect()\n\n}\n", "file_path": "crates/nu-cli/src/utils/test_bins.rs", "rank": 94, "score": 203747.81499257573 }, { "content": "pub fn maximum(values: &[Value], _name: &Tag) -> Result<Value, ShellError> {\n\n let max_func = reducer_for(Reduce::Maximum);\n\n max_func(Value::nothing(), values.to_vec())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::SubCommand;\n\n\n\n #[test]\n\n fn examples_work_as_expected() {\n\n use crate::examples::test as test_examples;\n\n\n\n test_examples(SubCommand {})\n\n }\n\n}\n", "file_path": "crates/nu-cli/src/commands/math/max.rs", "rank": 95, "score": 202713.29263392114 }, { "content": "pub fn minimum(values: &[Value], _name: &Tag) -> Result<Value, ShellError> {\n\n let min_func = 
reducer_for(Reduce::Minimum);\n\n min_func(Value::nothing(), values.to_vec())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::SubCommand;\n\n\n\n #[test]\n\n fn examples_work_as_expected() {\n\n use crate::examples::test as test_examples;\n\n\n\n test_examples(SubCommand {})\n\n }\n\n}\n", "file_path": "crates/nu-cli/src/commands/math/min.rs", "rank": 96, "score": 202713.29263392114 }, { "content": "pub fn line_ending() -> String {\n\n #[cfg(windows)]\n\n {\n\n String::from(\"\\r\\n\")\n\n }\n\n\n\n #[cfg(not(windows))]\n\n {\n\n String::from(\"\\n\")\n\n }\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 97, "score": 202432.65469391274 }, { "content": "pub fn from_ini_string_to_value(\n\n s: String,\n\n tag: impl Into<Tag>,\n\n) -> Result<Value, serde_ini::de::Error> {\n\n let v: HashMap<String, HashMap<String, String>> = serde_ini::from_str(&s)?;\n\n Ok(convert_ini_top_to_nu_value(&v, tag))\n\n}\n\n\n\nasync fn from_ini(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n) -> Result<OutputStream, ShellError> {\n\n let registry = registry.clone();\n\n let args = args.evaluate_once(&registry).await?;\n\n let tag = args.name_tag();\n\n let input = args.input;\n\n let concat_string = input.collect_string(tag.clone()).await?;\n\n\n\n match from_ini_string_to_value(concat_string.item, tag.clone()) {\n\n Ok(x) => match x {\n", "file_path": "crates/nu-cli/src/commands/from_ini.rs", "rank": 98, "score": 202213.1430441903 }, { "content": "fn value_from_vec<V>(vec: Vec<V>, tag: &Tag) -> Value\n\nwhere\n\n V: Into<Value>,\n\n{\n\n if vec.is_empty() {\n\n UntaggedValue::nothing()\n\n } else {\n\n let values = vec.into_iter().map(Into::into).collect::<Vec<Value>>();\n\n UntaggedValue::Table(values)\n\n }\n\n .into_value(tag)\n\n}\n\n\n\nimpl From<DirInfo> for Value {\n\n fn from(d: DirInfo) -> Self {\n\n let mut r: IndexMap<String, Value> = IndexMap::new();\n\n\n\n r.insert(\n\n \"path\".to_string(),\n\n 
UntaggedValue::path(d.path).into_value(&d.tag),\n", "file_path": "crates/nu-cli/src/commands/du.rs", "rank": 99, "score": 201843.13419646968 } ]
Rust
crates/revm/src/evm.rs
mattsse/revm
247d4d0e19b15feb0cf400e8d5dd93921b41a9d7
use crate::{ db::{Database, DatabaseCommit, DatabaseRef, RefDBWrapper}, error::ExitReason, evm_impl::{EVMImpl, Transact}, subroutine::State, BerlinSpec, ByzantineSpec, Env, Inspector, IstanbulSpec, LatestSpec, LondonSpec, NoOpInspector, Spec, SpecId, TransactOut, }; use alloc::boxed::Box; use revm_precompiles::Precompiles; pub struct EVM<DB> { pub env: Env, pub db: Option<DB>, } pub fn new<DB>() -> EVM<DB> { EVM::new() } impl<DB: Database + DatabaseCommit> EVM<DB> { pub fn transact_commit(&mut self) -> (ExitReason, TransactOut, u64) { let (exit, out, gas, state) = self.transact(); self.db.as_mut().unwrap().commit(state); (exit, out, gas) } pub fn inspect_commit<INSP: Inspector>( &mut self, inspector: INSP, ) -> (ExitReason, TransactOut, u64) { let (exit, out, gas, state) = self.inspect(inspector); self.db.as_mut().unwrap().commit(state); (exit, out, gas) } } impl<DB: Database> EVM<DB> { pub fn transact(&mut self) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_mut() { let mut noop = NoOpInspector {}; let out = evm_inner::<DB, false>(&self.env, db, &mut noop).transact(); out } else { panic!("Database needs to be set"); } } pub fn inspect<INSP: Inspector>( &mut self, mut inspector: INSP, ) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_mut() { evm_inner::<DB, true>(&self.env, db, &mut inspector).transact() } else { panic!("Database needs to be set"); } } } impl<DB: DatabaseRef> EVM<DB> { pub fn transact_ref(&self) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_ref() { let mut noop = NoOpInspector {}; let mut db = RefDBWrapper::new(db); let db = &mut db; let out = evm_inner::<RefDBWrapper, false>(&self.env, db, &mut noop).transact(); out } else { panic!("Database needs to be set"); } } pub fn inspect_ref<INSP: Inspector>( &self, mut inspector: INSP, ) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_ref() { let mut db = RefDBWrapper::new(db); let db = &mut db; let out = 
evm_inner::<RefDBWrapper, true>(&self.env, db, &mut inspector).transact(); out } else { panic!("Database needs to be set"); } } } impl<DB> EVM<DB> { pub fn new() -> Self { Self { env: Env::default(), db: None, } } pub fn database(&mut self, db: DB) { self.db = Some(db); } pub fn db(&mut self) -> Option<&mut DB> { self.db.as_mut() } pub fn take_db(&mut self) -> DB { core::mem::take(&mut self.db).unwrap() } } macro_rules! create_evm { ($spec:ident, $db:ident,$env:ident,$inspector:ident) => { Box::new(EVMImpl::<'a, $spec, DB, INSPECT>::new( $db, $env, $inspector, Precompiles::new::<{ SpecId::to_precompile_id($spec::SPEC_ID) }>(), )) as Box<dyn Transact + 'a> }; } pub fn evm_inner<'a, DB: Database, const INSPECT: bool>( env: &'a Env, db: &'a mut DB, insp: &'a mut dyn Inspector, ) -> Box<dyn Transact + 'a> { match env.cfg.spec_id { SpecId::LATEST => create_evm!(LatestSpec, db, env, insp), SpecId::LONDON => create_evm!(LondonSpec, db, env, insp), SpecId::BERLIN => create_evm!(BerlinSpec, db, env, insp), SpecId::ISTANBUL => create_evm!(IstanbulSpec, db, env, insp), SpecId::BYZANTINE => create_evm!(ByzantineSpec, db, env, insp), _ => panic!("Spec Not supported"), } }
use crate::{ db::{Database, DatabaseCommit, DatabaseRef, RefDBWrapper}, error::ExitReason, evm_impl::{EVMImpl, Transact}, subroutine::State, BerlinSpec, ByzantineSpec, Env, Inspector, IstanbulSpec, LatestSpec, LondonSpec, NoOpInspector, Spec, SpecId, TransactOut, }; use alloc::boxed::Box; use revm_precompiles::Precompiles; pub struct EVM<DB> { pub env: Env, pub db: Option<DB>, } pub fn new<DB>() -> EVM<DB> { EVM::new() } impl<DB: Database + DatabaseCommit> EVM<DB> { pub fn transact_commit(&mut self) -> (ExitReason, TransactOut, u64) { let (exit, out, gas, state) = self.transact(); self.db.as_mut().unwrap().commit(state); (exit, out, gas) } pub fn inspect_commit<INSP: Inspector>( &mut self, inspector: INSP, ) -> (
} impl<DB: Database> EVM<DB> { pub fn transact(&mut self) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_mut() { let mut noop = NoOpInspector {}; let out = evm_inner::<DB, false>(&self.env, db, &mut noop).transact(); out } else { panic!("Database needs to be set"); } } pub fn inspect<INSP: Inspector>( &mut self, mut inspector: INSP, ) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_mut() { evm_inner::<DB, true>(&self.env, db, &mut inspector).transact() } else { panic!("Database needs to be set"); } } } impl<DB: DatabaseRef> EVM<DB> { pub fn transact_ref(&self) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_ref() { let mut noop = NoOpInspector {}; let mut db = RefDBWrapper::new(db); let db = &mut db; let out = evm_inner::<RefDBWrapper, false>(&self.env, db, &mut noop).transact(); out } else { panic!("Database needs to be set"); } } pub fn inspect_ref<INSP: Inspector>( &self, mut inspector: INSP, ) -> (ExitReason, TransactOut, u64, State) { if let Some(db) = self.db.as_ref() { let mut db = RefDBWrapper::new(db); let db = &mut db; let out = evm_inner::<RefDBWrapper, true>(&self.env, db, &mut inspector).transact(); out } else { panic!("Database needs to be set"); } } } impl<DB> EVM<DB> { pub fn new() -> Self { Self { env: Env::default(), db: None, } } pub fn database(&mut self, db: DB) { self.db = Some(db); } pub fn db(&mut self) -> Option<&mut DB> { self.db.as_mut() } pub fn take_db(&mut self) -> DB { core::mem::take(&mut self.db).unwrap() } } macro_rules! 
create_evm { ($spec:ident, $db:ident,$env:ident,$inspector:ident) => { Box::new(EVMImpl::<'a, $spec, DB, INSPECT>::new( $db, $env, $inspector, Precompiles::new::<{ SpecId::to_precompile_id($spec::SPEC_ID) }>(), )) as Box<dyn Transact + 'a> }; } pub fn evm_inner<'a, DB: Database, const INSPECT: bool>( env: &'a Env, db: &'a mut DB, insp: &'a mut dyn Inspector, ) -> Box<dyn Transact + 'a> { match env.cfg.spec_id { SpecId::LATEST => create_evm!(LatestSpec, db, env, insp), SpecId::LONDON => create_evm!(LondonSpec, db, env, insp), SpecId::BERLIN => create_evm!(BerlinSpec, db, env, insp), SpecId::ISTANBUL => create_evm!(IstanbulSpec, db, env, insp), SpecId::BYZANTINE => create_evm!(ByzantineSpec, db, env, insp), _ => panic!("Spec Not supported"), } }
ExitReason, TransactOut, u64) { let (exit, out, gas, state) = self.inspect(inspector); self.db.as_mut().unwrap().commit(state); (exit, out, gas) }
function_block-function_prefix_line
[ { "content": "#[inline(always)]\n\nfn gas_call_l64_after<SPEC: Spec>(machine: &mut Machine) -> Result<u64, ExitReason> {\n\n if SPEC::enabled(TANGERINE) {\n\n //EIP-150: Gas cost changes for IO-heavy operations\n\n let gas = machine.gas().remaining();\n\n Ok(gas - gas / 64)\n\n } else {\n\n Ok(machine.gas().remaining())\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 0, "score": 306576.5717819926 }, { "content": "pub fn selfdestruct_cost<SPEC: Spec>(res: SelfDestructResult) -> u64 {\n\n let should_charge_topup = if SPEC::enabled(ISTANBUL) {\n\n res.had_value && !res.exists\n\n } else {\n\n !res.exists\n\n };\n\n\n\n let selfdestruct_gas_topup = if should_charge_topup {\n\n if SPEC::enabled(TANGERINE) {\n\n //EIP-150: Gas cost changes for IO-heavy operations\n\n 25000\n\n } else {\n\n 0\n\n }\n\n } else {\n\n 0\n\n };\n\n\n\n let selfdestruct_gas = if SPEC::enabled(TANGERINE) { 5000 } else { 0 }; //EIP-150: Gas cost changes for IO-heavy operations\n\n\n\n let mut gas = selfdestruct_gas + selfdestruct_gas_topup;\n\n if SPEC::enabled(BERLIN) && res.is_cold {\n\n gas += ACCOUNT_ACCESS_COLD\n\n }\n\n gas\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 1, "score": 285628.4231902328 }, { "content": "pub fn gas_query(gas_used: u64, gas_limit: u64) -> Result<u64, ExitError> {\n\n if gas_used > gas_limit {\n\n return Err(ExitError::OutOfGas);\n\n }\n\n Ok(gas_used)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PrecompileOutput {\n\n pub cost: u64,\n\n pub output: Vec<u8>,\n\n pub logs: Vec<Log>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Log {\n\n pub address: Address,\n\n pub topics: Vec<H256>,\n\n pub data: Bytes,\n\n}\n", "file_path": "crates/revm_precompiles/src/lib.rs", "rank": 2, "score": 259962.76989447023 }, { "content": "#[inline(always)]\n\npub fn hot_cold_cost<SPEC: Spec>(is_cold: bool, regular_value: u64) -> u64 {\n\n if SPEC::enabled(BERLIN) {\n\n if is_cold {\n\n ACCOUNT_ACCESS_COLD\n\n } else {\n\n 
STORAGE_READ_WARM\n\n }\n\n } else {\n\n regular_value\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 3, "score": 259789.4663305828 }, { "content": "pub fn sload_cost<SPEC: Spec>(is_cold: bool) -> u64 {\n\n if SPEC::enabled(BERLIN) {\n\n if is_cold {\n\n SLOAD_COLD\n\n } else {\n\n STORAGE_READ_WARM\n\n }\n\n } else if SPEC::enabled(ISTANBUL) {\n\n // EIP-1884: Repricing for trie-size-dependent opcodes\n\n 800\n\n } else if SPEC::enabled(TANGERINE) {\n\n // EIP-150: Gas cost changes for IO-heavy operations\n\n 200\n\n } else {\n\n 50\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 4, "score": 257642.12623642886 }, { "content": "pub fn returndatacopy<SPEC: Spec>(machine: &mut Machine) -> Control {\n\n check!(SPEC::enabled(BYZANTINE)); // EIP-211: New opcodes: RETURNDATASIZE and RETURNDATACOPY\n\n pop_u256!(machine, memory_offset, data_offset, len);\n\n gas_or_fail!(machine, gas::verylowcopy_cost(len));\n\n memory_resize!(machine, memory_offset, len);\n\n if data_offset\n\n .checked_add(len)\n\n .map(|l| l > U256::from(machine.return_data_buffer.len()))\n\n .unwrap_or(true)\n\n {\n\n return Control::Exit(ExitError::OutOfOffset.into());\n\n }\n\n\n\n match machine\n\n .memory\n\n .copy_large(memory_offset, data_offset, len, &machine.return_data_buffer)\n\n {\n\n Ok(()) => Control::Continue,\n\n Err(e) => Control::Exit(e.into()),\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 5, "score": 256295.09950518998 }, { "content": "#[inline(always)]\n\npub fn revert<SPEC: Spec>(machine: &mut Machine) -> Control {\n\n check!(SPEC::enabled(BYZANTINE)); // EIP-140: REVERT instruction\n\n // zero gas cost gas!(machine,gas::ZERO);\n\n pop_u256!(machine, start, len);\n\n memory_resize!(machine, start, len);\n\n machine.return_range = start..(start + len);\n\n Control::Exit(ExitRevert::Reverted.into())\n\n}\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 6, 
"score": 256295.09950518998 }, { "content": "pub fn returndatasize<SPEC: Spec>(machine: &mut Machine) -> Control {\n\n check!(SPEC::enabled(BYZANTINE)); // EIP-211: New opcodes: RETURNDATASIZE and RETURNDATACOPY\n\n gas!(machine, gas::BASE);\n\n\n\n let size = U256::from(machine.return_data_buffer.len());\n\n push_u256!(machine, size);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 7, "score": 256295.09950518998 }, { "content": "pub fn account_access_gas<SPEC: Spec>(is_cold: bool) -> u64 {\n\n if SPEC::enabled(BERLIN) {\n\n if is_cold {\n\n ACCOUNT_ACCESS_COLD\n\n } else {\n\n STORAGE_READ_WARM\n\n }\n\n } else if SPEC::enabled(ISTANBUL) {\n\n 700\n\n } else {\n\n 20\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 8, "score": 255056.0748573743 }, { "content": "#[inline(always)]\n\npub fn eval_exp<SPEC: Spec>(machine: &mut Machine) -> Control {\n\n pop_u256!(machine, op1, op2);\n\n gas_or_fail!(machine, gas::exp_cost::<SPEC>(op2));\n\n let ret = exp(op1, op2);\n\n push_u256!(machine, ret);\n\n\n\n Control::Continue\n\n}\n\n\n\n/// In the yellow paper `SIGNEXTEND` is defined to take two inputs, we will call them\n\n/// `x` and `y`, and produce one output. The first `t` bits of the output (numbering from the\n\n/// left, starting from 0) are equal to the `t`-th bit of `y`, where `t` is equal to\n\n/// `256 - 8(x + 1)`. The remaining bits of the output are equal to the corresponding bits of `y`.\n\n/// Note: if `x >= 32` then the output is equal to `y` since `t <= 0`. To efficiently implement\n\n/// this algorithm in the case `x < 32` we do the following. Let `b` be equal to the `t`-th bit\n\n/// of `y` and let `s = 255 - t = 8x + 7` (this is effectively the same index as `t`, but\n\n/// numbering the bits from the right instead of the left). 
We can create a bit mask which is all\n\n/// zeros up to and including the `t`-th bit, and all ones afterwards by computing the quantity\n\n/// `2^s - 1`. We can use this mask to compute the output depending on the value of `b`.\n\n/// If `b == 1` then the yellow paper says the output should be all ones up to\n\n/// and including the `t`-th bit, followed by the remaining bits of `y`; this is equal to\n\n/// `y | !mask` where `|` is the bitwise `OR` and `!` is bitwise negation. Similarly, if\n\n/// `b == 0` then the yellow paper says the output should start with all zeros, then end with\n\n/// bits from `b`; this is equal to `y & mask` where `&` is bitwise `AND`.\n", "file_path": "crates/revm/src/instructions/arithmetic.rs", "rank": 9, "score": 253626.2878977654 }, { "content": "pub fn exp_cost<SPEC: Spec>(power: U256) -> Option<u64> {\n\n if power == U256::zero() {\n\n Some(EXP)\n\n } else {\n\n let gas_byte = U256::from(if SPEC::enabled(SPURIOUS_DRAGON) {\n\n 50\n\n } else {\n\n 10\n\n }); // EIP-160: EXP cost increase\n\n let gas = U256::from(EXP).checked_add(\n\n gas_byte.checked_mul(U256::from(super::utils::log2floor(power) / 8 + 1))?,\n\n )?;\n\n\n\n if gas > U256::from(u64::MAX) {\n\n return None;\n\n }\n\n\n\n Some(gas.as_u64())\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 10, "score": 251921.97793530684 }, { "content": "pub fn sload<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n pop!(machine, index);\n\n let (value, is_cold) = handler.sload(machine.contract.address, index);\n\n inspect!(\n\n handler,\n\n sload,\n\n &machine.contract.address,\n\n &index,\n\n &value,\n\n is_cold\n\n );\n\n gas!(machine, gas::sload_cost::<SPEC>(is_cold));\n\n push!(machine, value);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 11, "score": 248940.7280970465 }, { "content": "pub fn sstore<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> 
Control {\n\n check!(!SPEC::IS_STATIC_CALL);\n\n\n\n pop!(machine, index, value);\n\n let (original, old, new, is_cold) = handler.sstore(machine.contract.address, index, value);\n\n inspect!(\n\n handler,\n\n sstore,\n\n machine.contract.address,\n\n index,\n\n new,\n\n old,\n\n original,\n\n is_cold\n\n );\n\n gas_or_fail!(machine, {\n\n let remaining_gas = machine.gas.remaining();\n\n gas::sstore_cost::<SPEC>(original, old, new, remaining_gas, is_cold)\n\n });\n\n refund!(machine, gas::sstore_refund::<SPEC>(original, old, new));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 12, "score": 248940.7280970465 }, { "content": "pub fn selfbalance<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n check!(SPEC::enabled(ISTANBUL)); // EIP-1884: Repricing for trie-size-dependent opcodes\n\n let (balance, _) = handler.balance(machine.contract.address);\n\n gas!(machine, gas::LOW);\n\n push_u256!(machine, balance);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 13, "score": 248940.7280970465 }, { "content": "pub fn selfdestruct<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n check!(!SPEC::IS_STATIC_CALL);\n\n pop!(machine, target);\n\n\n\n let res = handler.selfdestruct(machine.contract.address, target.into());\n\n inspect!(handler, selfdestruct);\n\n\n\n // EIP-3529: Reduction in refunds\n\n if !SPEC::enabled(LONDON) && !res.previously_destroyed {\n\n refund!(machine, gas::SELFDESTRUCT)\n\n }\n\n gas!(machine, gas::selfdestruct_cost::<SPEC>(res));\n\n\n\n Control::Exit(ExitSucceed::SelfDestructed.into())\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 14, "score": 248940.7280970465 }, { "content": "pub fn extcodehash<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n check!(SPEC::enabled(CONSTANTINOPLE)); // EIP-1052: EXTCODEHASH opcode\n\n 
pop!(machine, address);\n\n let (code_hash, is_cold) = handler.code_hash(address.into());\n\n gas!(\n\n machine,\n\n if SPEC::enabled(ISTANBUL) {\n\n // EIP-1884: Repricing for trie-size-dependent opcodes\n\n gas::account_access_gas::<SPEC>(is_cold)\n\n } else {\n\n 400\n\n }\n\n );\n\n push!(machine, code_hash);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 15, "score": 248940.7280970465 }, { "content": "pub fn balance<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n pop!(machine, address);\n\n let (balance, is_cold) = handler.balance(address.into());\n\n gas!(\n\n machine,\n\n if SPEC::enabled(ISTANBUL) {\n\n // EIP-1884: Repricing for trie-size-dependent opcodes\n\n gas::account_access_gas::<SPEC>(is_cold)\n\n } else if SPEC::enabled(TANGERINE) {\n\n 400\n\n } else {\n\n 20\n\n }\n\n );\n\n push_u256!(machine, balance);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 16, "score": 248940.7280970465 }, { "content": "pub fn extcodecopy<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n pop!(machine, address);\n\n pop_u256!(machine, memory_offset, code_offset, len);\n\n\n\n let (code, is_cold) = handler.code(address.into());\n\n gas_or_fail!(machine, gas::extcodecopy_cost::<SPEC>(len, is_cold));\n\n\n\n memory_resize!(machine, memory_offset, len);\n\n match machine\n\n .memory\n\n .copy_large(memory_offset, code_offset, len, &code)\n\n {\n\n Ok(()) => (),\n\n Err(e) => return Control::Exit(e.into()),\n\n };\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 17, "score": 248940.7280970465 }, { "content": "pub fn basefee<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n check!(SPEC::enabled(LONDON)); // EIP-3198: BASEFEE opcode\n\n let basefee = handler.env().block.basefee;\n\n gas!(machine, gas::BASE);\n\n 
push_u256!(machine, basefee);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 18, "score": 248940.7280970465 }, { "content": "pub fn chainid<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n check!(SPEC::enabled(ISTANBUL)); // EIP-1344: ChainID opcode\n\n gas!(machine, gas::BASE);\n\n\n\n push_u256!(machine, handler.env().cfg.chain_id);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 19, "score": 248940.7280970465 }, { "content": "pub fn extcodesize<H: Handler, SPEC: Spec>(machine: &mut Machine, handler: &mut H) -> Control {\n\n pop!(machine, address);\n\n\n\n let (code, is_cold) = handler.code(address.into());\n\n gas!(machine, gas::account_access_gas::<SPEC>(is_cold));\n\n\n\n push_u256!(machine, U256::from(code.len()));\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 20, "score": 248940.7280970465 }, { "content": "pub fn log<H: Handler, SPEC: Spec>(machine: &mut Machine, n: u8, handler: &mut H) -> Control {\n\n check!(!SPEC::IS_STATIC_CALL);\n\n\n\n pop_u256!(machine, offset, len);\n\n gas_or_fail!(machine, gas::log_cost(n, len));\n\n memory_resize!(machine, offset, len);\n\n let data = if len == U256::zero() {\n\n Bytes::new()\n\n } else {\n\n let offset = as_usize_or_fail!(offset);\n\n let len = as_usize_or_fail!(len);\n\n\n\n Bytes::from(machine.memory.get(offset, len))\n\n };\n\n let n = n as usize;\n\n if machine.stack.len() < n {\n\n return Control::Exit(ExitError::StackUnderflow.into());\n\n }\n\n\n\n let mut topics = Vec::with_capacity(n);\n\n for _ in 0..(n) {\n\n /*** SAFETY stack bounds already checked few lines above */\n\n topics.push(unsafe { machine.stack.pop_unsafe() });\n\n }\n\n\n\n handler.log(machine.contract.address, topics, data);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 21, "score": 
242609.37031690683 }, { "content": "pub fn extcodecopy_cost<SPEC: Spec>(len: U256, is_cold: bool) -> Option<u64> {\n\n let wordd = len / U256::from(32);\n\n let wordr = len % U256::from(32);\n\n let base_gas: u64 = if SPEC::enabled(BERLIN) {\n\n if is_cold {\n\n ACCOUNT_ACCESS_COLD\n\n } else {\n\n STORAGE_READ_WARM\n\n }\n\n } else if SPEC::enabled(ISTANBUL) {\n\n 700\n\n } else {\n\n 20\n\n };\n\n let gas = U256::from(\n\n U256::from(base_gas).checked_add(U256::from(COPY).checked_mul(\n\n if wordr == U256::zero() {\n\n wordd\n\n } else {\n\n wordd + U256::one()\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 22, "score": 241338.95753827074 }, { "content": "pub fn run<INSP: 'static + Inspector + Clone + Send>(test_files: Vec<PathBuf>, inspector: INSP) {\n\n let endjob = Arc::new(AtomicBool::new(false));\n\n let console_bar = Arc::new(ProgressBar::new(test_files.len() as u64));\n\n let mut joins = Vec::new();\n\n let queue = Arc::new(Mutex::new((0, test_files)));\n\n let elapsed = Arc::new(Mutex::new(std::time::Duration::ZERO));\n\n for _ in 0..1 {\n\n let queue = queue.clone();\n\n let endjob = endjob.clone();\n\n let console_bar = console_bar.clone();\n\n let elapsed = elapsed.clone();\n\n let mut insp = inspector.clone();\n\n\n\n joins.push(\n\n std::thread::Builder::new()\n\n .stack_size(50 * 1024 * 1024)\n\n .spawn(move || loop {\n\n let test_path = {\n\n let mut queue = queue.lock().unwrap();\n\n if queue.1.len() <= queue.0 {\n", "file_path": "bins/revm-ethereum-tests/src/runner.rs", "rank": 23, "score": 231050.44495078156 }, { "content": "pub fn memory_gas(a: usize) -> Result<u64, ExitError> {\n\n let a = a as u64;\n\n MEMORY\n\n .checked_mul(a)\n\n .ok_or(ExitError::OutOfGas)?\n\n .checked_add(a.checked_mul(a).ok_or(ExitError::OutOfGas)? 
/ 512)\n\n .ok_or(ExitError::OutOfGas)\n\n}\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 25, "score": 215526.44743895906 }, { "content": "pub fn l64(gas: u64) -> u64 {\n\n gas - gas / 64\n\n}\n\n\n", "file_path": "crates/revm/src/util.rs", "rank": 26, "score": 214750.14110038837 }, { "content": "#[allow(clippy::collapsible_else_if)]\n\npub fn sstore_cost<SPEC: Spec>(\n\n original: H256,\n\n current: H256,\n\n new: H256,\n\n gas: u64,\n\n is_cold: bool,\n\n) -> Option<u64> {\n\n // TODO untangle this mess and make it more elegant\n\n let (gas_sload, gas_sstore_reset) = if SPEC::enabled(BERLIN) {\n\n (STORAGE_READ_WARM, SSTORE_RESET - SLOAD_COLD)\n\n } else {\n\n (sload_cost::<SPEC>(is_cold), SSTORE_RESET)\n\n };\n\n let gas_cost = if SPEC::enabled(CONSTANTINOPLE) {\n\n if SPEC::enabled(CONSTANTINOPLE) && gas <= CALL_STIPEND {\n\n return None;\n\n }\n\n\n\n if new == current {\n\n gas_sload\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 27, "score": 212908.48931478304 }, { "content": "pub fn call_cost<SPEC: Spec>(\n\n value: U256,\n\n is_new: bool,\n\n is_cold: bool,\n\n is_call_or_callcode: bool,\n\n is_call_or_staticcall: bool,\n\n) -> u64 {\n\n let transfers_value = value != U256::default();\n\n\n\n let call_gas = if SPEC::enabled(BERLIN) {\n\n if is_cold {\n\n ACCOUNT_ACCESS_COLD\n\n } else {\n\n STORAGE_READ_WARM\n\n }\n\n } else if SPEC::enabled(TANGERINE) {\n\n // EIP-150: Gas cost changes for IO-heavy operations\n\n 700\n\n } else {\n\n 40\n\n };\n\n\n\n call_gas\n\n + xfer_cost(is_call_or_callcode, transfers_value)\n\n + new_cost::<SPEC>(is_call_or_staticcall, is_new, transfers_value)\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 28, "score": 212908.48931478304 }, { "content": "pub fn call<H: Handler, SPEC: Spec>(\n\n machine: &mut Machine,\n\n scheme: CallScheme,\n\n handler: &mut H,\n\n) -> Control {\n\n match scheme {\n\n CallScheme::DelegateCall => 
check!(SPEC::enabled(HOMESTEAD)), // EIP-7: DELEGATECALL\n\n CallScheme::StaticCall => check!(SPEC::enabled(BYZANTINE)), // EIP-214: New opcode STATICCALL\n\n _ => (),\n\n }\n\n machine.return_data_buffer = Bytes::new();\n\n\n\n pop_u256!(machine, local_gas_limit);\n\n pop!(machine, to);\n\n let local_gas_limit = if local_gas_limit > U256::from(u64::MAX) {\n\n u64::MAX\n\n } else {\n\n local_gas_limit.as_u64()\n\n };\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 29, "score": 207693.3745760255 }, { "content": "pub fn create<H: Handler, SPEC: Spec>(\n\n machine: &mut Machine,\n\n is_create2: bool,\n\n handler: &mut H,\n\n) -> Control {\n\n check!(!SPEC::IS_STATIC_CALL);\n\n if is_create2 {\n\n check!(SPEC::enabled(CONSTANTINOPLE)); // EIP-1014: Skinny CREATE2\n\n }\n\n\n\n machine.return_data_buffer = Bytes::new();\n\n\n\n pop_u256!(machine, value, code_offset, len);\n\n\n\n memory_resize!(machine, code_offset, len);\n\n let code = if len == U256::zero() {\n\n Bytes::new()\n\n } else {\n\n let code_offset = as_usize_or_fail!(code_offset);\n\n let len = as_usize_or_fail!(len);\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 30, "score": 207693.3745760255 }, { "content": "pub fn calc_linear_cost_u32(len: usize, base: u64, word: u64) -> u64 {\n\n (len as u64 + 32 - 1) / 32 * word + base\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/lib.rs", "rank": 31, "score": 197761.47395584115 }, { "content": "pub fn log2floor(value: U256) -> u64 {\n\n assert!(value != U256::zero());\n\n let mut l: u64 = 256;\n\n for i in 0..4 {\n\n let i = 3 - i;\n\n if value.0[i] == 0u64 {\n\n l -= 64;\n\n } else {\n\n l -= value.0[i].leading_zeros() as u64;\n\n if l == 0 {\n\n return l;\n\n } else {\n\n return l - 1;\n\n }\n\n }\n\n }\n\n l\n\n}\n", "file_path": "crates/revm/src/instructions/gas/utils.rs", "rank": 32, "score": 193278.32288186645 }, { "content": "#[inline(always)]\n\npub fn eval<H: Handler, S: Spec>(\n\n machine: &mut 
Machine,\n\n opcode: OpCode,\n\n position: usize,\n\n handler: &mut H,\n\n) -> Control {\n\n let opcode = opcode.as_u8();\n\n match opcode {\n\n opcode::STOP => Control::Exit(ExitSucceed::Stopped.into()),\n\n opcode::ADD => op2_u256_tuple!(machine, overflowing_add, gas::VERYLOW),\n\n opcode::MUL => op2_u256_tuple!(machine, overflowing_mul, gas::LOW),\n\n opcode::SUB => op2_u256_tuple!(machine, overflowing_sub, gas::VERYLOW),\n\n opcode::DIV => op2_u256_fn!(machine, arithmetic::div, gas::LOW),\n\n opcode::SDIV => op2_u256_fn!(machine, arithmetic::sdiv, gas::LOW),\n\n opcode::MOD => op2_u256_fn!(machine, arithmetic::rem, gas::LOW),\n\n opcode::SMOD => op2_u256_fn!(machine, arithmetic::srem, gas::LOW),\n\n opcode::ADDMOD => op3_u256_fn!(machine, arithmetic::addmod, gas::MID),\n\n opcode::MULMOD => op3_u256_fn!(machine, arithmetic::mulmod, gas::MID),\n\n opcode::EXP => arithmetic::eval_exp::<S>(machine),\n\n opcode::SIGNEXTEND => op2_u256_fn!(machine, arithmetic::signextend, gas::LOW),\n", "file_path": "crates/revm/src/instructions/mod.rs", "rank": 33, "score": 190652.17659713363 }, { "content": "pub fn merkelize(state: State) -> H256 {\n\n let vec: Vec<_> = state\n\n .0\n\n .into_iter()\n\n .map(|(address, acc)| {\n\n let storage_root = trie_account_rlp(acc);\n\n (address.clone(), storage_root)\n\n })\n\n .collect();\n\n\n\n trie_root(vec)\n\n}\n\n\n", "file_path": "bins/revm-merkle/src/main.rs", "rank": 34, "score": 190311.6117987975 }, { "content": "fn new_cost<SPEC: Spec>(is_call_or_staticcall: bool, is_new: bool, transfers_value: bool) -> u64 {\n\n if is_call_or_staticcall {\n\n if SPEC::enabled(ISTANBUL) {\n\n if transfers_value && is_new {\n\n NEWACCOUNT\n\n } else {\n\n 0\n\n }\n\n } else if is_new {\n\n NEWACCOUNT\n\n } else {\n\n 0\n\n }\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 35, "score": 189851.18177632394 }, { "content": "#[inline(always)]\n\npub fn jumpi(machine: &mut Machine) -> Control {\n\n 
gas!(machine, gas::HIGH);\n\n\n\n pop_u256!(machine, dest);\n\n pop!(machine, value);\n\n\n\n if value != H256::zero() {\n\n let dest = as_usize_or_fail!(dest, ExitError::InvalidJump);\n\n if machine.contract.is_valid_jump(dest) {\n\n Control::Jump(dest)\n\n } else {\n\n Control::Exit(ExitError::InvalidJump.into())\n\n }\n\n } else {\n\n Control::Continue\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 36, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn jump(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::MID);\n\n\n\n pop_u256!(machine, dest);\n\n let dest = as_usize_or_fail!(dest, ExitError::InvalidJump);\n\n\n\n if machine.contract.is_valid_jump(dest) {\n\n Control::Jump(dest)\n\n } else {\n\n Control::Exit(ExitError::InvalidJump.into())\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 37, "score": 189627.6601663729 }, { "content": "pub fn sha3(machine: &mut Machine) -> Control {\n\n pop_u256!(machine, from, len);\n\n gas_or_fail!(machine, gas::sha3_cost(len));\n\n\n\n memory_resize!(machine, from, len);\n\n let data = if len == U256::zero() {\n\n Bytes::new()\n\n } else {\n\n let from = as_usize_or_fail!(from);\n\n let len = as_usize_or_fail!(len);\n\n\n\n machine.memory.get(from, len)\n\n };\n\n\n\n let ret = Keccak256::digest(data.as_ref());\n\n push!(machine, H256::from_slice(ret.as_slice()));\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 38, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn mstore(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n\n\n pop_u256!(machine, index);\n\n pop!(machine, value);\n\n\n\n memory_resize!(machine, index, U256::from(32));\n\n let index = as_usize_or_fail!(index);\n\n try_or_fail!(machine.memory.set(index, &value[..], Some(32)));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 39, 
"score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn calldatacopy(machine: &mut Machine) -> Control {\n\n pop_u256!(machine, memory_offset, data_offset, len);\n\n gas_or_fail!(machine, gas::verylowcopy_cost(len));\n\n memory_resize!(machine, memory_offset, len);\n\n\n\n if len == U256::zero() {\n\n return Control::Continue;\n\n }\n\n\n\n try_or_fail!(machine.memory.copy_large(\n\n memory_offset,\n\n data_offset,\n\n len,\n\n &machine.contract.input\n\n ));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 40, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn mstore8(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n\n\n pop_u256!(machine, index, value);\n\n\n\n // memory aditional gas checked here\n\n memory_resize!(machine, index, U256::one());\n\n let index = as_usize_or_fail!(index);\n\n let value = (value.low_u32() & 0xff) as u8;\n\n try_or_fail!(machine.memory.set(index, &[value], Some(1)));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 41, "score": 189627.66016637287 }, { "content": "#[inline(always)]\n\npub fn ret(machine: &mut Machine) -> Control {\n\n // zero gas cost gas!(machine,gas::ZERO);\n\n pop_u256!(machine, start, len);\n\n memory_resize!(machine, start, len);\n\n machine.return_range = start..(start + len);\n\n Control::Exit(ExitSucceed::Returned.into())\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 42, "score": 189627.66016637287 }, { "content": "#[inline(always)]\n\npub fn codecopy(machine: &mut Machine) -> Control {\n\n pop_u256!(machine, memory_offset, code_offset, len);\n\n gas_or_fail!(machine, gas::verylowcopy_cost(len));\n\n memory_resize!(machine, memory_offset, len);\n\n\n\n try_or_fail!(machine.memory.copy_large(\n\n memory_offset,\n\n code_offset,\n\n len,\n\n &machine.contract.code\n\n ));\n\n Control::Continue\n\n}\n\n\n", "file_path": 
"crates/revm/src/instructions/misc.rs", "rank": 43, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn calldatasize(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n let len = U256::from(machine.contract.input.len());\n\n push_u256!(machine, len);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 44, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn codesize(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n let size = U256::from(machine.contract.code.len());\n\n push_u256!(machine, size);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 45, "score": 189627.6601663729 }, { "content": "pub fn callvalue(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n let mut ret = H256::default();\n\n machine.contract.value.to_big_endian(&mut ret[..]);\n\n push!(machine, ret);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 46, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn msize(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n push_u256!(machine, machine.memory.effective_len());\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 47, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn pop(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n pop!(machine, _val);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 48, "score": 189627.66016637287 }, { "content": "pub fn caller(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n let ret = H256::from(machine.contract.caller);\n\n push!(machine, ret);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 49, "score": 189627.6601663729 }, { 
"content": "pub fn gas(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n push_u256!(machine, U256::from(machine.gas.remaining()));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 50, "score": 189627.6601663729 }, { "content": "pub fn address(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n let ret = H256::from(machine.contract.address);\n\n push!(machine, ret);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 51, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn mload(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n pop_u256!(machine, index);\n\n\n\n // memory aditional gas checked here\n\n memory_resize!(machine, index, U256::from(32));\n\n let index = as_usize_or_fail!(index);\n\n let value = H256::from_slice(&machine.memory.get(index, 32)[..]);\n\n push!(machine, value);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 52, "score": 189627.6601663729 }, { "content": "#[inline(always)]\n\npub fn calldataload(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n\n\n pop_u256!(machine, index);\n\n\n\n let mut load = [0u8; 32];\n\n #[allow(clippy::needless_range_loop)]\n\n for i in 0..32 {\n\n if let Some(p) = index.checked_add(U256::from(i)) {\n\n if p <= U256::from(usize::MAX) {\n\n let p = p.as_usize();\n\n if p < machine.contract.input.len() {\n\n load[i] = machine.contract.input[p];\n\n }\n\n }\n\n }\n\n }\n\n\n\n push!(machine, H256::from(load));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 53, "score": 189627.66016637287 }, { "content": "#[inline(always)]\n\npub fn jumpdest(machine: &mut Machine) -> Control {\n\n gas!(machine, gas::JUMPDEST);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 54, "score": 
189627.66016637287 }, { "content": "pub fn create2_cost(len: U256) -> Option<u64> {\n\n let base = U256::from(CREATE);\n\n // ceil(len / 32.0)\n\n let sha_addup_base = len / U256::from(32)\n\n + if len % U256::from(32) == U256::zero() {\n\n U256::zero()\n\n } else {\n\n U256::one()\n\n };\n\n let sha_addup = U256::from(SHA3WORD).checked_mul(sha_addup_base)?;\n\n let gas = base.checked_add(sha_addup)?;\n\n\n\n if gas > U256::from(u64::MAX) {\n\n return None;\n\n }\n\n\n\n Some(gas.as_u64())\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 55, "score": 185985.72179537045 }, { "content": "pub fn sha3_cost(len: U256) -> Option<u64> {\n\n let wordd = len / U256::from(32);\n\n let wordr = len % U256::from(32);\n\n\n\n let gas = U256::from(SHA3).checked_add(U256::from(SHA3WORD).checked_mul(\n\n if wordr == U256::zero() {\n\n wordd\n\n } else {\n\n wordd + U256::one()\n\n },\n\n )?)?;\n\n\n\n if gas > U256::from(u64::MAX) {\n\n return None;\n\n }\n\n\n\n Some(gas.as_u64())\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 56, "score": 185985.7217953705 }, { "content": "pub fn verylowcopy_cost(len: U256) -> Option<u64> {\n\n let wordd = len / U256::from(32);\n\n let wordr = len % U256::from(32);\n\n\n\n let gas = U256::from(VERYLOW).checked_add(U256::from(COPY).checked_mul(\n\n if wordr == U256::zero() {\n\n wordd\n\n } else {\n\n wordd + U256::one()\n\n },\n\n )?)?;\n\n\n\n if gas > U256::from(u64::MAX) {\n\n return None;\n\n }\n\n\n\n Some(gas.as_u64())\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 57, "score": 185985.72179537045 }, { "content": "pub fn origin<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n let ret = H256::from(handler.env().tx.caller);\n\n push!(machine, ret);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 58, "score": 185916.09615465908 }, { "content": "pub 
fn timestamp<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n push_u256!(machine, handler.env().block.timestamp);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 59, "score": 185916.09615465908 }, { "content": "pub fn gaslimit<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n push_u256!(machine, handler.env().block.gas_limit);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 60, "score": 185916.09615465908 }, { "content": "pub fn blockhash<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BLOCKHASH);\n\n\n\n pop_u256!(machine, number);\n\n push!(machine, handler.block_hash(number));\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 61, "score": 185916.09615465908 }, { "content": "pub fn gasprice<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n let mut ret = H256::default();\n\n handler\n\n .env()\n\n .effective_gas_price()\n\n .to_big_endian(&mut ret[..]);\n\n push!(machine, ret);\n\n\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 62, "score": 185916.09615465908 }, { "content": "pub fn number<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n push_u256!(machine, handler.env().block.number);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 63, "score": 185916.09615465908 }, { "content": "pub fn coinbase<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n push!(machine, handler.env().block.coinbase.into());\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 64, "score": 
185916.09615465908 }, { "content": "pub fn difficulty<H: Handler>(machine: &mut Machine, handler: &mut H) -> Control {\n\n gas!(machine, gas::BASE);\n\n\n\n push_u256!(machine, handler.env().block.difficulty);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/system.rs", "rank": 65, "score": 185916.09615465908 }, { "content": "pub fn create_address(caller: H160, nonce: u64) -> H160 {\n\n let mut stream = rlp::RlpStream::new_list(2);\n\n stream.append(&caller);\n\n stream.append(&nonce);\n\n H256::from_slice(Keccak256::digest(&stream.out()).as_slice()).into()\n\n}\n\n\n", "file_path": "crates/revm/src/util.rs", "rank": 66, "score": 185756.11044797883 }, { "content": "#[allow(clippy::collapsible_else_if)]\n\npub fn sstore_refund<SPEC: Spec>(original: H256, current: H256, new: H256) -> i64 {\n\n if SPEC::enabled(ISTANBUL) {\n\n // EIP-3529: Reduction in refunds\n\n let sstore_clears_schedule = if SPEC::enabled(LONDON) {\n\n (SSTORE_RESET - SLOAD_COLD + ACCESS_LIST_STORAGE_KEY) as i64\n\n } else {\n\n REFUND_SSTORE_CLEARS\n\n };\n\n if current == new {\n\n 0\n\n } else {\n\n if original == current && new == H256::default() {\n\n sstore_clears_schedule\n\n } else {\n\n let mut refund = 0;\n\n\n\n if original != H256::default() {\n\n if current == H256::default() {\n\n refund -= sstore_clears_schedule;\n\n } else if new == H256::default() {\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 68, "score": 183385.8250087102 }, { "content": "#[inline(always)]\n\npub fn dup(machine: &mut Machine, n: usize) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n\n\n let value = try_or_fail!(machine.stack.peek(n - 1));\n\n push!(machine, value);\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 69, "score": 182575.15931016178 }, { "content": "#[inline(always)]\n\npub fn swap(machine: &mut Machine, n: usize) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n\n\n let val1 = 
try_or_fail!(machine.stack.peek(0));\n\n let val2 = try_or_fail!(machine.stack.peek(n));\n\n try_or_fail!(machine.stack.set(0, val2));\n\n try_or_fail!(machine.stack.set(n, val1));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 70, "score": 182575.15931016178 }, { "content": "#[inline(always)]\n\npub fn pc(machine: &mut Machine, position: usize) -> Control {\n\n gas!(machine, gas::BASE);\n\n push_u256!(machine, U256::from(position));\n\n Control::Continue\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 71, "score": 180324.26927489162 }, { "content": "pub fn berlin_run(input: &[u8], gas_limit: u64) -> PrecompileResult {\n\n run_inner(input, gas_limit, 200, |a, b, c, d| {\n\n berlin_gas_calc(a, b, c, d)\n\n })\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/modexp.rs", "rank": 72, "score": 179226.25264718797 }, { "content": "pub fn log_cost(n: u8, len: U256) -> Option<u64> {\n\n let gas = U256::from(LOG)\n\n .checked_add(U256::from(LOGDATA).checked_mul(len)?)?\n\n .checked_add(U256::from(LOGTOPIC * n as u64))?;\n\n\n\n if gas > U256::from(u64::MAX) {\n\n return None;\n\n }\n\n\n\n Some(gas.as_u64())\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 73, "score": 179034.0742363669 }, { "content": "#[inline(always)]\n\npub fn push(machine: &mut Machine, n: usize, position: usize) -> Control {\n\n gas!(machine, gas::VERYLOW);\n\n let end = min(position + 1 + n, machine.contract.code.len());\n\n let slice = &machine.contract.code[(position + 1)..end];\n\n let mut val = [0u8; 32];\n\n val[(32 - slice.len())..32].copy_from_slice(slice);\n\n\n\n push!(machine, H256(val));\n\n Control::ContinueN(1 + n)\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/misc.rs", "rank": 74, "score": 174109.47566509558 }, { "content": "pub trait Spec {\n\n /// litle bit of magic. 
We can have child version of Spec that contains static flag enabled\n\n type STATIC: Spec;\n\n\n\n #[inline(always)]\n\n fn enabled(spec_id: SpecId) -> bool {\n\n Self::SPEC_ID as u8 >= spec_id as u8\n\n }\n\n const SPEC_ID: SpecId;\n\n /// static flag used in STATIC type;\n\n const IS_STATIC_CALL: bool;\n\n}\n\n\n\nmacro_rules! spec {\n\n ($spec_id:tt) => {\n\n #[allow(non_snake_case)]\n\n mod $spec_id {\n\n use super::{NotStaticSpec, Spec};\n\n use crate::SpecId;\n\n\n", "file_path": "crates/revm/src/spec/spec.rs", "rank": 75, "score": 168953.9762285916 }, { "content": "#[auto_impl(&mut, Box)]\n\npub trait Inspector {\n\n // get opcode by calling `machine.contract.opcode(machine.program_counter())`.\n\n // all other information can be obtained from machine.\n\n fn step(&mut self, machine: &mut Machine);\n\n fn eval(&mut self, eval: &mut Control, machine: &mut Machine);\n\n\n\n fn load_account(&mut self, address: &H160);\n\n\n\n fn sload(&mut self, address: &H160, slot: &H256, value: &H256, is_cold: bool);\n\n\n\n fn sstore(\n\n &mut self,\n\n address: H160,\n\n slot: H256,\n\n new_value: H256,\n\n old_value: H256,\n\n original_value: H256,\n\n is_cold: bool,\n\n );\n\n\n", "file_path": "crates/revm/src/inspector.rs", "rank": 76, "score": 168890.4637355319 }, { "content": "#[auto_impl(& mut, Box)]\n\npub trait DatabaseCommit {\n\n fn commit(&mut self, changes: Map<H160, Account>);\n\n}\n\n\n", "file_path": "crates/revm/src/db/db.rs", "rank": 77, "score": 165112.26387496133 }, { "content": "#[auto_impl(&, Box)]\n\npub trait DatabaseRef {\n\n /// Whether account at address exists.\n\n fn exists(&self, address: H160) -> Option<AccountInfo>;\n\n /// Get basic account information.\n\n fn basic(&self, address: H160) -> AccountInfo;\n\n /// Get account code by its hash\n\n fn code_by_hash(&self, code_hash: H256) -> Bytes;\n\n /// Get storage value of address at index.\n\n fn storage(&self, address: H160, index: H256) -> H256;\n\n\n\n // History related\n\n fn 
block_hash(&self, number: U256) -> H256;\n\n}\n\n\n\npub struct RefDBWrapper<'a> {\n\n pub db: &'a dyn DatabaseRef,\n\n}\n\n\n\nimpl<'a> RefDBWrapper<'a> {\n\n pub fn new(db: &'a dyn DatabaseRef) -> Self {\n", "file_path": "crates/revm/src/db/db.rs", "rank": 78, "score": 165108.07983409293 }, { "content": "pub trait Transact {\n\n /// Do transaction.\n\n /// Return ExitReason, Output for call or Address if we are creating contract, gas spend, State that needs to be applied.\n\n fn transact(&mut self) -> (ExitReason, TransactOut, u64, State);\n\n}\n\n\n\nimpl<'a, GSPEC: Spec, DB: Database, const INSPECT: bool> Transact\n\n for EVMImpl<'a, GSPEC, DB, INSPECT>\n\n{\n\n fn transact(&mut self) -> (ExitReason, TransactOut, u64, State) {\n\n let caller = self.env.tx.caller;\n\n let value = self.env.tx.value;\n\n let data = self.env.tx.data.clone();\n\n let gas_limit = self.env.tx.gas_limit;\n\n let exit_error = |reason: ExitReason| (reason, TransactOut::None, 0, State::new());\n\n\n\n if GSPEC::enabled(LONDON) {\n\n if let Some(priority_fee) = self.env.tx.gas_priority_fee {\n\n if priority_fee > self.env.tx.gas_price {\n\n // or gas_max_fee for eip1559\n", "file_path": "crates/revm/src/evm_impl.rs", "rank": 79, "score": 158319.5298807958 }, { "content": "pub fn deserialize_str_as_u64<'de, D>(deserializer: D) -> Result<u64, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n let string = String::deserialize(deserializer)?;\n\n\n\n let output = if let Some(stripped) = string.strip_prefix(\"0x\") {\n\n u64::from_str_radix(stripped, 16).unwrap()\n\n } else {\n\n string.parse().unwrap()\n\n };\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "bins/revm-ethereum-tests/src/models/deserializer.rs", "rank": 80, "score": 154421.41827640252 }, { "content": "// Calculate gas cost according to EIP 2565:\n\n// https://eips.ethereum.org/EIPS/eip-2565\n\nfn berlin_gas_calc(base_length: u64, exp_length: u64, mod_length: u64, exp_highp: &BigUint) -> u64 {\n\n fn 
calculate_multiplication_complexity(base_length: u64, mod_length: u64) -> U256 {\n\n let max_length = max(base_length, mod_length);\n\n let mut words = max_length / 8;\n\n if max_length % 8 > 0 {\n\n words += 1;\n\n }\n\n let words = U256::from(words);\n\n words * words\n\n }\n\n\n\n let multiplication_complexity = calculate_multiplication_complexity(base_length, mod_length);\n\n let iteration_count = calculate_iteration_count(exp_length, exp_highp);\n\n let gas = (multiplication_complexity * U256::from(iteration_count)) / 3;\n\n if gas > U256::from(u64::MAX) {\n\n return u64::MAX;\n\n } else {\n\n max(200, gas.as_u64())\n\n }\n\n}\n", "file_path": "crates/revm_precompiles/src/modexp.rs", "rank": 81, "score": 147945.00010794686 }, { "content": "fn byzantium_gas_calc(base_len: u64, exp_len: u64, mod_len: u64, exp_highp: &BigUint) -> u64 {\n\n // ouput of this function is bounded by 2^128\n\n fn mul_complexity(x: u64) -> U256 {\n\n if x <= 64 {\n\n U256::from(x * x)\n\n } else if x <= 1_024 {\n\n U256::from(x * x / 4 + 96 * x - 3_072)\n\n } else {\n\n // up-cast to avoid overflow\n\n let x = U256::from(x);\n\n let x_sq = x * x; // x < 2^64 => x*x < 2^128 < 2^256 (no overflow)\n\n x_sq / U256::from(16) + U256::from(480) * x - U256::from(199_680)\n\n }\n\n }\n\n\n\n let mul = mul_complexity(core::cmp::max(mod_len, base_len));\n\n let iter_count = U256::from(calculate_iteration_count(exp_len, exp_highp));\n\n // mul * iter_count bounded by 2^195 < 2^256 (no overflow)\n\n let gas = (mul * iter_count) / U256::from(20);\n\n\n\n if gas.bits() > 64 {\n\n u64::MAX\n\n } else {\n\n gas.as_u64()\n\n }\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/modexp.rs", "rank": 82, "score": 147945.00010794686 }, { "content": "fn calculate_iteration_count(exp_length: u64, exp_highp: &BigUint) -> u64 {\n\n let mut iteration_count: u64 = 0;\n\n\n\n if exp_length <= 32 && exp_highp.is_zero() {\n\n iteration_count = 0;\n\n } else if exp_length <= 32 {\n\n iteration_count = 
exp_highp.bits() - 1;\n\n } else if exp_length > 32 {\n\n iteration_count = (8 * (exp_length - 32)) + max(1, exp_highp.bits()) - 1;\n\n }\n\n\n\n max(iteration_count, 1)\n\n}\n\n\n\nmacro_rules! read_u64_with_overflow {\n\n ($input:expr,$from:expr,$to:expr, $overflow_limit:expr) => {{\n\n const SPLIT: usize = 32 - size_of::<u64>();\n\n let len = $input.len();\n\n let from_zero = min($from, len);\n\n let from = min(from_zero + SPLIT, len);\n", "file_path": "crates/revm_precompiles/src/modexp.rs", "rank": 83, "score": 146055.50896205037 }, { "content": "fn run_mul(input: &[u8], cost: u64, target_gas: u64) -> PrecompileResult {\n\n let cost = gas_query(cost, target_gas)?;\n\n use bn::AffineG1;\n\n\n\n let mut input = input.to_vec();\n\n input.resize(MUL_INPUT_LEN, 0);\n\n\n\n let p = read_point(&input, 0)?;\n\n\n\n let mut fr_buf = [0u8; 32];\n\n fr_buf.copy_from_slice(&input[64..96]);\n\n let fr = bn::Fr::from_slice(&fr_buf[..])\n\n .map_err(|_| ExitError::Other(Cow::Borrowed(\"Invalid field element\")))?;\n\n\n\n let mut out = [0u8; 64];\n\n if let Some(mul) = AffineG1::from_jacobian(p * fr) {\n\n mul.x().to_big_endian(&mut out[..32]).unwrap();\n\n mul.y().to_big_endian(&mut out[32..]).unwrap();\n\n }\n\n\n\n Ok(PrecompileOutput::without_logs(cost, out.to_vec()))\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/bn128.rs", "rank": 84, "score": 143058.24259846288 }, { "content": "fn run_add(input: &[u8], cost: u64, target_gas: u64) -> PrecompileResult {\n\n let cost = gas_query(cost, target_gas)?;\n\n\n\n use bn::AffineG1;\n\n\n\n let mut input = input.to_vec();\n\n input.resize(ADD_INPUT_LEN, 0);\n\n\n\n let p1 = read_point(&input, 0)?;\n\n let p2 = read_point(&input, 64)?;\n\n\n\n let mut output = [0u8; 64];\n\n if let Some(sum) = AffineG1::from_jacobian(p1 + p2) {\n\n sum.x()\n\n .into_u256()\n\n .to_big_endian(&mut output[..32])\n\n .unwrap();\n\n sum.y()\n\n .into_u256()\n\n .to_big_endian(&mut output[32..])\n\n .unwrap();\n\n }\n\n\n\n 
Ok(PrecompileOutput::without_logs(cost, output.to_vec()))\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/bn128.rs", "rank": 85, "score": 143058.24259846288 }, { "content": "#[auto_impl(& mut, Box)]\n\npub trait Database {\n\n /// Whether account at address exists.\n\n fn exists(&mut self, address: H160) -> Option<AccountInfo>;\n\n /// Get basic account information.\n\n fn basic(&mut self, address: H160) -> AccountInfo;\n\n /// Get account code by its hash\n\n fn code_by_hash(&mut self, code_hash: H256) -> Bytes;\n\n /// Get storage value of address at index.\n\n fn storage(&mut self, address: H160, index: H256) -> H256;\n\n\n\n // History related\n\n fn block_hash(&mut self, number: U256) -> H256;\n\n}\n\n\n", "file_path": "crates/revm/src/db/db.rs", "rank": 86, "score": 134255.6131760743 }, { "content": "#[inline(always)]\n\npub fn not(op1: U256) -> U256 {\n\n !op1\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/bitwise.rs", "rank": 87, "score": 134108.99949877366 }, { "content": "/// reference: https://eips.ethereum.org/EIPS/eip-152\n\n/// input format:\n\n/// [4 bytes for rounds][64 bytes for h][128 bytes for m][8 bytes for t_0][8 bytes for t_1][1 byte for f]\n\nfn run(input: &[u8], target_gas: u64) -> PrecompileResult {\n\n if input.len() != INPUT_LENGTH {\n\n return Err(ExitError::Other(Cow::Borrowed(\n\n \"Invalid last flag for blake2\",\n\n )));\n\n }\n\n\n\n // rounds 4 bytes\n\n let rounds = u32::from_be_bytes(input[..4].try_into().unwrap()) as usize;\n\n let cost = gas_query(rounds as u64 * F_ROUND, target_gas)?;\n\n\n\n let mut h = [0u64; 8];\n\n let mut m = [0u64; 16];\n\n\n\n for (i, pos) in (4..68).step_by(8).enumerate() {\n\n h[i] = u64::from_le_bytes(input[pos..pos + 8].try_into().unwrap());\n\n }\n\n for (i, pos) in (68..196).step_by(8).enumerate() {\n\n m[i] = u64::from_le_bytes(input[pos..pos + 8].try_into().unwrap());\n\n }\n", "file_path": "crates/revm_precompiles/src/blake2.rs", "rank": 88, "score": 133110.00191059732 }, { 
"content": "fn run_inner<F>(input: &[u8], gas_limit: u64, min_gas: u64, calc_gas: F) -> PrecompileResult\n\nwhere\n\n F: FnOnce(u64, u64, u64, &BigUint) -> u64,\n\n{\n\n let len = input.len();\n\n let (base_len, base_overflow) = read_u64_with_overflow!(input, 0, 32, u32::MAX as usize);\n\n let (exp_len, exp_overflow) = read_u64_with_overflow!(input, 32, 64, u32::MAX as usize);\n\n let (mod_len, mod_overflow) = read_u64_with_overflow!(input, 64, 96, u32::MAX as usize);\n\n\n\n if base_overflow || mod_overflow {\n\n return Ok(PrecompileOutput::without_logs(u64::MAX, Vec::new()));\n\n }\n\n\n\n let (r, gas_cost) = if base_len == 0 && mod_len == 0 {\n\n (BigUint::zero(), min_gas)\n\n } else {\n\n // set limit for exp overflow\n\n if exp_overflow {\n\n return Ok(PrecompileOutput::without_logs(u64::MAX, Vec::new()));\n\n }\n", "file_path": "crates/revm_precompiles/src/modexp.rs", "rank": 89, "score": 132804.24698452433 }, { "content": "#[inline(always)]\n\npub fn iszero(op1: U256) -> U256 {\n\n if op1 == U256::zero() {\n\n U256::one()\n\n } else {\n\n U256::zero()\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/bitwise.rs", "rank": 90, "score": 132289.69848306497 }, { "content": "/// See: https://ethereum.github.io/yellowpaper/paper.pdf\n\n/// See: https://docs.soliditylang.org/en/develop/units-and-global-variables.html#mathematical-and-cryptographic-functions\n\n/// See: https://etherscan.io/address/0000000000000000000000000000000000000002\n\nfn sha256_run(input: &[u8], gas_limit: u64) -> PrecompileResult {\n\n let cost = gas_query(calc_linear_cost_u32(input.len(), 60, 12), gas_limit)?;\n\n let output = sha2::Sha256::digest(input).to_vec();\n\n Ok(PrecompileOutput::without_logs(cost, output))\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/hash.rs", "rank": 91, "score": 131519.97468125616 }, { "content": "/// Takes the input bytes, copies them, and returns it as the output.\n\n///\n\n/// See: https://ethereum.github.io/yellowpaper/paper.pdf\n\n/// 
See: https://etherscan.io/address/0000000000000000000000000000000000000004\n\nfn identity_run(input: &[u8], gas_limit: u64) -> PrecompileResult {\n\n let gas_used = gas_query(\n\n calc_linear_cost_u32(input.len(), IDENTITY_BASE, IDENTITY_PER_WORD),\n\n gas_limit,\n\n )?;\n\n Ok(PrecompileOutput::without_logs(gas_used, input.to_vec()))\n\n}\n\n/*\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use evm::ExitError;\n\n\n\n use crate::test_utils::new_context;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_identity() {\n\n let input = [0u8, 1, 2, 3];\n", "file_path": "crates/revm_precompiles/src/identity.rs", "rank": 92, "score": 131519.97468125616 }, { "content": "/// See: https://ethereum.github.io/yellowpaper/paper.pdf\n\n/// See: https://docs.soliditylang.org/en/develop/units-and-global-variables.html#mathematical-and-cryptographic-functions\n\n/// See: https://etherscan.io/address/0000000000000000000000000000000000000003\n\nfn ripemd160_run(input: &[u8], gas_limit: u64) -> PrecompileResult {\n\n let gas_used = gas_query(calc_linear_cost_u32(input.len(), 600, 120), gas_limit)?;\n\n let mut ret = [0u8; 32];\n\n ret[12..32].copy_from_slice(&ripemd160::Ripemd160::digest(input));\n\n Ok(PrecompileOutput::without_logs(gas_used, ret.to_vec()))\n\n}\n", "file_path": "crates/revm_precompiles/src/hash.rs", "rank": 93, "score": 131519.97468125614 }, { "content": "/// See: https://eips.ethereum.org/EIPS/eip-198\n\n/// See: https://etherscan.io/address/0000000000000000000000000000000000000005\n\nfn byzantium_run(input: &[u8], gas_limit: u64) -> PrecompileResult {\n\n run_inner(input, gas_limit, 0, |a, b, c, d| {\n\n byzantium_gas_calc(a, b, c, d)\n\n })\n\n}\n\n\n", "file_path": "crates/revm_precompiles/src/modexp.rs", "rank": 94, "score": 131519.97468125616 }, { "content": "fn ec_recover_run(i: &[u8], target_gas: u64) -> PrecompileResult {\n\n let cost = gas_query(ECRECOVER_BASE, target_gas)?;\n\n let mut input = [0u8; 128];\n\n input[..min(i.len(), 
128)].copy_from_slice(&i[..min(i.len(), 128)]);\n\n\n\n let mut msg = [0u8; 32];\n\n let mut sig = [0u8; 65];\n\n\n\n msg[0..32].copy_from_slice(&input[0..32]);\n\n sig[0..32].copy_from_slice(&input[64..96]);\n\n sig[32..64].copy_from_slice(&input[96..128]);\n\n\n\n if input[32..63] != [0u8; 31] || !matches!(input[63], 27 | 28) {\n\n return Ok(PrecompileOutput::without_logs(cost, Vec::new()));\n\n }\n\n\n\n sig[64] = input[63];\n\n\n\n let out = match secp256k1_ecdsa_recover(&mut sig, &msg) {\n\n Ok(out) => H256::from(out).as_bytes().to_vec(),\n", "file_path": "crates/revm_precompiles/src/secp256k1.rs", "rank": 95, "score": 131519.97468125614 }, { "content": "fn xfer_cost(is_call_or_callcode: bool, transfers_value: bool) -> u64 {\n\n if is_call_or_callcode && transfers_value {\n\n CALLVALUE\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/gas/calc.rs", "rank": 96, "score": 129984.75890870979 }, { "content": "//use for test\n\npub fn u256_to_arr(value: &U256) -> [u8; 32] {\n\n let mut result = [0u8; 32];\n\n value.to_big_endian(&mut result);\n\n result\n\n}\n\n\n\n/*\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::precompiles::{Byzantium, Istanbul};\n\n use crate::prelude::Address;\n\n use rand::Rng;\n\n\n\n #[test]\n\n fn test_precompile_addresses() {\n\n assert_eq!(super::secp256k1::ECRecover::ADDRESS, u8_to_address(1));\n\n assert_eq!(super::hash::SHA256::ADDRESS, u8_to_address(2));\n\n assert_eq!(super::hash::RIPEMD160::ADDRESS, u8_to_address(3));\n\n assert_eq!(super::identity::Identity::ADDRESS, u8_to_address(4));\n\n assert_eq!(super::ModExp::<Byzantium>::ADDRESS, u8_to_address(5));\n", "file_path": "crates/revm_precompiles/src/lib.rs", "rank": 97, "score": 128969.4532056384 }, { "content": "#[inline(always)]\n\npub fn sgt(op1: U256, op2: U256) -> U256 {\n\n let op1: I256 = op1.into();\n\n let op2: I256 = op2.into();\n\n\n\n if op1.gt(&op2) {\n\n U256::one()\n\n } else {\n\n U256::zero()\n\n }\n\n}\n\n\n", "file_path": 
"crates/revm/src/instructions/bitwise.rs", "rank": 98, "score": 125830.3550156727 }, { "content": "#[inline(always)]\n\npub fn slt(op1: U256, op2: U256) -> U256 {\n\n let op1: I256 = op1.into();\n\n let op2: I256 = op2.into();\n\n\n\n if op1.lt(&op2) {\n\n U256::one()\n\n } else {\n\n U256::zero()\n\n }\n\n}\n\n\n", "file_path": "crates/revm/src/instructions/bitwise.rs", "rank": 99, "score": 125830.3550156727 } ]
Rust
src/lib.rs
DerickEddington/cycle_deep_safe_compare
3c3d4f5615c9d434f4037e373a7390ae34656464
#![cfg_attr(unix, doc = include_str!("../README.md"))] #![cfg_attr(windows, doc = include_str!("..\\README.md"))] #![cfg_attr( not(feature = "std"), doc = "\n", doc = "Note: This crate was built without its `std` feature and some premade items are \ unavailable, and so custom types must be provided and used with the items of the \ [`generic`] module, to have cycle-safety and/or deep-safety." )] #![cfg_attr( all(not(feature = "std"), feature = "alloc"), doc = "\n", doc = "Note: This crate was built with its `alloc` feature, and so some premade items, \ that use the [`alloc`](https://doc.rust-lang.org/alloc/) crate, are available." )] #![no_std] #![forbid(unsafe_code)] #![warn( future_incompatible, nonstandard_style, rust_2018_compatibility, rust_2018_idioms, rust_2021_compatibility, unused, clippy::all, clippy::pedantic, clippy::restriction, clippy::cargo, macro_use_extern_crate, meta_variable_misuse, missing_docs, noop_method_call, pointer_structural_match, single_use_lifetimes, trivial_casts, trivial_numeric_casts, unreachable_pub, unused_extern_crates, unused_import_braces, unused_lifetimes, unused_qualifications, unused_results, variant_size_differences, )] #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![allow( clippy::implicit_return, clippy::blanket_clippy_restriction_lints, clippy::default_numeric_fallback, clippy::separated_literal_suffix, clippy::single_char_lifetime_names, clippy::missing_docs_in_private_items, clippy::pattern_type_mismatch, clippy::shadow_reuse )] #![cfg_attr( all(feature = "anticipate", not(rust_lib_feature = "step_trait")), feature(step_trait) )] #![cfg_attr( all(feature = "anticipate", not(rust_lib_feature = "unwrap_infallible")), feature(unwrap_infallible) )] #![cfg_attr( all(feature = "anticipate", not(rust_lang_feature = "never_type")), feature(never_type) )] #[cfg(feature = "std")] pub mod robust; pub mod cycle_safe; #[cfg(feature = "alloc")] pub mod deep_safe; #[cfg(feature = "alloc")] pub mod wide_safe; pub mod basic; 
pub mod generic; pub mod utils; cfg_if::cfg_if! { if #[cfg(feature = "anticipate")] { mod anticipated; use anticipated as anticipated_or_like; use core::iter::Step; } else { mod like_anticipated; use like_anticipated as anticipated_or_like; pub use like_anticipated::Step; } } use core::{ cmp::Ordering, hash::Hash, }; pub trait Node: Sized { type Cmp: Cmp; type Id: Eq + Hash + Clone; type Index: Step + Default + Ord; fn id(&self) -> Self::Id; #[must_use] fn get_edge( &self, index: &Self::Index, ) -> Option<Self>; fn equiv_modulo_edges( &self, other: &Self, ) -> Self::Cmp; } pub trait Cmp { fn new_equiv() -> Self; fn is_equiv(&self) -> bool; fn from_ord(ord: Ordering) -> Self; } impl Cmp for bool { #[inline] fn new_equiv() -> Self { true } #[inline] fn is_equiv(&self) -> bool { *self } #[inline] fn from_ord(ord: Ordering) -> Self { ord.is_eq() } } impl Cmp for Ordering { #[inline] fn new_equiv() -> Self { Ordering::Equal } #[inline] fn is_equiv(&self) -> bool { self.is_eq() } #[inline] fn from_ord(ord: Ordering) -> Self { ord } }
#![cfg_attr(unix, doc = include_str!("../README.md"))] #![cfg_attr(windows, doc = include_str!("..\\README.md"))] #![cfg_attr( not(feature = "std"), doc = "\n", doc = "Note: This crate was built without its `std` feature and some premade items are \ unavailable, and so custom types must be provided and used with the items of the \ [`generic`] module, to have cycle-safety and/or deep-safety." )] #![cfg_attr( all(not(feature = "std"), feature = "alloc"), doc = "\n", doc = "Note: This crate was built with its `alloc` feature, and so some premade items, \ that use the [`alloc`](https://doc.rust-lang.org/alloc/) crate, are available." )] #![no_std] #![forbid(unsafe_code)] #![warn( future_incompatible, nonstandard_style, rust_2018_compatibility, rust_2018_idioms, rust_2021_compatibility, unused, clippy::all, clippy::pedantic, clippy::restriction, clippy::cargo,
self } #[inline] fn from_ord(ord: Ordering) -> Self { ord.is_eq() } } impl Cmp for Ordering { #[inline] fn new_equiv() -> Self { Ordering::Equal } #[inline] fn is_equiv(&self) -> bool { self.is_eq() } #[inline] fn from_ord(ord: Ordering) -> Self { ord } }
macro_use_extern_crate, meta_variable_misuse, missing_docs, noop_method_call, pointer_structural_match, single_use_lifetimes, trivial_casts, trivial_numeric_casts, unreachable_pub, unused_extern_crates, unused_import_braces, unused_lifetimes, unused_qualifications, unused_results, variant_size_differences, )] #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![allow( clippy::implicit_return, clippy::blanket_clippy_restriction_lints, clippy::default_numeric_fallback, clippy::separated_literal_suffix, clippy::single_char_lifetime_names, clippy::missing_docs_in_private_items, clippy::pattern_type_mismatch, clippy::shadow_reuse )] #![cfg_attr( all(feature = "anticipate", not(rust_lib_feature = "step_trait")), feature(step_trait) )] #![cfg_attr( all(feature = "anticipate", not(rust_lib_feature = "unwrap_infallible")), feature(unwrap_infallible) )] #![cfg_attr( all(feature = "anticipate", not(rust_lang_feature = "never_type")), feature(never_type) )] #[cfg(feature = "std")] pub mod robust; pub mod cycle_safe; #[cfg(feature = "alloc")] pub mod deep_safe; #[cfg(feature = "alloc")] pub mod wide_safe; pub mod basic; pub mod generic; pub mod utils; cfg_if::cfg_if! { if #[cfg(feature = "anticipate")] { mod anticipated; use anticipated as anticipated_or_like; use core::iter::Step; } else { mod like_anticipated; use like_anticipated as anticipated_or_like; pub use like_anticipated::Step; } } use core::{ cmp::Ordering, hash::Hash, }; pub trait Node: Sized { type Cmp: Cmp; type Id: Eq + Hash + Clone; type Index: Step + Default + Ord; fn id(&self) -> Self::Id; #[must_use] fn get_edge( &self, index: &Self::Index, ) -> Option<Self>; fn equiv_modulo_edges( &self, other: &Self, ) -> Self::Cmp; } pub trait Cmp { fn new_equiv() -> Self; fn is_equiv(&self) -> bool; fn from_ord(ord: Ordering) -> Self; } impl Cmp for bool { #[inline] fn new_equiv() -> Self { true } #[inline] fn is_equiv(&self) -> bool { *
random
[ { "content": "struct Args<N>(PhantomData<N>);\n\n\n\nimpl<N: Node> interleave::Params for Args<N>\n\n{\n\n type Node = N;\n\n type RNG = default::RandomNumberGenerator;\n\n type Table = hash_map::Table<Self>;\n\n}\n\n\n\nimpl<N: Node> hash_map::Params for Args<N>\n\n{\n\n type Node = N;\n\n}\n\n\n\nimpl<N: Node> recursion::queue::Params for Args<N>\n\n{\n\n type Node = N;\n\n}\n\n\n\n\n\n/// Equivalence predicate that can handle cyclic graphs and very-deep graphs.\n", "file_path": "src/robust.rs", "rank": 0, "score": 69069.8307035913 }, { "content": "type Region = Rc<[RefCell<Inner>]>;\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\npub enum Index\n\n{\n\n Zero,\n\n One,\n\n Two,\n\n Three,\n\n Four,\n\n Five,\n\n Six,\n\n Seven,\n\n}\n\n\n\nimpl Default for Index\n\n{\n\n fn default() -> Self\n\n {\n\n Self::Zero\n", "file_path": "tests_utils/src/node_types/diff_index.rs", "rank": 1, "score": 57507.958973508605 }, { "content": "/// Prevent the dropping of cyclic and/or deep graphs from causing stack overflows, for any\n\n/// [`Pair`] type.\n\npub fn cycle_deep_safe_drop<T: Pair, const N: usize>(graphs: [(T, T); N])\n\n{\n\n if T::needs_cycle_deep_safe_drop() {\n\n for (head, tail) in graphs {\n\n // Enable dropping to free the memory of shapes that were cyclic, by resetting their\n\n // tails to no longer form cycles if they did.\n\n drop(Pair::take(&tail));\n\n // Now dropping will free the memory.\n\n drop(Dropper(head));\n\n }\n\n }\n\n}\n", "file_path": "tests_utils/src/shapes.rs", "rank": 2, "score": 54378.82711478654 }, { "content": "#[inline]\n\npub fn equiv<N: Node>(\n\n a: N,\n\n b: N,\n\n) -> N::Cmp\n\n{\n\n impl<N: Node> equiv::Params for Args<N>\n\n {\n\n type DescendMode = Interleave<Self>;\n\n type Error = Infallible;\n\n type Node = N;\n\n type RecurMode = RecurQueue<Self>;\n\n }\n\n\n\n let mut e = Equiv::<Args<N>>::default();\n\n #[allow(unstable_name_collisions)]\n\n e.equiv(a, b).into_ok()\n\n}\n\n\n\n\n\n/// Like 
[`equiv`](equiv()) but first tries the precheck that is faster for small acyclic graphs.\n", "file_path": "src/robust.rs", "rank": 3, "score": 53737.01442758378 }, { "content": " /// Generic parameters of [`equiv`].\n\n pub trait Params<N: Node>: Sized\n\n {\n\n /// Type of recursion mode for the precheck.\n\n type PrecheckRecurMode: RecurMode<PrecheckArgs<N, Self>>\n\n + Into<Self::InterleaveRecurMode>;\n\n /// Type of recursion mode for the interleave.\n\n type InterleaveRecurMode: RecurMode<InterleaveArgs<N, Self>>;\n\n /// Type that `impl`s the arguments for the generic parameters for the interleave.\n\n type InterleaveParams: interleave::Params<Node = N>;\n\n /// Type that represents the errors that can occur from [`Self::PrecheckRecurMode`]\n\n /// and [`Self::InterleaveRecurMode`].\n\n type Error;\n\n }\n\n\n\n /// Equivalence predicate that can handle cyclic graphs, but first tries the precheck that\n\n /// is faster for small acyclic graphs, and that requires choosing specific type arguments\n\n /// that determine the implementations of internal dynamic data structures. 
Safe for\n\n /// very-deep graphs only when the interleave recursion-mode type is.\n\n ///\n\n /// # Errors\n", "file_path": "src/generic.rs", "rank": 4, "score": 51302.742452479455 }, { "content": "#[inline]\n\npub fn precheck_equiv<N: Node + Clone>(\n\n a: N,\n\n b: N,\n\n) -> N::Cmp\n\n{\n\n impl<N: Node> precheck_interleave::Params<N> for Args<N>\n\n {\n\n type Error = Infallible;\n\n type InterleaveParams = Self;\n\n type InterleaveRecurMode = RecurQueue<Self>;\n\n type PrecheckRecurMode = RecurQueue<Self>;\n\n }\n\n\n\n #[allow(unstable_name_collisions)]\n\n precheck_interleave::equiv::<N, Args<N>>(a, b).into_ok()\n\n}\n", "file_path": "src/robust.rs", "rank": 5, "score": 50275.261680991054 }, { "content": "/// Use our custom `Table`, `Rc`, `RecurMode`, and `NumberGenerator` types.\n\nfn custom_equiv(\n\n a: My,\n\n b: My,\n\n) -> bool\n\n{\n\n use {\n\n custom_recur_stack::{\n\n ExhaustedError,\n\n ListStack,\n\n },\n\n custom_rng::PseudoPseudoRNG,\n\n graph_safe_compare::{\n\n cycle_safe::modes::interleave,\n\n generic::precheck_interleave::{\n\n self,\n\n InterleaveError,\n\n PrecheckError,\n\n },\n\n },\n\n other_recur_stack::{\n", "file_path": "tests/custom.rs", "rank": 6, "score": 45198.771973446215 }, { "content": "/// Also, enables this integration test to be used when the `graph_safe_compare` crate is\n\n/// built without the \"std\" feature enabled, and enables running the test cases of very-deep\n\n/// shapes.\n\nmod custom_recur_stack\n\n{\n\n extern crate alloc;\n\n\n\n use {\n\n super::{\n\n other_recur_stack::OtherStack,\n\n My,\n\n },\n\n alloc::collections::LinkedList,\n\n graph_safe_compare::{\n\n basic::recursion::callstack::CallStack,\n\n generic::equiv::{\n\n self,\n\n CounterpartsResult,\n\n EdgesIter,\n\n Equiv,\n", "file_path": "tests/custom.rs", "rank": 7, "score": 39066.97108450684 }, { "content": "mod common\n\n{\n\n pub mod rc_pair;\n\n}\n\n\n\nuse {\n\n common::rc_pair::*,\n\n core::convert::identity,\n\n};\n\n\n\n\n\n/// Use 
`Arc` just because it's different than\n\n/// `graph_safe_compare::generic::equiv_classes::premade::Rc`. The multi-thread ability\n\n/// provided by `Arc` is ignored.\n\nmod custom_rc\n\n{\n\n extern crate alloc;\n\n\n\n use {\n\n alloc::sync::Arc,\n", "file_path": "tests/custom.rs", "rank": 8, "score": 39061.57669514519 }, { "content": " }\n\n }\n\n\n\n impl equiv_classes::Rc for Rc\n\n {\n\n fn new(val: Cell<Class<Self>>) -> Self\n\n {\n\n Self(Arc::new(val))\n\n }\n\n }\n\n}\n\n\n\n\n\n/// Use `BTreeMap` just because it's different than\n\n/// `graph_safe_compare::generic::equiv_classes::premade::HashMap`. The ordering of keys\n\n/// provided by `BTreeMap` is ignored.\n\nmod custom_table\n\n{\n\n extern crate alloc;\n\n\n", "file_path": "tests/custom.rs", "rank": 9, "score": 39059.10715090158 }, { "content": "\n\n/// Use our own (dummy) PRNG to test not depending on any from the crate.\n\nmod custom_rng\n\n{\n\n use graph_safe_compare::cycle_safe::modes::interleave::random;\n\n\n\n #[derive(Default)]\n\n pub struct PseudoPseudoRNG(u128);\n\n\n\n impl random::NumberGenerator for PseudoPseudoRNG\n\n {\n\n fn rand_upto(\n\n &mut self,\n\n exclusive_end: std::num::NonZeroU16,\n\n ) -> u16\n\n {\n\n self.0 = self.0.wrapping_mul(42);\n\n self.0 = self.0.wrapping_add(987654321);\n\n self.0 as u16 % exclusive_end\n\n }\n\n }\n\n}\n\n\n\n\n\n/// Use our custom `Table`, `Rc`, `RecurMode`, and `NumberGenerator` types.\n", "file_path": "tests/custom.rs", "rank": 10, "score": 39053.96429563548 }, { "content": " use super::*;\n\n\n\n tests_utils::eq_variation_mod_body!(\n\n custom_equiv,\n\n My,\n\n Rc<Datum>,\n\n identity,\n\n DatumAllocator::new\n\n );\n\n}\n\n\n\ntests_utils::eq_shapes_tests!(identity, DatumAllocator::new, eq_variation::MyEq::new,\n\n #[cfg(all())], #[cfg(all())]);\n", "file_path": "tests/custom.rs", "rank": 11, "score": 39051.25511358475 }, { "content": " use {\n\n super::{\n\n My,\n\n Node,\n\n },\n\n alloc::collections::BTreeMap,\n\n 
graph_safe_compare::generic::equiv_classes::Table,\n\n };\n\n\n\n #[derive(Default)]\n\n pub struct Map(BTreeMap<<My as Node>::Id, super::custom_rc::Rc>);\n\n\n\n impl Table for Map\n\n {\n\n type Node = My;\n\n type Rc = super::custom_rc::Rc;\n\n\n\n fn get(\n\n &self,\n\n k: &<Self::Node as Node>::Id,\n", "file_path": "tests/custom.rs", "rank": 12, "score": 39050.571126327515 }, { "content": " OtherStack,\n\n OtherStackError,\n\n },\n\n };\n\n\n\n struct InterleaveArgs;\n\n\n\n impl interleave::Params for InterleaveArgs\n\n {\n\n type Node = My;\n\n type RNG = PseudoPseudoRNG;\n\n type Table = custom_table::Map;\n\n\n\n // Use custom values for these constants, not their defaults.\n\n const FAST_LIMIT_MAX: u16 = Self::PRECHECK_LIMIT / 4;\n\n const PRECHECK_LIMIT: u16 = 2000;\n\n const SLOW_LIMIT: u16 = Self::PRECHECK_LIMIT / 2;\n\n }\n\n\n\n // Exercise the call-stack for the precheck since that is limited and will not overflow the\n", "file_path": "tests/custom.rs", "rank": 13, "score": 39047.24176266835 }, { "content": " }\n\n\n\n impl From<OtherStack> for ListStack\n\n {\n\n fn from(_: OtherStack) -> Self\n\n {\n\n Self::default()\n\n }\n\n }\n\n}\n\n\n\n/// A different type, to test combining two custom `RecurMode` types.\n\nmod other_recur_stack\n\n{\n\n use {\n\n super::My,\n\n graph_safe_compare::{\n\n generic::equiv::{\n\n self,\n\n CounterpartsResult,\n", "file_path": "tests/custom.rs", "rank": 14, "score": 39047.23920652021 }, { "content": " EdgesIter,\n\n Equiv,\n\n RecurMode,\n\n },\n\n Cmp,\n\n Node,\n\n },\n\n };\n\n\n\n #[derive(Default)]\n\n pub struct OtherStack;\n\n\n\n pub enum OtherStackError<R>\n\n {\n\n Novel,\n\n Recur(R),\n\n }\n\n\n\n /// Like `CallStack` but with custom error.\n\n impl<P> RecurMode<P> for OtherStack\n", "file_path": "tests/custom.rs", "rank": 15, "score": 39043.87240890962 }, { "content": " ) -> Option<&Self::Rc>\n\n {\n\n self.0.get(k)\n\n }\n\n\n\n fn insert(\n\n &mut self,\n\n k: <My as Node>::Id,\n\n v: Self::Rc,\n\n 
)\n\n {\n\n self.0.insert(k, v);\n\n }\n\n }\n\n}\n\n\n\n\n\n/// Use `LinkedList` just because it's different than\n\n/// `graph_safe_compare::wide_safe::recursion::stack::RecurStack`.\n\n///\n", "file_path": "tests/custom.rs", "rank": 16, "score": 39043.66151691487 }, { "content": " // stack when the stack is already shallow, and use the list-stack for the interleave so great\n\n // depth is supported since an input could be very-deep.\n\n let precheck_on_callstack = {\n\n #[derive(Debug)]\n\n enum ExhaustedOrOtherError\n\n {\n\n ExhaustedList,\n\n OtherNovel,\n\n }\n\n\n\n impl From<OtherStackError<Self>> for PrecheckError<ExhaustedOrOtherError>\n\n {\n\n fn from(e: OtherStackError<Self>) -> Self\n\n {\n\n match e {\n\n OtherStackError::Recur(e) => e,\n\n OtherStackError::Novel => Self::RecurError(ExhaustedOrOtherError::OtherNovel),\n\n }\n\n }\n\n }\n", "file_path": "tests/custom.rs", "rank": 17, "score": 39043.14216701688 }, { "content": " struct Args;\n\n\n\n impl precheck_interleave::Params<My> for Args\n\n {\n\n type Error = ExhaustedError;\n\n type InterleaveParams = InterleaveArgs;\n\n type InterleaveRecurMode = ListStack;\n\n type PrecheckRecurMode = ListStack;\n\n }\n\n\n\n precheck_interleave::equiv::<_, Args>(a, b).unwrap()\n\n };\n\n\n\n assert_eq!(precheck_on_callstack, precheck_on_liststack);\n\n\n\n precheck_on_callstack == Ordering::Equal\n\n}\n\n\n\nmod eq_variation\n\n{\n", "file_path": "tests/custom.rs", "rank": 18, "score": 39040.298015612585 }, { "content": " }\n\n Ok(Cmp::new_equiv())\n\n }\n\n else {\n\n Err(OtherStackError::Novel)\n\n }\n\n }\n\n\n\n fn next(&mut self) -> Option<CounterpartsResult<P::Node>>\n\n {\n\n None\n\n }\n\n\n\n fn reset(self) -> Self\n\n {\n\n self\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 19, "score": 39040.298015612585 }, { "content": "\n\n impl From<ExhaustedError> for InterleaveError<ExhaustedOrOtherError>\n\n {\n\n fn from(_: ExhaustedError) -> Self\n\n {\n\n 
InterleaveError(ExhaustedOrOtherError::ExhaustedList)\n\n }\n\n }\n\n\n\n struct Args;\n\n\n\n impl precheck_interleave::Params<My> for Args\n\n {\n\n type Error = ExhaustedOrOtherError;\n\n type InterleaveParams = InterleaveArgs;\n\n type InterleaveRecurMode = ListStack;\n\n type PrecheckRecurMode = OtherStack;\n\n }\n\n\n\n precheck_interleave::equiv::<_, Args>(a.clone(), b.clone()).unwrap()\n", "file_path": "tests/custom.rs", "rank": 20, "score": 39040.298015612585 }, { "content": " }\n\n // Remove empty iterators from the stack.\n\n drop(self.0.pop_front());\n\n }\n\n None\n\n }\n\n\n\n fn reset(mut self) -> Self\n\n {\n\n self.0.clear();\n\n self\n\n }\n\n }\n\n\n\n impl From<CallStack> for ListStack\n\n {\n\n fn from(_: CallStack) -> Self\n\n {\n\n Self::default()\n\n }\n", "file_path": "tests/custom.rs", "rank": 21, "score": 39040.298015612585 }, { "content": " };\n\n\n\n // Exercise our list-stack for the precheck.\n\n let precheck_on_liststack = {\n\n impl From<ExhaustedError> for PrecheckError<ExhaustedError>\n\n {\n\n fn from(e: ExhaustedError) -> Self\n\n {\n\n PrecheckError::RecurError(e)\n\n }\n\n }\n\n\n\n impl From<ExhaustedError> for InterleaveError<ExhaustedError>\n\n {\n\n fn from(e: ExhaustedError) -> Self\n\n {\n\n InterleaveError(e)\n\n }\n\n }\n\n\n", "file_path": "tests/custom.rs", "rank": 22, "score": 39040.298015612585 }, { "content": " core::{\n\n cell::Cell,\n\n ops::Deref,\n\n },\n\n graph_safe_compare::generic::equiv_classes::{\n\n self,\n\n Class,\n\n },\n\n };\n\n\n\n #[derive(Clone)]\n\n pub struct Rc(Arc<Cell<Class<Self>>>);\n\n\n\n impl Deref for Rc\n\n {\n\n type Target = Cell<Class<Self>>;\n\n\n\n fn deref(&self) -> &Self::Target\n\n {\n\n &*self.0\n", "file_path": "tests/custom.rs", "rank": 23, "score": 39040.298015612585 }, { "content": " RecurMode,\n\n },\n\n Cmp,\n\n Node,\n\n },\n\n };\n\n\n\n #[derive(Default)]\n\n pub struct ListStack(LinkedList<EdgesIter<My>>);\n\n\n\n #[derive(Debug)]\n\n pub struct 
ExhaustedError;\n\n\n\n impl<P> RecurMode<P> for ListStack\n\n where\n\n P: equiv::Params<Node = My, RecurMode = Self>,\n\n ExhaustedError: Into<P::Error>,\n\n {\n\n type Error = ExhaustedError;\n\n\n", "file_path": "tests/custom.rs", "rank": 24, "score": 39040.298015612585 }, { "content": " fn recur(\n\n it: &mut Equiv<P>,\n\n edges_iter: EdgesIter<P::Node>,\n\n ) -> Result<<P::Node as Node>::Cmp, Self::Error>\n\n {\n\n if it.recur_mode.0.len() < 2_usize.pow(30) {\n\n it.recur_mode.0.push_front(edges_iter);\n\n Ok(Cmp::new_equiv())\n\n }\n\n else {\n\n Err(ExhaustedError)\n\n }\n\n }\n\n\n\n fn next(&mut self) -> Option<CounterpartsResult<P::Node>>\n\n {\n\n while let Some(edges_iter) = self.0.front_mut() {\n\n let next = edges_iter.next();\n\n if next.is_some() {\n\n return next;\n", "file_path": "tests/custom.rs", "rank": 25, "score": 39040.298015612585 }, { "content": " where\n\n P: equiv::Params<Node = My, RecurMode = Self>,\n\n OtherStackError<P::Error>: Into<P::Error>,\n\n {\n\n type Error = OtherStackError<P::Error>;\n\n\n\n fn recur(\n\n it: &mut Equiv<P>,\n\n edges_iter: EdgesIter<P::Node>,\n\n ) -> Result<<P::Node as Node>::Cmp, Self::Error>\n\n {\n\n if true {\n\n for next in edges_iter {\n\n match next {\n\n Ok([a, b]) => match it.equiv_main(a, b) {\n\n Ok(cmp) if cmp.is_equiv() => (),\n\n result => return result.map_err(OtherStackError::Recur),\n\n },\n\n Err(cmp_amount_edges) => return Ok(cmp_amount_edges),\n\n }\n", "file_path": "tests/custom.rs", "rank": 26, "score": 39040.298015612585 }, { "content": " unsafe {\n\n abort();\n\n }\n\n }\n\n}\n\n\n\n\n\n/// When using the parent package with its \"std\" feature enabled, check that we can use its items\n\n/// that are only available with that feature.\n\n#[macro_export]\n\nmacro_rules! 
static_assert_dep_is_std {\n\n () => {\n\n mod static_assert_dep_is_std\n\n {\n\n #[allow(unused_imports)]\n\n use graph_safe_compare::{\n\n cycle_safe::{\n\n equiv,\n\n precheck_equiv,\n\n },\n", "file_path": "test_no_std/src/lib.rs", "rank": 27, "score": 36404.15439369826 }, { "content": "//! Integration test of using the parent package as `no_std` from an executable.\n\n\n\n#![no_std]\n\n// Must have this, otherwise the following error would happen:\n\n// error: requires `start` lang_item\n\n#![no_main]\n\n\n\n// Only link the lib to have its `panic_handler`.\n\nextern crate test_no_std as _;\n\n\n\n#[cfg(any(feature = \"std\", feature = \"prove_dep_is_no_std\"))]\n\ntest_no_std::static_assert_dep_is_std!();\n\n\n\n\n\nuse {\n\n core::cmp::Ordering,\n\n graph_safe_compare::{\n\n basic::equiv,\n\n Node,\n\n },\n", "file_path": "test_no_std/src/main.rs", "rank": 28, "score": 36403.000045981986 }, { "content": "/// using it suffices for this test where the premade more-capable functions are not present (due\n\n/// to no \"std\") (would have to go to more effort to provide custom types for the generic\n\n/// more-capable functions that are present).\n\nimpl PartialEq for It\n\n{\n\n fn eq(\n\n &self,\n\n other: &Self,\n\n ) -> bool\n\n {\n\n equiv(*self, *other).is_eq()\n\n }\n\n}\n\nimpl Eq for It {}\n\n\n\n\n\n// Must provide the classic entry-point `main` symbol, otherwise the following error would happen\n\n// (on NixOS Linux, at least):\n\n//\n\n// error: linking with `cc` failed: exit status: 1\n", "file_path": "test_no_std/src/main.rs", "rank": 29, "score": 36400.225064258026 }, { "content": "\n\n#[cfg(not(any(feature = \"std\", test)))]\n\nmod for_dylib_and_bin\n\n{\n\n use libc::abort;\n\n\n\n // When `cfg(test)` is true, i.e. when building this crate as a `--test` or bench harness,\n\n // `libtest` is linked which links `libstd` which already provides a `panic_handler`, and that\n\n // would conflict. 
Without conditional compilation, the following error would happen:\n\n // error[E0152]: found duplicate lang item `panic_impl`\n\n //\n\n // This also serves to detect, and fail the build, if `libstd` does somehow end up being\n\n // linked in.\n\n #[panic_handler]\n\n fn panic(_info: &core::panic::PanicInfo) -> !\n\n {\n\n // Abort by calling the standard C `_Noreturn void abort(void)`. Not strictly necessary\n\n // for this crate to build, since `loop {}` could be used instead to satisfy the `!`\n\n // return type, but `abort` causes the program to actually terminate if a panic occurs\n\n // (instead of looping infinitely).\n", "file_path": "test_no_std/src/lib.rs", "rank": 30, "score": 36399.92132380962 }, { "content": " }\n\n\n\n /// The usual reason for using `graph_safe_compare` is to impl this trait, but usually\n\n /// `basic::equiv` would not be used and instead the more-capable functionality would be, but\n\n /// using it suffices for this test where the premade more-capable functions are not present\n\n /// (due to no \"std\") (would have to go to more effort to provide custom types for the generic\n\n /// more-capable functions that are present).\n\n impl PartialEq for My\n\n {\n\n fn eq(\n\n &self,\n\n other: &Self,\n\n ) -> bool\n\n {\n\n equiv(*self, *other)\n\n }\n\n }\n\n impl Eq for My {}\n\n}\n\n\n", "file_path": "test_no_std/src/lib.rs", "rank": 31, "score": 36398.18421911342 }, { "content": " generic::equiv_classes::premade::{\n\n hash_map::Table,\n\n rc::Rc,\n\n },\n\n robust,\n\n wide_safe,\n\n };\n\n }\n\n };\n\n}\n\n\n\n#[cfg(any(feature = \"std\", feature = \"prove_dep_is_no_std\"))]\n\nstatic_assert_dep_is_std!();\n\n\n\n\n\n#[cfg(test)]\n\nmod tests\n\n{\n\n use super::*;\n\n\n\n #[test]\n\n fn it_works()\n\n {\n\n let (a, b) = (My, My);\n\n assert!(a == b);\n\n }\n\n}\n", "file_path": "test_no_std/src/lib.rs", "rank": 32, "score": 36396.666266973276 }, { "content": "//! 
Integration test of using the parent package as `no_std` from a library.\n\n\n\n#![no_std]\n\n\n\n\n\npub use my::My;\n\n\n\nmod my\n\n{\n\n use graph_safe_compare::{\n\n basic::equiv,\n\n Node,\n\n };\n\n\n\n #[derive(Copy, Clone)]\n\n pub struct My;\n\n\n\n impl Node for My\n\n {\n\n type Cmp = bool;\n", "file_path": "test_no_std/src/lib.rs", "rank": 33, "score": 36389.88849823868 }, { "content": "\n\n fn get_edge(\n\n &self,\n\n _index: &Self::Index,\n\n ) -> Option<Self>\n\n {\n\n None\n\n }\n\n\n\n fn equiv_modulo_edges(\n\n &self,\n\n other: &Self,\n\n ) -> Ordering\n\n {\n\n self.0.cmp(&other.0)\n\n }\n\n}\n\n\n\n/// The usual reason for using `graph_safe_compare` is to impl this trait, but usually\n\n/// `basic::equiv` would not be used and instead the more-capable functionality would be, but\n", "file_path": "test_no_std/src/main.rs", "rank": 34, "score": 36384.96750011266 }, { "content": "// = note: .../ld: .../Scrt1.o: in function `_start':\n\n// .../start.S:104: undefined reference to `main'\n\n// collect2: error: ld returned 1 exit status\n\n#[no_mangle] // ensure that this symbol is called `main` in the output\n\npub extern \"C\" fn main(\n\n _argc: c_int,\n\n _argv: *const *const c_char,\n\n) -> c_int\n\n{\n\n my_main();\n\n 0\n\n}\n\n\n\n\n", "file_path": "test_no_std/src/main.rs", "rank": 35, "score": 36384.38889793159 }, { "content": " type Id = ();\n\n type Index = u8;\n\n\n\n fn id(&self) -> Self::Id {}\n\n\n\n fn get_edge(\n\n &self,\n\n _index: &Self::Index,\n\n ) -> Option<Self>\n\n {\n\n None\n\n }\n\n\n\n fn equiv_modulo_edges(\n\n &self,\n\n _other: &Self,\n\n ) -> bool\n\n {\n\n true\n\n }\n", "file_path": "test_no_std/src/lib.rs", "rank": 36, "score": 36380.886348445725 }, { "content": " libc::{\n\n c_char,\n\n c_int,\n\n },\n\n};\n\n\n\n\n\n#[derive(Copy, Clone)]\n\npub struct It(i32);\n\n\n\nimpl Node for It\n\n{\n\n type Cmp = Ordering;\n\n type Id = i32;\n\n type Index = u8;\n\n\n\n fn id(&self) -> Self::Id\n\n {\n\n self.0\n\n }\n", 
"file_path": "test_no_std/src/main.rs", "rank": 37, "score": 36380.886348445725 }, { "content": "fn my_main()\n\n{\n\n let (a, b) = (It(42), It(42));\n\n assert!(a == b);\n\n}\n", "file_path": "test_no_std/src/main.rs", "rank": 38, "score": 35130.49241527753 }, { "content": "use {\n\n crate::shapes::{\n\n Allocator,\n\n Leaf,\n\n Pair,\n\n },\n\n std::{\n\n cell::{\n\n RefCell,\n\n RefMut,\n\n },\n\n hash::Hash,\n\n ops::Sub,\n\n ptr,\n\n rc::Rc,\n\n },\n\n};\n\n\n\n\n\n#[derive(PartialEq, Eq, Copy, Clone, Debug)]\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 39, "score": 33080.805999153585 }, { "content": "{\n\n /// Must only be used by `PairChainMaker`.\n\n fn set(\n\n &self,\n\n a: Self,\n\n b: Self,\n\n )\n\n {\n\n use Shape::*;\n\n\n\n let mut inner = self.inner();\n\n let alloc = Rc::clone(&inner.id.alloc);\n\n let (a, b) = (a.0.into_inner(), b.0.into_inner());\n\n\n\n debug_assert_eq!(inner.id.num, 0);\n\n debug_assert_eq!(inner.shape, Leaf);\n\n debug_assert_eq!(inner.depth, 0);\n\n debug_assert_eq!(alloc, a.id.alloc);\n\n debug_assert_eq!(a.id.alloc, b.id.alloc);\n\n\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 40, "score": 33078.500521154296 }, { "content": "\n\n\n\n#[cfg(test)]\n\nmod tests\n\n{\n\n use {\n\n super::*,\n\n crate::{\n\n shapes::PairChainMaker,\n\n sizes,\n\n },\n\n std::cmp::max,\n\n Shape::*,\n\n };\n\n\n\n // #[test]\n\n // fn size()\n\n // {\n\n // dbg!(std::mem::size_of::<Datum>());\n\n // }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 41, "score": 33078.415973022784 }, { "content": " case!(degenerate_dag, 0, alloc);\n\n case!(degenerate_dag, 1, alloc);\n\n case!(degenerate_dag, 2, alloc);\n\n case!(degenerate_dag, 3, alloc);\n\n case!(degenerate_dag, 15, alloc);\n\n }\n\n\n\n #[test]\n\n fn degenerate_cyclic()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(degenerate_cyclic, 0, alloc);\n\n }\n\n\n\n mod long\n\n {\n\n use super::*;\n\n\n\n #[test]\n", 
"file_path": "tests_utils/src/node_types/lazy.rs", "rank": 42, "score": 33075.088923667194 }, { "content": " #[ignore]\n\n fn degenerate_dag()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n case!(degenerate_dag, long_depth(), alloc);\n\n }\n\n }\n\n\n\n mod stack_overflow\n\n {\n\n use super::*;\n\n\n\n #[test]\n\n #[ignore]\n\n fn degenerate_cyclic1()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(degenerate_cyclic, 1, alloc);\n\n }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 43, "score": 33075.06693033912 }, { "content": " {\n\n use super::*;\n\n\n\n #[test]\n\n #[ignore]\n\n fn list()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n case!(list, sizes::long_list_length(), alloc);\n\n }\n\n\n\n #[test]\n\n #[ignore]\n\n fn inverted_list()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n case!(inverted_list, sizes::long_list_length(), alloc);\n\n }\n\n\n\n #[test]\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 44, "score": 33074.96971032714 }, { "content": " #![allow(clippy::eq_op)]\n\n\n\n use super::*;\n\n\n\n macro_rules! make {\n\n ($shape:ident, $depth:expr) => {{\n\n let (head, _tail): (Datum, Datum) = PairChainMaker::new($depth).$shape();\n\n head\n\n }};\n\n ($alloc:expr, $shape:ident, $depth:expr) => {{\n\n let alloc = Rc::clone(&$alloc);\n\n let (head, _tail): (Datum, Datum) =\n\n PairChainMaker::new_with($depth, alloc).$shape();\n\n head\n\n }};\n\n }\n\n macro_rules! 
case {\n\n ($shape:ident, $depth:expr, $alloc:expr) => {{\n\n let depth = $depth;\n\n let alloc = Rc::clone(&$alloc);\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 45, "score": 33074.42656153197 }, { "content": "use std::{\n\n iter::repeat,\n\n ops::Index,\n\n rc::Rc,\n\n};\n\n\n\n\n\n/// Generates very many edges but uses little memory itself.\n\n#[derive(Eq, Clone, Debug)]\n\npub enum Datum\n\n{\n\n Leaf,\n\n Branch\n\n {\n\n width: usize,\n\n proto_edge: Rc<Self>,\n\n },\n\n}\n\n\n\nimpl Datum\n", "file_path": "tests_utils/src/node_types/wide.rs", "rank": 46, "score": 33074.3956962725 }, { "content": "{\n\n fn eq(\n\n &self,\n\n other: &Self,\n\n ) -> bool\n\n {\n\n ptr::eq(self, other)\n\n }\n\n}\n\n\n\nimpl Hash for DatumAllocator\n\n{\n\n fn hash<H: std::hash::Hasher>(\n\n &self,\n\n state: &mut H,\n\n )\n\n {\n\n ptr::hash(self, state)\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 47, "score": 33074.28433904327 }, { "content": "\n\n #[rustfmt::skip::macros(case, id)]\n\n #[allow(clippy::redundant_clone)]\n\n mod make\n\n {\n\n use super::*;\n\n\n\n macro_rules! 
case_with {\n\n (\n\n $alloc:expr, $depth:expr,degenerate_cyclic, $expect_head:expr, $expect_tail:expr\n\n ) => {\n\n case_with!($alloc, $depth, degenerate_cyclic, 1, $expect_head, $expect_tail);\n\n };\n\n ($alloc:expr, $depth:expr, $shape:ident, $expect_head:expr, $expect_tail:expr) => {\n\n case_with!($alloc, $depth, $shape, 0, $expect_head, $expect_tail);\n\n };\n\n (\n\n $alloc:expr,\n\n $depth:expr,\n\n $shape:ident,\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 48, "score": 33074.12482311938 }, { "content": " #[inline]\n\n pub fn inner(&self) -> RefMut<'_, Inner>\n\n {\n\n self.0.borrow_mut()\n\n }\n\n\n\n #[inline]\n\n pub fn get_edges(&self) -> Option<(Self, Self)>\n\n {\n\n self.inner()\n\n .get_edges()\n\n .map(|(left, right)| (Self(RefCell::new(left)), Self(RefCell::new(right))))\n\n }\n\n}\n\n\n\n/// This `PartialEq` does not implement a `graph_safe_compare` algorithm and is only used for\n\n/// having an intentionally-naive algorithm that acts as if the shapes exist. 
When\n\n/// `graph_safe_compare` algorithms are tested against this type, their functions must be\n\n/// called directly.\n\nimpl PartialEq for Datum\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 49, "score": 33072.38740569053 }, { "content": " case!(list, len(), alloc);\n\n }\n\n\n\n #[test]\n\n fn inverted_list()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(inverted_list, 0, alloc);\n\n case!(inverted_list, 1, alloc);\n\n case!(inverted_list, 2, alloc);\n\n case!(inverted_list, 3, alloc);\n\n case!(inverted_list, len(), alloc);\n\n }\n\n\n\n #[test]\n\n fn degenerate_dag()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 50, "score": 33072.10368391552 }, { "content": " #[ignore]\n\n fn degenerate_dag()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n case!(degenerate_dag, sizes::long_list_length(), alloc);\n\n }\n\n\n\n #[test]\n\n #[ignore]\n\n fn degenerate_cyclic()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(degenerate_cyclic, sizes::degenerate_depth(), alloc);\n\n }\n\n\n\n #[test]\n\n #[ignore]\n\n fn degenerate_cyclic_very_long()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n case!(degenerate_cyclic, sizes::long_list_length(), alloc);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 51, "score": 33071.99095293671 }, { "content": "{\n\n type Alloc = Rc<DatumAllocator>;\n\n\n\n fn new_in(alloc: &Self::Alloc) -> Self\n\n {\n\n let mut next_gen = alloc.next_gen.borrow_mut();\n\n let gen = *next_gen;\n\n *next_gen += 1;\n\n Self(RefCell::new(Inner {\n\n id: Id { alloc: Rc::clone(alloc), gen, num: 0 },\n\n shape: Shape::Leaf,\n\n depth: 0,\n\n }))\n\n }\n\n}\n\n\n\n\n\n#[derive(Default, Eq, Debug)]\n\npub struct DatumAllocator\n\n{\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 52, "score": 33071.979950365545 }, 
{ "content": "\n\n #[test]\n\n #[ignore]\n\n fn degenerate_cyclic2()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(degenerate_cyclic, 2, alloc);\n\n }\n\n\n\n #[test]\n\n #[ignore]\n\n fn degenerate_cyclic3()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(degenerate_cyclic, 3, alloc);\n\n }\n\n\n\n mod long\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 53, "score": 33071.9668620314 }, { "content": "\n\n fn len() -> u32\n\n {\n\n max(100, sizes::long_list_length() / 1000)\n\n }\n\n\n\n fn long_depth() -> u32\n\n {\n\n max(17, sizes::degenerate_depth().saturating_sub(5))\n\n }\n\n\n\n #[test]\n\n fn list()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n case!(list, 0, alloc);\n\n case!(list, 1, alloc);\n\n case!(list, 2, alloc);\n\n case!(list, 3, alloc);\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 54, "score": 33071.908394624865 }, { "content": " next_gen: RefCell<usize>,\n\n}\n\n\n\nimpl DatumAllocator\n\n{\n\n pub fn new(_size: u32) -> Rc<Self>\n\n {\n\n Rc::new(Self::default())\n\n }\n\n}\n\n\n\nimpl Allocator<Datum> for Rc<DatumAllocator>\n\n{\n\n fn alloc(&self) -> Datum\n\n {\n\n Leaf::new_in(self)\n\n }\n\n}\n\n\n\nimpl PartialEq for DatumAllocator\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 55, "score": 33071.83006027717 }, { "content": "\n\n/// This `PartialEq` does not implement a `graph_safe_compare` algorithm and is only used for\n\n/// having an intentionally-naive algorithm that acts as if there are `width` amount of edges.\n\n/// When `graph_safe_compare` algorithms are tested against this type, their functions must\n\n/// be called directly.\n\nimpl PartialEq for Datum\n\n{\n\n fn eq(\n\n &self,\n\n other: &Self,\n\n ) -> bool\n\n {\n\n match (self, other) {\n\n (Datum::Leaf, Datum::Leaf) => true,\n\n (\n\n Datum::Branch { width: aw, proto_edge: ae },\n\n Datum::Branch { width: bw, proto_edge: be },\n\n ) =>\n\n aw 
== bw\n\n && repeat((Rc::clone(ae), Rc::clone(be)))\n\n .take(*aw)\n\n .all(|(ae, be)| Datum::eq(&ae, &be)),\n\n (Datum::Leaf, Datum::Branch { width: 0, .. }) => true,\n\n (Datum::Branch { width: 0, .. }, Datum::Leaf) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/wide.rs", "rank": 56, "score": 33071.79933648337 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn depth1()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n macro_rules! case {\n\n ($shape:ident, $expect_head:expr, $expect_tail:expr) => {\n\n case_with!(alloc, 1, $shape, $expect_head, $expect_tail);\n\n };\n\n }\n\n macro_rules! id {\n\n ($gen:expr, $num:expr) => {\n\n Id { gen: $gen, num: $num, alloc: Rc::clone(&alloc) }\n\n };\n\n }\n\n\n\n case!(\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 57, "score": 33071.72047760126 }, { "content": "\n\n #[test]\n\n fn depth2()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n macro_rules! case {\n\n ($shape:ident, $expect_head:expr, $expect_tail:expr) => {\n\n case_with!(alloc, 2, $shape, $expect_head, $expect_tail);\n\n };\n\n }\n\n macro_rules! id {\n\n ($gen:expr, $num:expr) => {\n\n Id { gen: $gen, num: $num, alloc: Rc::clone(&alloc) }\n\n };\n\n }\n\n\n\n case!(\n\n list,\n\n Inner { id: id!(0, 4), shape: List, depth: 2 },\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 58, "score": 33071.64659480841 }, { "content": " assert_eq!(head, $expect_head);\n\n assert_eq!(tail, $expect_tail);\n\n }};\n\n }\n\n\n\n #[test]\n\n fn depth0()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n macro_rules! case {\n\n ($shape:ident, $expect_head:expr, $expect_tail:expr) => {\n\n case_with!(alloc, 0, $shape, $expect_head, $expect_tail);\n\n };\n\n }\n\n macro_rules! 
id {\n\n ($gen:expr, $num:expr) => {\n\n Id { gen: $gen, num: $num, alloc: Rc::clone(&alloc) }\n\n };\n\n }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 59, "score": 33071.625895248544 }, { "content": " fn depth3()\n\n {\n\n let alloc: Rc<DatumAllocator> = Default::default();\n\n\n\n macro_rules! case {\n\n ($shape:ident, $expect_head:expr, $expect_tail:expr) => {\n\n case_with!(alloc, 3, $shape, $expect_head, $expect_tail);\n\n };\n\n }\n\n macro_rules! id {\n\n ($gen:expr, $num:expr) => {\n\n Id { gen: $gen, num: $num, alloc: Rc::clone(&alloc) }\n\n };\n\n }\n\n\n\n case!(\n\n list,\n\n Inner { id: id!(0, 6), shape: List, depth: 3 },\n\n Inner { id: id!(0, 0), shape: Leaf, depth: 0 }\n\n );\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 60, "score": 33071.59517863363 }, { "content": " depth: a.depth + 1,\n\n }\n\n }\n\n else {\n\n Inner {\n\n id: Id { alloc, gen: a.id.gen, num: a.id.num + 1 },\n\n shape: DegenerateDAG,\n\n depth: a.depth + 1,\n\n }\n\n }\n\n },\n\n (DegenerateCyclic, DegenerateCyclic) => {\n\n debug_assert_eq!(a, b);\n\n\n\n Inner {\n\n id: Id { alloc: a.id.alloc, gen: a.id.gen, num: 0 },\n\n shape: DegenerateCyclic,\n\n depth: a.depth,\n\n }\n\n },\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 61, "score": 33071.34891826488 }, { "content": " Inner { id: Id { alloc, gen: b.id.gen, num: 2 }, shape: List, depth: 1 }\n\n }\n\n else if a.id.gen < b.id.gen {\n\n Inner {\n\n id: Id { alloc, gen: a.id.gen, num: 2 },\n\n shape: InvertedList,\n\n depth: 1,\n\n }\n\n }\n\n else {\n\n unreachable!()\n\n }\n\n },\n\n (Leaf, List) => {\n\n debug_assert_eq!(a.id.num, 0);\n\n debug_assert_eq!(a.depth, 0);\n\n debug_assert!(a.id.gen > b.id.gen);\n\n Inner {\n\n id: Id { alloc, gen: b.id.gen, num: b.id.num + 2 },\n\n shape: List,\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 62, "score": 33071.09033599629 }, { "content": " $same_at_depth:expr,\n\n $expect_head:expr,\n\n $expect_tail:expr\n\n ) => 
{{\n\n let alloc = Rc::clone(&$alloc);\n\n let depth = $depth;\n\n let same_at_depth = $same_at_depth;\n\n\n\n let (head, tail): (Datum, Datum) =\n\n PairChainMaker::new_with(depth, alloc).$shape();\n\n let (head, tail): (Inner, Inner) = (head.inner().clone(), tail.inner().clone());\n\n\n\n if depth > same_at_depth {\n\n assert_ne!(head, tail);\n\n assert_ne!(head.id, tail.id);\n\n }\n\n else {\n\n assert_eq!(head, tail);\n\n assert_eq!(head.id, tail.id);\n\n }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 63, "score": 33071.06342726051 }, { "content": "pub enum Shape\n\n{\n\n Leaf,\n\n List,\n\n InvertedList,\n\n DegenerateDAG,\n\n DegenerateCyclic,\n\n}\n\n\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n\npub struct Id\n\n{\n\n pub alloc: Rc<DatumAllocator>,\n\n pub gen: usize,\n\n pub num: usize,\n\n}\n\n\n\nimpl Sub<usize> for &Id\n\n{\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 64, "score": 33071.0152041778 }, { "content": " let (a, b) = (make!($shape, depth), make!($shape, depth));\n\n let (c, d) = (make!(alloc, $shape, depth), make!(alloc, $shape, depth));\n\n assert_eq!(a, a);\n\n assert_eq!(b, b);\n\n assert_eq!(a, b);\n\n assert_eq!(b, a);\n\n assert_eq!(c, c);\n\n assert_eq!(d, d);\n\n assert_eq!(c, d);\n\n assert_eq!(d, c);\n\n assert_eq!(a, c);\n\n assert_eq!(a, d);\n\n assert_eq!(b, c);\n\n assert_eq!(b, d);\n\n assert_eq!(c, a);\n\n assert_eq!(d, a);\n\n assert_eq!(c, b);\n\n assert_eq!(d, b);\n\n }};\n\n }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 65, "score": 33070.864111039264 }, { "content": " depth: b.depth + 1,\n\n }\n\n },\n\n (InvertedList, Leaf) => {\n\n debug_assert_eq!(b.id.num, 0);\n\n debug_assert_eq!(b.depth, 0);\n\n debug_assert!(a.id.gen < b.id.gen);\n\n Inner {\n\n id: Id { alloc, gen: a.id.gen, num: a.id.num + 2 },\n\n shape: InvertedList,\n\n depth: a.depth + 1,\n\n }\n\n },\n\n (DegenerateDAG, DegenerateDAG) => {\n\n debug_assert_eq!(a, b);\n\n\n\n if inner.id.gen == 
a.id.gen {\n\n Inner {\n\n id: Id { alloc, gen: inner.id.gen, num: 0 },\n\n shape: DegenerateCyclic,\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 66, "score": 33070.57681886943 }, { "content": " &self.id - 1\n\n };\n\n left = Inner { id, shape: DegenerateCyclic, depth: self.depth };\n\n right = left.clone();\n\n },\n\n Leaf => unreachable!(),\n\n }\n\n\n\n (left, right)\n\n })\n\n }\n\n}\n\n\n\n\n\n/// Generates edges lazily and uses little memory itself.\n\n#[derive(Eq, Clone, Debug)]\n\npub struct Datum(RefCell<Inner>);\n\n\n\nimpl Datum\n\n{\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 67, "score": 33070.01115189841 }, { "content": "impl Inner\n\n{\n\n fn get_edges(&self) -> Option<(Self, Self)>\n\n {\n\n use Shape::*;\n\n\n\n (self.depth >= 1).then(|| {\n\n let (left, right);\n\n let depth = self.depth - 1;\n\n\n\n match self.shape {\n\n List => {\n\n left = Inner { id: &self.id - 1, shape: Leaf, depth: 0 };\n\n right = Inner {\n\n id: &left.id - 1,\n\n shape: if depth >= 1 { List } else { Leaf },\n\n depth,\n\n };\n\n },\n\n InvertedList => {\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 68, "score": 33069.94187521731 }, { "content": " let new = match (a.shape, b.shape) {\n\n (Leaf, Leaf) => {\n\n debug_assert_eq!(a.id.num, 0);\n\n debug_assert_eq!(a.depth, 0);\n\n debug_assert_eq!(b.id.num, 0);\n\n debug_assert_eq!(b.depth, 0);\n\n\n\n if a.id == b.id {\n\n if inner.id == a.id {\n\n Inner { id: inner.id.clone(), shape: DegenerateCyclic, depth: 1 }\n\n }\n\n else {\n\n Inner {\n\n id: Id { alloc, gen: a.id.gen, num: 1 },\n\n shape: DegenerateDAG,\n\n depth: 1,\n\n }\n\n }\n\n }\n\n else if a.id.gen > b.id.gen {\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 69, "score": 33069.707067853284 }, { "content": " case!(\n\n inverted_list,\n\n Inner { id: id!(7, 6), shape: InvertedList, depth: 3 },\n\n Inner { id: id!(7, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_dag,\n\n Inner { 
id: id!(14, 3), shape: DegenerateDAG, depth: 3 },\n\n Inner { id: id!(14, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_cyclic,\n\n Inner { id: id!(18, 2), shape: DegenerateCyclic, depth: 3 },\n\n Inner { id: id!(18, 0), shape: DegenerateCyclic, depth: 3 }\n\n );\n\n }\n\n }\n\n\n\n mod derived_eq\n\n {\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 70, "score": 33066.84852189013 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Index<usize> for Datum\n\n{\n\n type Output = Rc<Self>;\n\n\n\n #[inline]\n\n fn index(\n\n &self,\n\n index: usize,\n\n ) -> &Self::Output\n\n {\n\n match self {\n\n Datum::Branch { width, proto_edge } if index < *width => proto_edge,\n\n _ => panic!(\"out of bounds\"),\n\n }\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/wide.rs", "rank": 71, "score": 33066.84852189013 }, { "content": "\n\n case!(\n\n list,\n\n Inner { id: id!(0, 0), shape: Leaf, depth: 0 },\n\n Inner { id: id!(0, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n inverted_list,\n\n Inner { id: id!(1, 0), shape: Leaf, depth: 0 },\n\n Inner { id: id!(1, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_dag,\n\n Inner { id: id!(2, 0), shape: Leaf, depth: 0 },\n\n Inner { id: id!(2, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_cyclic,\n\n Inner { id: id!(3, 0), shape: Leaf, depth: 0 },\n\n Inner { id: id!(3, 0), shape: Leaf, depth: 0 }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 72, "score": 33066.84852189013 }, { "content": " _ => unreachable!(),\n\n };\n\n *inner = new;\n\n }\n\n\n\n fn take(&self) -> Option<(Self, Self)>\n\n {\n\n let result = self.get_edges();\n\n let new = self.inner().clone();\n\n *self.inner() = Inner { shape: Shape::Leaf, depth: 0, ..new }; // Keep same ID.\n\n result\n\n }\n\n\n\n fn needs_cycle_deep_safe_drop() -> bool\n\n {\n\n false\n\n }\n\n}\n\n\n\nimpl Leaf for Datum\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 73, "score": 33066.84852189013 }, { 
"content": " type Output = Id;\n\n\n\n fn sub(\n\n self,\n\n rhs: usize,\n\n ) -> Self::Output\n\n {\n\n Id { num: self.num - rhs, ..self.clone() }\n\n }\n\n}\n\n\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub struct Inner\n\n{\n\n pub id: Id,\n\n pub shape: Shape,\n\n pub depth: usize,\n\n}\n\n\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 74, "score": 33066.84852189013 }, { "content": "{\n\n fn eq(\n\n &self,\n\n other: &Self,\n\n ) -> bool\n\n {\n\n let self_shape = self.inner().shape; // (Drop temporary.)\n\n let other_shape = other.inner().shape; // (Drop temporary.)\n\n self_shape == other_shape\n\n && match (self.get_edges(), other.get_edges()) {\n\n (Some(self_edges), Some(other_edges)) =>\n\n self_edges.0 == other_edges.0 && self_edges.1 == other_edges.1,\n\n (None, None) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl Pair for Datum\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 75, "score": 33066.84852189013 }, { "content": " Inner { id: id!(0, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n inverted_list,\n\n Inner { id: id!(5, 4), shape: InvertedList, depth: 2 },\n\n Inner { id: id!(5, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_dag,\n\n Inner { id: id!(10, 2), shape: DegenerateDAG, depth: 2 },\n\n Inner { id: id!(10, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_cyclic,\n\n Inner { id: id!(13, 1), shape: DegenerateCyclic, depth: 2 },\n\n Inner { id: id!(13, 0), shape: DegenerateCyclic, depth: 2 }\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 76, "score": 33066.84852189013 }, { "content": " right = Inner { id: &self.id - 1, shape: Leaf, depth: 0 };\n\n left = Inner {\n\n id: &right.id - 1,\n\n shape: if depth >= 1 { InvertedList } else { Leaf },\n\n depth,\n\n };\n\n },\n\n DegenerateDAG => {\n\n left = Inner {\n\n id: &self.id - 1,\n\n shape: if depth >= 1 { DegenerateDAG } else { Leaf },\n\n depth,\n\n };\n\n right = 
left.clone();\n\n },\n\n DegenerateCyclic => {\n\n let id = if self.id.num == 0 {\n\n Id { num: depth, ..self.id.clone() } // cycle\n\n }\n\n else {\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 77, "score": 33066.84852189013 }, { "content": "{\n\n pub fn degenerate_chain(\n\n width: usize,\n\n depth: u32,\n\n ) -> Self\n\n {\n\n let mut head = Self::Leaf;\n\n\n\n for _ in 0 .. depth {\n\n head = Self::Branch { width, proto_edge: Rc::new(head) }\n\n }\n\n\n\n head\n\n }\n\n\n\n pub fn width(&self) -> usize\n\n {\n\n match self {\n\n Datum::Leaf => 0,\n\n Datum::Branch { width, .. } => *width,\n", "file_path": "tests_utils/src/node_types/wide.rs", "rank": 78, "score": 33066.84852189013 }, { "content": " list,\n\n Inner { id: id!(0, 2), shape: List, depth: 1 },\n\n Inner { id: id!(0, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n inverted_list,\n\n Inner { id: id!(3, 2), shape: InvertedList, depth: 1 },\n\n Inner { id: id!(3, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_dag,\n\n Inner { id: id!(6, 1), shape: DegenerateDAG, depth: 1 },\n\n Inner { id: id!(6, 0), shape: Leaf, depth: 0 }\n\n );\n\n case!(\n\n degenerate_cyclic,\n\n Inner { id: id!(8, 0), shape: DegenerateCyclic, depth: 1 },\n\n Inner { id: id!(8, 0), shape: DegenerateCyclic, depth: 1 }\n\n );\n\n }\n", "file_path": "tests_utils/src/node_types/lazy.rs", "rank": 79, "score": 33066.84852189013 }, { "content": "pub trait Allocator<T>\n\n{\n\n fn alloc(&self) -> T;\n\n}\n\n\n\n\n", "file_path": "tests_utils/src/shapes.rs", "rank": 80, "score": 33032.865450207675 }, { "content": "use {\n\n crate::shapes::{\n\n Allocator,\n\n Leaf,\n\n Pair,\n\n },\n\n std::{\n\n cell::RefCell,\n\n rc::Rc,\n\n },\n\n};\n\n\n\n\n\n// Note that this derived PartialEq does not implement a `graph_safe_compare` algorithm and\n\n// is only used for demonstrating the limitations of the derived algorithm. 
When\n\n// `graph_safe_compare` algorithms are tested against this type, their functions must be\n\n// called directly.\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub struct Datum(pub RefCell<Option<(Rc<Self>, Rc<Self>)>>);\n\n\n", "file_path": "tests_utils/src/node_types/rc_pair.rs", "rank": 81, "score": 32022.009755444302 }, { "content": "use {\n\n crate::shapes::{\n\n Allocator,\n\n Leaf,\n\n Pair,\n\n },\n\n std::cell::Cell,\n\n};\n\n\n\n\n\n// Note that this derived PartialEq does not implement a `graph_safe_compare` algorithm and\n\n// is only used for demonstrating the limitations of the derived algorithm. When\n\n// `graph_safe_compare` algorithms are tested against this type, their functions must be\n\n// called directly.\n\n#[derive(PartialEq, Eq, Default, Debug)]\n\npub struct Datum<'l>(pub Cell<Inner<'l>>);\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug)]\n\npub enum Inner<'l>\n\n{\n", "file_path": "tests_utils/src/node_types/borrow_pair.rs", "rank": 82, "score": 32021.897226531713 }, { "content": "use {\n\n crate::shapes::{\n\n Allocator,\n\n Leaf,\n\n Pair,\n\n },\n\n std::{\n\n any::Any,\n\n cell::{\n\n Cell,\n\n Ref,\n\n RefCell,\n\n RefMut,\n\n },\n\n rc::Rc,\n\n },\n\n};\n\n\n\n\n\npub enum Datum1\n", "file_path": "tests_utils/src/node_types/dyn_pair.rs", "rank": 83, "score": 32018.349433199193 }, { "content": "use {\n\n crate::shapes::{\n\n Allocator,\n\n Leaf,\n\n Pair,\n\n },\n\n std::{\n\n cell::{\n\n Cell,\n\n Ref,\n\n RefCell,\n\n RefMut,\n\n },\n\n rc::Rc,\n\n },\n\n};\n\n\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Datum\n", "file_path": "tests_utils/src/node_types/diff_index.rs", "rank": 84, "score": 32017.98300398533 }, { "content": "use std::{\n\n cell::RefCell,\n\n rc::Rc,\n\n};\n\n\n\n// Note that these derived PartialEq implementations do not do a `graph_safe_compare`\n\n// algorithm and are only used for demonstrating the limitations of the derived algorithm. 
When\n\n// `graph_safe_compare` algorithms are tested against this type, their functions must be\n\n// called directly.\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub struct Datum1\n\n{\n\n pub child: Option<Rc<Datum2>>,\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub enum Datum2\n\n{\n\n Double(Rc<Datum3>, Rc<Datum3>),\n", "file_path": "tests_utils/src/node_types/diff_edge.rs", "rank": 85, "score": 32015.885519105457 }, { "content": " fn take(&self) -> Option<(Self, Self)>\n\n {\n\n let val = std::mem::replace(&mut *self.deref_mut(), Inner::Leaf);\n\n match val {\n\n Inner::Leaf => None,\n\n Inner::Pair(a, b) => Some((a, b)),\n\n }\n\n }\n\n}\n\n\n\n\n\npub struct DatumAllocator\n\n{\n\n region: Region,\n\n next: Cell<Option<Index>>,\n\n}\n\n\n\nimpl DatumAllocator\n\n{\n\n pub fn new(size: u32) -> Self\n", "file_path": "tests_utils/src/node_types/diff_index.rs", "rank": 86, "score": 32010.870728170135 }, { "content": "{\n\n pub index: Index,\n\n pub region: Region,\n\n}\n\n\n\nimpl Datum\n\n{\n\n pub fn deref(&self) -> Ref<'_, Inner>\n\n {\n\n self.region[self.index as usize].borrow()\n\n }\n\n\n\n pub fn deref_mut(&self) -> RefMut<'_, Inner>\n\n {\n\n self.region[self.index as usize].borrow_mut()\n\n }\n\n}\n\n\n\n// Note that this PartialEq impl does not implement a `graph_safe_compare` algorithm and is\n\n// only used for demonstrating the limitations of a naive algorithm. 
When\n", "file_path": "tests_utils/src/node_types/diff_index.rs", "rank": 87, "score": 32009.67435895636 }, { "content": " DowncastMutDatum::Datum2Int32(mut rd2) => *rd2 = Datum2::Two(a, b),\n\n DowncastMutDatum::Datum2Char(mut rd2) => *rd2 = Datum2::Two(a, b),\n\n }\n\n }\n\n\n\n fn take(&self) -> Option<(Self, Self)>\n\n {\n\n use std::mem::replace;\n\n\n\n match self.downcast_mut() {\n\n DowncastMutDatum::Datum1(mut rd1) => {\n\n let val = replace(&mut *rd1, Datum1::Empty);\n\n match val {\n\n Datum1::Empty => None,\n\n Datum1::Double(a, b) => Some((a, b)),\n\n }\n\n },\n\n DowncastMutDatum::Datum2Int32(mut rd2) => {\n\n let val = replace(&mut *rd2, Datum2::Value(0));\n\n match val {\n", "file_path": "tests_utils/src/node_types/dyn_pair.rs", "rank": 88, "score": 32008.75658166875 }, { "content": "{\n\n type Alloc = DatumAllocator;\n\n\n\n fn new_in(alloc: &Self::Alloc) -> Self\n\n {\n\n alloc.alloc()\n\n }\n\n}\n\n\n\nimpl Pair for Datum\n\n{\n\n fn set(\n\n &self,\n\n a: Self,\n\n b: Self,\n\n )\n\n {\n\n *self.deref_mut() = Inner::Pair(a, b);\n\n }\n\n\n", "file_path": "tests_utils/src/node_types/diff_index.rs", "rank": 89, "score": 32008.67214224996 }, { "content": "impl Leaf for Rc<Datum>\n\n{\n\n type Alloc = DatumAllocator;\n\n\n\n fn new_in(alloc: &Self::Alloc) -> Self\n\n {\n\n alloc.alloc()\n\n }\n\n}\n\n\n\nimpl Pair for Rc<Datum>\n\n{\n\n fn set(\n\n &self,\n\n a: Self,\n\n b: Self,\n\n )\n\n {\n\n *self.0.borrow_mut() = Some((a, b));\n\n }\n", "file_path": "tests_utils/src/node_types/rc_pair.rs", "rank": 90, "score": 32008.64157802601 }, { "content": " Leaf,\n\n Pair(&'l Datum<'l>, &'l Datum<'l>),\n\n}\n\n\n\nimpl Default for Inner<'_>\n\n{\n\n fn default() -> Self\n\n {\n\n Self::Leaf\n\n }\n\n}\n\n\n\nimpl<'l> Leaf for &'l Datum<'l>\n\n{\n\n type Alloc = &'l DatumAllocator<Datum<'l>>;\n\n\n\n fn new_in(alloc: &Self::Alloc) -> Self\n\n {\n\n let datum_ref = alloc.alloc();\n\n datum_ref.0.set(Inner::Leaf);\n", "file_path": 
"tests_utils/src/node_types/borrow_pair.rs", "rank": 91, "score": 32008.551996262122 }, { "content": "impl Leaf for DatumRef\n\n{\n\n type Alloc = DatumAllocator;\n\n\n\n fn new_in(alloc: &Self::Alloc) -> Self\n\n {\n\n alloc.alloc()\n\n }\n\n}\n\n\n\nimpl Pair for DatumRef\n\n{\n\n fn set(\n\n &self,\n\n a: Self,\n\n b: Self,\n\n )\n\n {\n\n match self.downcast_mut() {\n\n DowncastMutDatum::Datum1(mut rd1) => *rd1 = Datum1::Double(a, b),\n", "file_path": "tests_utils/src/node_types/dyn_pair.rs", "rank": 92, "score": 32008.54223332194 }, { "content": "\n\n fn take(&self) -> Option<(Self, Self)>\n\n {\n\n self.0.replace(None)\n\n }\n\n}\n\n\n\n\n\npub struct DatumAllocator;\n\n\n\nimpl Allocator<Rc<Datum>> for DatumAllocator\n\n{\n\n fn alloc(&self) -> Rc<Datum>\n\n {\n\n Rc::new(Datum(RefCell::new(None)))\n\n }\n\n}\n\n\n\nimpl DatumAllocator\n\n{\n", "file_path": "tests_utils/src/node_types/rc_pair.rs", "rank": 93, "score": 32008.4914941592 }, { "content": "// only used for demonstrating the limitations of a naive algorithm. 
When\n\n// `graph_safe_compare` algorithms are tested against this type, their functions must be\n\n// called directly.\n\nimpl PartialEq for DatumRef\n\n{\n\n fn eq(\n\n &self,\n\n other: &Self,\n\n ) -> bool\n\n {\n\n match (self.downcast(), other.downcast()) {\n\n (DowncastDatum::Datum1(d1a), DowncastDatum::Datum1(d1b)) => match (&*d1a, &*d1b) {\n\n (Datum1::Empty, Datum1::Empty) => true,\n\n (Datum1::Double(aa, ab), Datum1::Double(ba, bb)) => aa == ba && ab == bb,\n\n _ => false,\n\n },\n\n (DowncastDatum::Datum2Int32(d2a), DowncastDatum::Datum2Int32(d2b)) => {\n\n match (&*d2a, &*d2b) {\n\n (Datum2::Value(a), Datum2::Value(b)) => a == b,\n\n (Datum2::Two(aa, ab), Datum2::Two(ba, bb)) => aa == ba && ab == bb,\n", "file_path": "tests_utils/src/node_types/dyn_pair.rs", "rank": 94, "score": 32008.26954456972 }, { "content": " vec.resize_with(size, D::default);\n\n Self { slice: vec.into_boxed_slice(), next: Cell::new(0) }\n\n }\n\n}\n\n\n\nimpl<'a, D> Allocator<&'a D> for &'a DatumAllocator<D>\n\n{\n\n fn alloc(&self) -> &'a D\n\n {\n\n let i = self.next.get();\n\n self.next.set(i + 1);\n\n let i: usize = i.try_into().unwrap();\n\n &self.slice[i]\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/borrow_pair.rs", "rank": 95, "score": 32008.111448093605 }, { "content": " counter: Cell<usize>,\n\n}\n\n\n\nimpl DatumAllocator\n\n{\n\n pub fn new(_size: u32) -> Self\n\n {\n\n Self { counter: Cell::new(0) }\n\n }\n\n}\n\n\n\nimpl Allocator<DatumRef> for DatumAllocator\n\n{\n\n fn alloc(&self) -> DatumRef\n\n {\n\n self.counter.set(self.counter.get() + 1);\n\n\n\n match self.counter.get() % 3 {\n\n 0 => DatumRef(Rc::new(RefCell::new(Datum1::Empty))),\n\n 1 => DatumRef(Rc::new(RefCell::new(Datum2::Value(42_i32)))),\n\n 2 => DatumRef(Rc::new(RefCell::new(Datum2::Value('λ')))),\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/dyn_pair.rs", "rank": 96, "score": 32007.951016568313 }, { "content": " {\n\n assert!(size <= 8);\n\n let 
size = size.try_into().unwrap();\n\n let mut vec = Vec::with_capacity(size);\n\n vec.resize(size, RefCell::new(Inner::default()));\n\n Self { region: vec.into(), next: Cell::new(Some(Index::Zero)) }\n\n }\n\n}\n\n\n\nimpl Allocator<Datum> for DatumAllocator\n\n{\n\n fn alloc(&self) -> Datum\n\n {\n\n let index = self.next.get().unwrap();\n\n self.next.set(index.increment());\n\n Datum { index, region: Rc::clone(&self.region) }\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/diff_index.rs", "rank": 97, "score": 32007.834976244318 }, { "content": "\n\n fn needs_cycle_deep_safe_drop() -> bool\n\n {\n\n false\n\n }\n\n}\n\n\n\n\n\npub struct DatumAllocator<D>\n\n{\n\n slice: Box<[D]>,\n\n next: Cell<u32>,\n\n}\n\n\n\nimpl<D: Default> DatumAllocator<D>\n\n{\n\n pub fn new(size: u32) -> Self\n\n {\n\n let size = size.try_into().unwrap();\n\n let mut vec = Vec::with_capacity(size);\n", "file_path": "tests_utils/src/node_types/borrow_pair.rs", "rank": 98, "score": 32007.581147788416 }, { "content": " pub fn new(_size: u32) -> Self\n\n {\n\n Self::default()\n\n }\n\n}\n\n\n\nimpl Default for DatumAllocator\n\n{\n\n fn default() -> Self\n\n {\n\n Self\n\n }\n\n}\n", "file_path": "tests_utils/src/node_types/rc_pair.rs", "rank": 99, "score": 32007.30649409006 } ]
Rust
src/proto/par_vec.rs
gereeter/collect-rs
dc4380faac395ef412937dba58249e72873414e9
extern crate alloc; use self::alloc::arc; use std::cmp::min; use std::fmt::{Formatter, Show}; use std::fmt::Error as FmtError; use std::iter::range_inclusive; use std::sync::Arc; use std::mem; use std::ops; pub struct ParVec<T> { data: Arc<Vec<T>>, } impl<T: Send + Sync> ParVec<T> { pub fn new(vec: Vec<T>, slices: uint) -> (ParVec<T>, Vec<ParSlice<T>>) { let data = Arc::new(vec); let par_slices = sub_slices(data.as_slice(), slices).into_iter() .map(|slice| ParSlice { _vec: data.clone(), data: unsafe { mem::transmute(slice) }, } ).collect(); let par_vec = ParVec { data: data, }; (par_vec, par_slices) } pub fn into_inner_opt(self) -> Result<Vec<T>, ParVec<T>> { if arc::strong_count(&self.data) == 1 { let vec_ptr: &mut Vec<T> = unsafe { mem::transmute(&*self.data) }; Ok(mem::replace(vec_ptr, Vec::new())) } else { Err(self) } } pub fn into_inner(mut self) -> Vec<T> { loop { match self.into_inner_opt() { Ok(vec) => return vec, Err(new_self) => self = new_self, } } } } fn sub_slices<T>(parent: &[T], slice_count: uint) -> Vec<&[T]> { let mut slices = Vec::new(); let len = parent.len(); let mut start = 0u; for curr in range_inclusive(1, slice_count).rev() { let slice_len = (len - start) / curr; let end = min(start + slice_len, len); slices.push(parent.slice(start, end)); start += slice_len; } slices } pub struct ParSlice<T: Send> { _vec: Arc<Vec<T>>, data: &'static mut [T], } impl<T: Send> ops::Deref for ParSlice<T> { type Target = [T]; fn deref<'a>(&'a self) -> &'a [T] { self.data } } impl<T: Send> ops::DerefMut for ParSlice<T> { fn deref_mut<'a>(&'a mut self) -> &'a mut [T] { self.data } } impl<T: Send> Show for ParSlice<T> where T: Show { fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> { write!(f, "{}", self.data) } } #[cfg(test)] mod test { extern crate test; use self::test::Bencher; use super::ParVec; use std::mem; use std::rand::{thread_rng, Rng}; use std::iter::range_inclusive; const TEST_SLICES: uint = 8; const TEST_MAX: u32 = 1000; #[test] fn 
test_unwrap_safely() { let (vec, slices) = ParVec::new([5u; TEST_MAX as uint].to_vec(), TEST_SLICES); mem::drop(slices); let vec = vec.into_inner(); assert_eq!(&*vec, [5u; TEST_MAX as uint].as_slice()); } #[test] fn test_slices() { let (_, slices) = ParVec::new(range(1u32, TEST_MAX).collect(), TEST_SLICES); assert_eq!(slices.len(), TEST_SLICES); } #[bench] fn seq_prime_factors_1000(b: &mut Bencher) { let vec: Vec<u32> = range_inclusive(1, TEST_MAX).collect(); b.iter(|| { let _: Vec<(u32, Vec<u32>)> = vec.iter() .map(|&x| (x, get_prime_factors(x))) .collect(); }); } #[bench] fn par_prime_factors_1000(b: &mut Bencher) { use std::sync::TaskPool; let mut rng = thread_rng(); let pool = TaskPool::new(TEST_SLICES); b.iter(|| { let mut vec: Vec<(u32, Vec<u32>)> = range_inclusive(1, TEST_MAX) .map(|x| (x, Vec::new())).collect(); rng.shuffle(&mut *vec); let (par_vec, par_slices) = ParVec::new(vec, TEST_SLICES); for mut slice in par_slices.into_iter() { pool.execute(move || for pair in slice.iter_mut() { let (x, ref mut x_primes) = *pair; *x_primes = get_prime_factors(x); } ); } let mut vec = par_vec.into_inner(); vec.sort(); }); } fn get_prime_factors(x: u32) -> Vec<u32> { range(1, x).filter(|&y| x % y == 0 && is_prime(y)).collect() } fn is_prime(x: u32) -> bool { use std::iter::range_step; if x < 3 { return true; } if x & 1 == 0 { return false; } for i in range_step(3, x, 2) { if x % i == 0 { return false; } } true } }
extern crate alloc; use self::alloc::arc; use std::cmp::min; use std::fmt::{Formatter, Show}; use std::fmt::Error as FmtError; use std::iter::range_inclusive; use std::sync::Arc; use std::mem; use std::ops; pub struct ParVec<T> { data: Arc<Vec<T>>, } impl<T: Send + Sync> ParVec<T> { pub fn new(vec: Vec<T>, slices: uint) -> (ParVec<T>, Vec<ParSlice<T>>) { let data = Arc::new(vec); let par_slices = sub_slices(data.as_slice(), slices).into_iter() .map(|slice| ParSlice { _vec: data.clone(), data: unsafe { mem::transmute(slice) }, } ).collect(); let par_vec = ParVec { data: data, }; (par_vec, par_slices) } pub fn into_inner_opt(self) -> Result<Vec<T>, ParVec<T>> {
} pub fn into_inner(mut self) -> Vec<T> { loop { match self.into_inner_opt() { Ok(vec) => return vec, Err(new_self) => self = new_self, } } } } fn sub_slices<T>(parent: &[T], slice_count: uint) -> Vec<&[T]> { let mut slices = Vec::new(); let len = parent.len(); let mut start = 0u; for curr in range_inclusive(1, slice_count).rev() { let slice_len = (len - start) / curr; let end = min(start + slice_len, len); slices.push(parent.slice(start, end)); start += slice_len; } slices } pub struct ParSlice<T: Send> { _vec: Arc<Vec<T>>, data: &'static mut [T], } impl<T: Send> ops::Deref for ParSlice<T> { type Target = [T]; fn deref<'a>(&'a self) -> &'a [T] { self.data } } impl<T: Send> ops::DerefMut for ParSlice<T> { fn deref_mut<'a>(&'a mut self) -> &'a mut [T] { self.data } } impl<T: Send> Show for ParSlice<T> where T: Show { fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> { write!(f, "{}", self.data) } } #[cfg(test)] mod test { extern crate test; use self::test::Bencher; use super::ParVec; use std::mem; use std::rand::{thread_rng, Rng}; use std::iter::range_inclusive; const TEST_SLICES: uint = 8; const TEST_MAX: u32 = 1000; #[test] fn test_unwrap_safely() { let (vec, slices) = ParVec::new([5u; TEST_MAX as uint].to_vec(), TEST_SLICES); mem::drop(slices); let vec = vec.into_inner(); assert_eq!(&*vec, [5u; TEST_MAX as uint].as_slice()); } #[test] fn test_slices() { let (_, slices) = ParVec::new(range(1u32, TEST_MAX).collect(), TEST_SLICES); assert_eq!(slices.len(), TEST_SLICES); } #[bench] fn seq_prime_factors_1000(b: &mut Bencher) { let vec: Vec<u32> = range_inclusive(1, TEST_MAX).collect(); b.iter(|| { let _: Vec<(u32, Vec<u32>)> = vec.iter() .map(|&x| (x, get_prime_factors(x))) .collect(); }); } #[bench] fn par_prime_factors_1000(b: &mut Bencher) { use std::sync::TaskPool; let mut rng = thread_rng(); let pool = TaskPool::new(TEST_SLICES); b.iter(|| { let mut vec: Vec<(u32, Vec<u32>)> = range_inclusive(1, TEST_MAX) .map(|x| (x, Vec::new())).collect(); 
rng.shuffle(&mut *vec); let (par_vec, par_slices) = ParVec::new(vec, TEST_SLICES); for mut slice in par_slices.into_iter() { pool.execute(move || for pair in slice.iter_mut() { let (x, ref mut x_primes) = *pair; *x_primes = get_prime_factors(x); } ); } let mut vec = par_vec.into_inner(); vec.sort(); }); } fn get_prime_factors(x: u32) -> Vec<u32> { range(1, x).filter(|&y| x % y == 0 && is_prime(y)).collect() } fn is_prime(x: u32) -> bool { use std::iter::range_step; if x < 3 { return true; } if x & 1 == 0 { return false; } for i in range_step(3, x, 2) { if x % i == 0 { return false; } } true } }
if arc::strong_count(&self.data) == 1 { let vec_ptr: &mut Vec<T> = unsafe { mem::transmute(&*self.data) }; Ok(mem::replace(vec_ptr, Vec::new())) } else { Err(self) }
if_condition
[ { "content": "pub fn insert_seq_n<M, I, R>(n: uint,\n\n map: &mut M,\n\n b: &mut Bencher,\n\n mut insert: I,\n\n mut remove: R) where\n\n I: FnMut(&mut M, uint),\n\n R: FnMut(&mut M, uint),\n\n{\n\n // setup\n\n for i in range(0u, n) {\n\n insert(map, i * 2);\n\n }\n\n\n\n // measure\n\n let mut i = 1;\n\n b.iter(|| {\n\n insert(map, i);\n\n remove(map, i);\n\n i = (i + 2) % n;\n\n })\n\n}\n\n\n", "file_path": "src/bench.rs", "rank": 0, "score": 125964.91968159622 }, { "content": "pub fn insert_rand_n<M, I, R>(n: uint,\n\n map: &mut M,\n\n b: &mut Bencher,\n\n mut insert: I,\n\n mut remove: R) where\n\n I: FnMut(&mut M, uint),\n\n R: FnMut(&mut M, uint),\n\n{\n\n // setup\n\n let mut rng = rand::weak_rng();\n\n\n\n for _ in range(0, n) {\n\n insert(map, rng.gen::<uint>() % n);\n\n }\n\n\n\n // measure\n\n b.iter(|| {\n\n let k = rng.gen::<uint>() % n;\n\n insert(map, k);\n\n remove(map, k);\n\n })\n\n}\n\n\n", "file_path": "src/bench.rs", "rank": 1, "score": 125964.9196815962 }, { "content": "pub fn find_rand_n<M, T, I, F>(n: uint,\n\n map: &mut M,\n\n b: &mut Bencher,\n\n mut insert: I,\n\n mut find: F) where\n\n I: FnMut(&mut M, uint),\n\n F: FnMut(&M, uint) -> T,\n\n{\n\n // setup\n\n let mut rng = rand::weak_rng();\n\n let mut keys: Vec<_> = range(0, n).map(|_| rng.gen::<uint>() % n).collect();\n\n\n\n for k in keys.iter() {\n\n insert(map, *k);\n\n }\n\n\n\n rng.shuffle(keys.as_mut_slice());\n\n\n\n // measure\n\n let mut i = 0;\n\n b.iter(|| {\n\n let t = find(map, keys[i]);\n\n i = (i + 1) % n;\n\n t\n\n })\n\n}\n\n\n", "file_path": "src/bench.rs", "rank": 2, "score": 120216.73846696595 }, { "content": "pub fn find_seq_n<M, T, I, F>(n: uint,\n\n map: &mut M,\n\n b: &mut Bencher,\n\n mut insert: I,\n\n mut find: F) where\n\n I: FnMut(&mut M, uint),\n\n F: FnMut(&M, uint) -> T,\n\n{\n\n // setup\n\n for i in range(0u, n) {\n\n insert(map, i);\n\n }\n\n\n\n // measure\n\n let mut i = 0;\n\n b.iter(|| {\n\n let x = find(map, i);\n\n i = (i + 1) % n;\n\n x\n\n 
})\n\n}\n", "file_path": "src/bench.rs", "rank": 3, "score": 120216.73846696595 }, { "content": "#[inline]\n\nfn chunk(n: uint, idx: uint) -> uint {\n\n let sh = uint::BITS - (SHIFT * (idx + 1));\n\n (n >> sh) & MASK\n\n}\n\n\n", "file_path": "src/trie/map.rs", "rank": 4, "score": 110389.94997236467 }, { "content": "/// Gets the largest a block is allowed to become.\n\nfn block_max(b: uint) -> uint {\n\n b + 1\n\n}\n\n\n\n/// Gets the smallest a (non-end) block is allowed to become.\n", "file_path": "src/blist.rs", "rank": 5, "score": 110122.10341684437 }, { "content": "#[allow(unused)]\n\nfn block_min(b: uint) -> uint {\n\n b - 1\n\n}\n\n\n", "file_path": "src/blist.rs", "rank": 6, "score": 110122.10341684437 }, { "content": "/// Returns index of \"left\" item of parent node.\n\nfn parent_left(x: uint) -> uint {\n\n debug_assert!(!is_root(x));\n\n left((x - 2) / 2)\n\n}\n\n\n", "file_path": "src/interval_heap.rs", "rank": 7, "score": 107365.29117561226 }, { "content": "/// Set LSB to zero for the \"left\" item index of a node.\n\nfn left(x: uint) -> uint { x & !1u }\n\n\n", "file_path": "src/interval_heap.rs", "rank": 8, "score": 106615.90725978317 }, { "content": "fn is_root(x: uint) -> bool { x < 2 }\n\n\n", "file_path": "src/interval_heap.rs", "rank": 10, "score": 89695.78195499262 }, { "content": "#[cfg(test)]\n\npub fn as_slice<T, C: Compare<T>>(x: &IntervalHeap<T, C>) -> &[T] {\n\n x.data.as_slice()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::rand::{ thread_rng, Rng };\n\n use super::{ IntervalHeap, as_slice };\n\n\n\n fn is_interval_heap<T: Ord>(x: &[T]) -> bool {\n\n if x.len() < 2 { return true; }\n\n if x[1] < x[0] { return false; }\n\n let mut ofs = 2;\n\n while ofs < x.len() {\n\n let ofz = ofs + (ofs + 1 < x.len()) as uint;\n\n if x[ofz] < x[ofs] { return false; }\n\n let parent = (ofs / 2 - 1) & !1u;\n\n if x[ofs] < x[parent] { return false; }\n\n if x[parent+1] < x[ofz] { return false; }\n\n ofs += 2;\n", "file_path": "src/interval_heap.rs", 
"rank": 11, "score": 87088.84007770024 }, { "content": "/// Makes a new block for insertion in the list.\n\nfn make_block<T>(b: uint) -> RingBuf<T> {\n\n RingBuf::with_capacity(block_max(b))\n\n}\n\n\n", "file_path": "src/blist.rs", "rank": 12, "score": 82014.2080613854 }, { "content": "/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None\n\nfn cmp_opt(x: Option<&uint>, y: Option<&uint>, short: Ordering, long: Ordering) -> Ordering {\n\n match (x, y) {\n\n (None , _ ) => short,\n\n (_ , None ) => long,\n\n (Some(x1), Some(y1)) => x1.cmp(y1),\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Iter<'a> {\n\n type Item = uint;\n\n fn next(&mut self) -> Option<uint> {\n\n self.iter.next().map(|(key, _)| key)\n\n }\n\n\n\n fn size_hint(&self) -> (uint, Option<uint>) {\n\n self.iter.size_hint()\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Difference<'a> {\n", "file_path": "src/trie/set.rs", "rank": 13, "score": 81885.00253427612 }, { "content": "fn remove<T>(count: &mut uint, child: &mut TrieNode<T>, key: uint,\n\n idx: uint) -> Option<T> {\n\n let (ret, this) = match *child {\n\n External(stored, _) if stored == key => {\n\n match mem::replace(child, Nothing) {\n\n External(_, value) => (Some(value), true),\n\n _ => unreachable!()\n\n }\n\n }\n\n External(..) 
=> (None, false),\n\n Internal(box ref mut x) => {\n\n let ret = remove(&mut x.count, &mut x.children[chunk(key, idx)],\n\n key, idx + 1);\n\n (ret, x.count == 0)\n\n }\n\n Nothing => (None, false)\n\n };\n\n\n\n if this {\n\n *child = Nothing;\n", "file_path": "src/trie/map.rs", "rank": 14, "score": 81387.41023862876 }, { "content": "/// Inserts a new node for the given key and value, at or below `start_node`.\n\n///\n\n/// The index (`idx`) is the index of the next node, such that the start node\n\n/// was accessed via parent.children[chunk(key, idx - 1)].\n\n///\n\n/// The count is the external node counter for the start node's parent,\n\n/// which will be incremented only if `start_node` is transformed into a *new* external node.\n\n///\n\n/// Returns a mutable reference to the inserted value and an optional previous value.\n\nfn insert<'a, T>(count: &mut uint, start_node: &'a mut TrieNode<T>, key: uint, value: T, idx: uint)\n\n -> (&'a mut T, Option<T>) {\n\n // We branch twice to avoid having to do the `replace` when we\n\n // don't need to; this is much faster, especially for keys that\n\n // have long shared prefixes.\n\n match *start_node {\n\n Nothing => {\n\n *count += 1;\n\n *start_node = External(key, value);\n\n match *start_node {\n\n External(_, ref mut value_ref) => return (value_ref, None),\n\n _ => unreachable!()\n\n }\n\n }\n\n Internal(box ref mut x) => {\n\n return insert(&mut x.count, &mut x.children[chunk(key, idx)], key, value, idx + 1);\n\n }\n\n External(stored_key, ref mut stored_value) if stored_key == key => {\n\n // Swap in the new value and return the old.\n\n let old_value = mem::replace(stored_value, value);\n", "file_path": "src/trie/map.rs", "rank": 15, "score": 79569.28420980736 }, { "content": "fn find_mut<'r, T>(child: &'r mut TrieNode<T>, key: uint, idx: uint) -> Option<&'r mut T> {\n\n match *child {\n\n External(stored, ref mut value) if stored == key => Some(value),\n\n External(..) 
=> None,\n\n Internal(ref mut x) => find_mut(&mut x.children[chunk(key, idx)], key, idx + 1),\n\n Nothing => None\n\n }\n\n}\n\n\n", "file_path": "src/trie/map.rs", "rank": 16, "score": 72664.92986672273 }, { "content": "/// Returns the maximum of two values according to the given comparator, or `lhs` if they\n\n/// are equal.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use collect::compare::{Extract, Natural, max};\n\n///\n\n/// struct Foo { key: char, id: u8 }\n\n///\n\n/// let f1 = &Foo { key: 'a', id: 1};\n\n/// let f2 = &Foo { key: 'a', id: 2};\n\n/// let f3 = &Foo { key: 'b', id: 3};\n\n///\n\n/// let cmp = Extract::new(|f: &Foo| f.key, Natural);\n\n/// assert_eq!(max(&cmp, f1, f2).id, f1.id);\n\n/// assert_eq!(max(&cmp, f1, f3).id, f3.id);\n\n/// ```\n\n// FIXME: convert to default method on `Compare` once where clauses permit equality\n\n// (https://github.com/rust-lang/rust/issues/20041)\n\npub fn max<'a, C: ?Sized, T: ?Sized>(cmp: &C, lhs: &'a T, rhs: &'a T) -> &'a T\n\n where C: Compare<T> {\n\n\n\n if cmp.compares_ge(lhs, rhs) { lhs } else { rhs }\n\n}\n\n\n", "file_path": "src/compare.rs", "rank": 17, "score": 60004.11480452161 }, { "content": "/// Returns the minimum of two values according to the given comparator, or `lhs` if they\n\n/// are equal.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use collect::compare::{Extract, Natural, min};\n\n///\n\n/// struct Foo { key: char, id: u8 }\n\n///\n\n/// let f1 = &Foo { key: 'b', id: 1};\n\n/// let f2 = &Foo { key: 'b', id: 2};\n\n/// let f3 = &Foo { key: 'a', id: 3};\n\n///\n\n/// let cmp = Extract::new(|f: &Foo| f.key, Natural);\n\n/// assert_eq!(min(&cmp, f1, f2).id, f1.id);\n\n/// assert_eq!(min(&cmp, f1, f3).id, f3.id);\n\n/// ```\n\n// FIXME: convert to default method on `Compare` once where clauses permit equality\n\n// (https://github.com/rust-lang/rust/issues/20041)\n\npub fn min<'a, C: ?Sized, T: ?Sized>(cmp: &C, lhs: &'a T, rhs: &'a T) -> &'a T\n\n where C: Compare<T> 
{\n\n\n\n if cmp.compares_le(lhs, rhs) { lhs } else { rhs }\n\n}\n\n\n", "file_path": "src/compare.rs", "rank": 18, "score": 60004.11480452161 }, { "content": "/// A non-owning link, based on a raw ptr.\n\nstruct Raw<T> {\n\n ptr: *mut Node<T>,\n\n marker: NoCopy,\n\n}\n\n\n\nimpl<T> Raw<T> {\n\n /// Makes a null reference.\n\n #[inline]\n\n fn none() -> Raw<T> {\n\n Raw { ptr: ptr::null_mut(), marker: NoCopy }\n\n }\n\n\n\n /// Makes a reference to the given node.\n\n #[inline]\n\n fn some(ptr: &mut Node<T>) -> Raw<T> {\n\n Raw { ptr: ptr, marker: NoCopy }\n\n }\n\n\n\n /// Converts the ref to an Option containing a reference.\n\n #[inline]\n", "file_path": "src/proto/dlist.rs", "rank": 19, "score": 53770.53556438172 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: Option<Rc<Node<T>>>,\n\n}\n\n\n\nimpl<T> Node<T> {\n\n fn new(elem: T) -> Node<T> {\n\n Node { elem: elem, next: None }\n\n }\n\n}\n\n\n\n/// An iterator over the items of an ImmutSList\n\n#[derive(Clone)]\n\npub struct Iter<'a, T: 'a> {\n\n head: Option<&'a Node<T>>,\n\n nelem: uint,\n\n}\n\n\n\n/// An immutable singly-linked list, as seen in basically every functional language\n\npub struct ImmutSList<T> {\n", "file_path": "src/immut_slist.rs", "rank": 20, "score": 53770.53556438172 }, { "content": "/// A DList node.\n\nstruct Node<T> {\n\n prev: Raw<T>,\n\n next: Link<T>,\n\n elem: T,\n\n}\n\n\n\nimpl<T> Node<T> {\n\n /// Makes a node with the given element.\n\n #[inline]\n\n fn new(elem: T) -> Node<T> {\n\n Node {\n\n prev: Raw::none(),\n\n next: None,\n\n elem: elem,\n\n }\n\n }\n\n\n\n /// Joins two lists.\n\n #[inline]\n\n fn link(&mut self, mut next: Box<Node<T>>) {\n", "file_path": "src/proto/dlist.rs", "rank": 21, "score": 53770.53556438172 }, { "content": "// An internal node holds SIZE child nodes, which may themselves contain more internal nodes.\n\n//\n\n// Throughout this implementation, \"idx\" is used to refer to a section of key that is used\n\n// to access a node. 
The layer of the tree directly below the root corresponds to idx 0.\n\nstruct InternalNode<T> {\n\n // The number of direct children which are external (i.e. that store a value).\n\n count: uint,\n\n children: [TrieNode<T>; SIZE]\n\n}\n\n\n\n// Each child of an InternalNode may be internal, in which case nesting continues,\n\n// external (containing a value), or empty\n", "file_path": "src/trie/map.rs", "rank": 22, "score": 52210.44526937603 }, { "content": "/// An interface for casting C-like enum to u32 and back. A typical\n\n/// implementation can be seen below:\n\n///\n\n/// ```{rust}\n\n/// # use collect::enum_set::CLike;\n\n/// use std::mem;\n\n///\n\n/// #[derive(Copy)]\n\n/// #[repr(u32)]\n\n/// enum Foo {\n\n/// A, B, C\n\n/// }\n\n///\n\n/// impl CLike for Foo {\n\n/// fn to_u32(&self) -> u32 {\n\n/// *self as u32\n\n/// }\n\n/// unsafe fn from_u32(v: u32) -> Foo {\n\n/// mem::transmute(v)\n\n/// }\n\n/// }\n\n/// ```\n\npub trait CLike {\n\n /// Converts a C-like enum to a `u32`.\n\n fn to_u32(&self) -> u32;\n\n /// Converts a `u32` to a C-like enum. 
This method only needs to be safe\n\n /// for possible return values of `to_u32` of this trait.\n\n unsafe fn from_u32(u32) -> Self;\n\n}\n\n\n", "file_path": "src/enum_set.rs", "rank": 23, "score": 51598.58258384034 }, { "content": "/// An Iterator adapter that walks through all the elements in the Iterator,\n\n/// converts them to Strings and joins them to one big String, seperated by\n\n/// some seperator string slice.\n\npub trait StringJoiner {\n\n /// Converts all elements the Iterator yields to Strings,\n\n /// then combines them all into one String, seperated by sep.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use collect::iter::StringJoiner;\n\n ///\n\n /// let vec = vec![1,2,3];\n\n /// assert_eq!(vec.iter().join(\", \").as_slice(), \"1, 2, 3\");\n\n /// ```\n\n fn join(&mut self, sep: &str) -> String;\n\n}\n\n\n\n// Implement it for all Iterators with Elements convertable into a String\n\nimpl<A: ToString, T: Iterator<Item=A>> StringJoiner for T {\n\n fn join(&mut self, sep: &str) -> String {\n\n match self.next() {\n\n Some(elem) => {\n", "file_path": "src/iter/string_joiner.rs", "rank": 24, "score": 50180.52980401393 }, { "content": "/// A list of nodes encoding a path from the root of a TrieMap to a node.\n\n///\n\n/// Invariants:\n\n/// * The last node is either `External` or `Nothing`.\n\n/// * Pointers at indexes less than `length` can be safely dereferenced.\n\nstruct SearchStack<'a, T: 'a> {\n\n map: &'a mut TrieMap<T>,\n\n length: uint,\n\n key: uint,\n\n items: [*mut TrieNode<T>; MAX_DEPTH]\n\n}\n\n\n\nimpl<'a, T> SearchStack<'a, T> {\n\n /// Creates a new search-stack with empty entries.\n\n fn new(map: &'a mut TrieMap<T>, key: uint) -> SearchStack<'a, T> {\n\n SearchStack {\n\n map: map,\n\n length: 0,\n\n key: key,\n\n items: [ptr::null_mut(); MAX_DEPTH]\n\n }\n\n }\n\n\n\n fn push(&mut self, node: *mut TrieNode<T>) {\n\n self.length += 1;\n", "file_path": "src/trie/map.rs", "rank": 25, "score": 49475.71044310765 }, { 
"content": "struct LruEntry<K, V> {\n\n next: *mut LruEntry<K, V>,\n\n prev: *mut LruEntry<K, V>,\n\n key: K,\n\n value: V,\n\n}\n\n\n\n/// An LRU Cache.\n\npub struct LruCache<K, V> {\n\n map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>>,\n\n max_size: uint,\n\n head: *mut LruEntry<K, V>,\n\n}\n\n\n\nimpl<S, K: Hash<S>> Hash<S> for KeyRef<K> {\n\n fn hash(&self, state: &mut S) {\n\n unsafe { (*self.k).hash(state) }\n\n }\n\n}\n\n\n", "file_path": "src/lru_cache.rs", "rank": 26, "score": 49342.89073819404 }, { "content": "#[derive(Clone)]\n\nstruct TreeNode<K, V> {\n\n key: K,\n\n value: V,\n\n left: Option<Box<TreeNode<K, V>>>,\n\n right: Option<Box<TreeNode<K, V>>>,\n\n level: uint\n\n}\n\n\n\nimpl<K, V> TreeNode<K, V> {\n\n /// Creates a new tree node.\n\n #[inline]\n\n pub fn new(key: K, value: V) -> TreeNode<K, V> {\n\n TreeNode{key: key, value: value, left: None, right: None, level: 1}\n\n }\n\n}\n\n\n", "file_path": "src/tree/map.rs", "rank": 27, "score": 49342.89073819404 }, { "content": "#[test]\n\nfn test_join() {\n\n let many = vec![1u,2,3];\n\n let one = vec![1u];\n\n let none: Vec<uint> = vec![];\n\n\n\n assert_eq!(many.iter().join(\", \").as_slice(), \"1, 2, 3\");\n\n assert_eq!( one.iter().join(\", \").as_slice(), \"1\");\n\n assert_eq!(none.iter().join(\", \").as_slice(), \"\");\n\n}\n", "file_path": "src/iter/string_joiner.rs", "rank": 28, "score": 48470.79779706679 }, { "content": "/// An iterator that abstracts over all three kinds of ownership for a BList\n\nstruct AbsIter<DListIter, RingBufIter> {\n\n list_iter: DListIter,\n\n left_block_iter: Option<RingBufIter>,\n\n right_block_iter: Option<RingBufIter>,\n\n len: uint,\n\n}\n\n\n\nimpl<A,\n\n RingBufIter: Iterator<Item=A>,\n\n DListIter: Iterator<Item=T>,\n\n T: Traverse<RingBufIter>> Iterator for AbsIter<DListIter, RingBufIter> {\n\n type Item = A;\n\n // I would like to thank all my friends and the fact that Iterator::next doesn't\n\n // borrow self, for this passing borrowck with minimal 
gymnastics\n\n fn next(&mut self) -> Option<A> {\n\n if self.len > 0 { self.len -= 1; }\n\n // Keep loopin' till we hit gold\n\n loop {\n\n // Try to read off the left iterator\n\n let (ret, iter) = match self.left_block_iter.as_mut() {\n", "file_path": "src/blist.rs", "rank": 29, "score": 45830.5448047512 }, { "content": "struct KeyRef<K> { k: *const K }\n\n\n", "file_path": "src/lru_cache.rs", "rank": 30, "score": 45641.196802317725 }, { "content": "/// A comparator imposing a [total order](https://en.wikipedia.org/wiki/Total_order).\n\n///\n\n/// See the [`compare` module's documentation](index.html) for detailed usage.\n\n///\n\n/// The `compares_*` methods may be overridden to provide more efficient implementations.\n\npub trait Compare<Lhs: ?Sized, Rhs: ?Sized = Lhs> {\n\n /// Compares two values, returning `Less`, `Equal`, or `Greater` if `lhs` is less\n\n /// than, equal to, or greater than `rhs`, respectively.\n\n fn compare(&self, lhs: &Lhs, rhs: &Rhs) -> Ordering;\n\n\n\n /// Checks if `lhs` is less than `rhs`.\n\n fn compares_lt(&self, lhs: &Lhs, rhs: &Rhs) -> bool {\n\n self.compare(lhs, rhs) == Less\n\n }\n\n\n\n /// Checks if `lhs` is less than or equal to `rhs`.\n\n fn compares_le(&self, lhs: &Lhs, rhs: &Rhs) -> bool {\n\n self.compare(lhs, rhs) != Greater\n\n }\n\n\n\n /// Checks if `lhs` is greater than or equal to `rhs`.\n\n fn compares_ge(&self, lhs: &Lhs, rhs: &Rhs) -> bool {\n\n self.compare(lhs, rhs) != Less\n\n }\n\n\n", "file_path": "src/compare.rs", "rank": 31, "score": 42067.53851850211 }, { "content": "// Next 2 functions have the same convention: comparator gets\n\n// at input current key and returns search_key cmp cur_key\n\n// (i.e. 
search_key.cmp(&cur_key))\n\nfn tree_find_with<'r, K, V, F>(\n\n node: &'r Option<Box<TreeNode<K, V>>>,\n\n mut f: F,\n\n) -> Option<&'r V> where\n\n F: FnMut(&K) -> Ordering,\n\n{\n\n let mut current: &'r Option<Box<TreeNode<K, V>>> = node;\n\n loop {\n\n match *current {\n\n Some(ref r) => {\n\n match f(&r.key) {\n\n Less => current = &r.left,\n\n Greater => current = &r.right,\n\n Equal => return Some(&r.value)\n\n }\n\n }\n\n None => return None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tree/map.rs", "rank": 32, "score": 41417.48155312739 }, { "content": "// See comments above tree_find_with\n\nfn tree_find_with_mut<'r, K, V, F>(\n\n node: &'r mut Option<Box<TreeNode<K, V>>>,\n\n mut f: F,\n\n) -> Option<&'r mut V> where\n\n F: FnMut(&K) -> Ordering,\n\n{\n\n\n\n let mut current = node;\n\n loop {\n\n let temp = current; // hack to appease borrowck\n\n match *temp {\n\n Some(ref mut r) => {\n\n match f(&r.key) {\n\n Less => current = &mut r.left,\n\n Greater => current = &mut r.right,\n\n Equal => return Some(&mut r.value)\n\n }\n\n }\n\n None => return None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tree/map.rs", "rank": 33, "score": 40259.199214142864 }, { "content": "fn bit<E:CLike>(e: &E) -> u32 {\n\n let value = e.to_u32();\n\n assert!(value < u32::BITS as u32,\n\n \"EnumSet only supports up to {} variants.\", u32::BITS - 1);\n\n 1 << value as uint\n\n}\n\n\n\nimpl<E:CLike> EnumSet<E> {\n\n /// Deprecated: Renamed to `new`.\n\n #[deprecated = \"Renamed to `new`\"]\n\n pub fn empty() -> EnumSet<E> {\n\n EnumSet::new()\n\n }\n\n\n\n /// Returns an empty `EnumSet`.\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn new() -> EnumSet<E> {\n\n EnumSet::new_with_bits(0)\n\n }\n\n\n", "file_path": "src/enum_set.rs", "rank": 34, "score": 40259.199214142864 }, { "content": "/// Allows an iterator to be do an inner join with another\n\n/// iterator to combine their values or filter based on their keys.\n\n/// this 
trait is applied to an iterator over a set like structure\n\npub trait OrderedSetIterator<K>: Iterator<Item=K> + Sized {\n\n /// join two ordered maps together\n\n fn inner_join_map<B, T: OrderedMapIterator<K, B>>(self, map: T)\n\n -> InnerJoinMapSetIterator<T, Self> {\n\n InnerJoinMapSetIterator {\n\n map: map,\n\n set: self\n\n }\n\n }\n\n\n\n /// filter an ordered map with an ordered set\n\n fn inner_join_set<T: OrderedSetIterator<K>>(self, map: T)\n\n -> InnerJoinSetIterator<Self, T> {\n\n InnerJoinSetIterator {\n\n a: self,\n\n b: map\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/iter/ordered_iter.rs", "rank": 35, "score": 38288.820818122804 }, { "content": "/// Allows an iterator to be do an inner join with another\n\n/// iterator to combine their values or filter based on their keys.\n\n/// this trait is applied to an iterator over a map like structure\n\npub trait OrderedMapIterator<K, A>: Iterator<Item=(K, A)> + Sized {\n\n /// join two ordered maps together\n\n fn inner_join_map<B, T: OrderedMapIterator<K, B>>(self, map: T)\n\n -> InnerJoinMapIterator<Self, T> {\n\n InnerJoinMapIterator {\n\n a: self,\n\n b: map\n\n }\n\n }\n\n\n\n /// filter an ordered map with an ordered set\n\n fn inner_join_set<B, T: OrderedSetIterator<K>>(self, set: T)\n\n -> InnerJoinMapSetIterator<Self, T> {\n\n InnerJoinMapSetIterator {\n\n map: self,\n\n set: set\n\n }\n\n }\n\n\n\n /// Join an ordered iterator with the right ordered iterator. 
The\n", "file_path": "src/iter/ordered_iter.rs", "rank": 36, "score": 36741.69874273577 }, { "content": "/// An extension trait with methods applicable to all comparators.\n\npub trait CompareExt<Lhs: ?Sized, Rhs: ?Sized = Lhs> : Compare<Lhs, Rhs> + Sized {\n\n /// Borrows the comparator's parameters before comparing them.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::compare::{Compare, CompareExt, Natural};\n\n /// use std::cmp::Ordering::{Less, Equal, Greater};\n\n ///\n\n /// let a_str = \"a\";\n\n /// let a_string = a_str.to_string();\n\n ///\n\n /// let b_str = \"b\";\n\n /// let b_string = b_str.to_string();\n\n ///\n\n /// let cmp = Natural::<str>.borrow();\n\n /// assert_eq!(cmp.compare(a_str, &a_string), Equal);\n\n /// assert_eq!(cmp.compare(a_str, b_str), Less);\n\n /// assert_eq!(cmp.compare(&b_string, a_str), Greater);\n\n /// ```\n", "file_path": "src/compare.rs", "rank": 37, "score": 34679.099209028354 }, { "content": "// Remove dual horizontal link by rotating left and increasing level of\n\n// the parent\n\nfn split<K, V>(node: &mut Box<TreeNode<K, V>>) {\n\n if node.right.as_ref().map_or(false,\n\n |x| x.right.as_ref().map_or(false, |y| y.level == node.level)) {\n\n let mut save = node.right.take().unwrap();\n\n swap(&mut node.right, &mut save.left); // save.left now None\n\n save.level += 1;\n\n swap(node, &mut save);\n\n node.left = Some(save);\n\n }\n\n}\n\n\n", "file_path": "src/tree/map.rs", "rank": 38, "score": 33710.047456747874 }, { "content": "// Remove left horizontal link by rotating right\n\nfn skew<K, V>(node: &mut Box<TreeNode<K, V>>) {\n\n if node.left.as_ref().map_or(false, |x| x.level == node.level) {\n\n let mut save = node.left.take().unwrap();\n\n swap(&mut node.left, &mut save.right); // save.right now None\n\n swap(node, &mut save);\n\n node.right = Some(save);\n\n }\n\n}\n\n\n", "file_path": "src/tree/map.rs", "rank": 39, "score": 33710.047456747874 }, { "content": "/// The max element in the root 
node of an otherwise valid interval heap\n\n/// has been been replaced with some other value without violating rule (1)\n\n/// for the root node. This function restores the interval heap properties.\n\nfn update_max<T, C: Compare<T>>(v: &mut [T], cmp: &C) {\n\n debug_assert!(cmp.compares_le(&v[0], &v[1]));\n\n // Starting at the root, we go down the tree...\n\n let mut right = 1;\n\n loop {\n\n let c1 = right * 2 + 1; // index of 1st child's right element\n\n let c2 = right * 2 + 3; // index of 2nd child's right element\n\n if v.len() <= c1 { return; } // No children. We're done.\n\n // Pick child with greatest max\n\n let ch = if v.len() <= c2 || cmp.compares_gt(&v[c1], &v[c2]) { c1 }\n\n else { c2 };\n\n if cmp.compares_gt(&v[ch], &v[right]) {\n\n v.swap(ch, right);\n\n right = ch;\n\n let left = right - 1; // always exists\n\n if cmp.compares_gt(&v[left], &v[right]) { v.swap(left, right); }\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/interval_heap.rs", "rank": 40, "score": 33163.58363523401 }, { "content": "/// The min element in the root node of an otherwise valid interval heap\n\n/// has been been replaced with some other value without violating rule (1)\n\n/// for the root node. This function restores the interval heap properties.\n\nfn update_min<T, C: Compare<T>>(v: &mut [T], cmp: &C) {\n\n // Starting at the root, we go down the tree...\n\n debug_assert!(cmp.compares_le(&v[0], &v[1]));\n\n let mut left = 0;\n\n loop {\n\n let c1 = left * 2 + 2; // index of 1st child's left element\n\n let c2 = left * 2 + 4; // index of 2nd child's left element\n\n if v.len() <= c1 { return; } // No children. 
We're done.\n\n // Pick child with lowest min\n\n let ch = if v.len() <= c2 || cmp.compares_lt(&v[c1], &v[c2]) { c1 }\n\n else { c2 };\n\n if cmp.compares_lt(&v[ch], &v[left]) {\n\n v.swap(ch, left);\n\n left = ch;\n\n let right = left + 1;\n\n if right < v.len() {\n\n if cmp.compares_gt(&v[left], &v[right]) { v.swap(left, right); }\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interval_heap.rs", "rank": 41, "score": 33163.58363523401 }, { "content": "/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None\n\nfn cmp_opt<T, C: Compare<T>>(x: Option<& &T>, y: Option<& &T>,\n\n short: Ordering, long: Ordering, cmp: &C) -> Ordering {\n\n match (x, y) {\n\n (None , _ ) => short,\n\n (_ , None ) => long,\n\n (Some(x1), Some(y1)) => cmp.compare(*x1, *y1),\n\n }\n\n}\n\n\n\n\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n\n type Item = &'a T;\n\n #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next().map(|(value, _)| value) }\n\n #[inline] fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }\n\n}\n\n\n\nimpl<'a, T> Iterator for RevIter<'a, T> {\n\n type Item = &'a T;\n\n #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next().map(|(value, _)| value) }\n\n #[inline] fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }\n", "file_path": "src/tree/set.rs", "rank": 42, "score": 32662.553295416456 }, { "content": "fn deref_mut<K, V>(x: &mut Option<Box<TreeNode<K, V>>>)\n\n -> *mut TreeNode<K, V> {\n\n match *x {\n\n Some(ref mut n) => {\n\n let n: &mut TreeNode<K, V> = &mut **n;\n\n n as *mut TreeNode<K, V>\n\n }\n\n None => ptr::null_mut()\n\n }\n\n}\n\n\n\n/// Lazy forward iterator over a map that consumes the map while iterating\n\npub struct IntoIter<K, V> {\n\n stack: Vec<TreeNode<K, V>>,\n\n remaining: uint\n\n}\n\n\n\nimpl<K, V> Iterator for IntoIter<K,V> {\n\n type Item = (K, V);\n\n #[inline]\n", "file_path": "src/tree/map.rs", "rank": 43, "score": 32395.088243729788 }, 
{ "content": "/// The first `v.len() - 1` elements are considered a valid interval heap\n\n/// and the last element is to be inserted.\n\nfn interval_heap_push<T, C: Compare<T>>(v: &mut [T], cmp: &C) {\n\n debug_assert!(v.len() > 0);\n\n // Start with the last new/modified node and work our way to\n\n // the root if necessary...\n\n let mut node_max = v.len() - 1;\n\n let mut node_min = left(node_max);\n\n // The reason for using two variables instead of one is to\n\n // get around the special case of the last node only containing\n\n // one element (node_min == node_max).\n\n if cmp.compares_gt(&v[node_min], &v[node_max]) { v.swap(node_min, node_max); }\n\n while !is_root(node_min) {\n\n let par_min = parent_left(node_min);\n\n let par_max = par_min + 1;\n\n if cmp.compares_lt(&v[node_min], &v[par_min]) {\n\n v.swap(par_min, node_min);\n\n } else if cmp.compares_lt(&v[par_max], &v[node_max]) {\n\n v.swap(par_max, node_max);\n\n } else {\n\n return; // nothing to do anymore\n\n }\n\n debug_assert!(cmp.compares_le(&v[node_min], &v[node_max]));\n\n node_min = par_min;\n\n node_max = par_max;\n\n }\n\n}\n\n\n", "file_path": "src/interval_heap.rs", "rank": 44, "score": 32395.088243729788 }, { "content": "fn deref<'a, K, V>(node: &'a Option<Box<TreeNode<K, V>>>) -> *const TreeNode<K, V> {\n\n match *node {\n\n Some(ref n) => {\n\n let n: &TreeNode<K, V> = &**n;\n\n n as *const TreeNode<K, V>\n\n }\n\n None => ptr::null()\n\n }\n\n}\n\n\n", "file_path": "src/tree/map.rs", "rank": 45, "score": 27661.514716462167 }, { "content": "fn remove<K, V, C, Q: ?Sized>(node: &mut Option<Box<TreeNode<K, V>>>, key: &Q, cmp: &C)\n\n -> Option<V> where C: Compare<Q, K> {\n\n\n\n fn heir_swap<K, V>(node: &mut Box<TreeNode<K, V>>,\n\n child: &mut Option<Box<TreeNode<K, V>>>) {\n\n // *could* be done without recursion, but it won't borrow check\n\n for x in child.iter_mut() {\n\n if x.right.is_some() {\n\n heir_swap(node, &mut x.right);\n\n } else {\n\n swap(&mut node.key, &mut x.key);\n\n 
swap(&mut node.value, &mut x.value);\n\n }\n\n }\n\n }\n\n\n\n match *node {\n\n None => {\n\n return None; // bottom of tree\n\n }\n", "file_path": "src/tree/map.rs", "rank": 46, "score": 25457.03118852192 }, { "content": "fn insert<K, V, C>(node: &mut Option<Box<TreeNode<K, V>>>, key: K, value: V, cmp: &C)\n\n -> Option<V> where C: Compare<K> {\n\n\n\n match *node {\n\n Some(ref mut save) => {\n\n match cmp.compare(&key, &save.key) {\n\n Less => {\n\n let inserted = insert(&mut save.left, key, value, cmp);\n\n skew(save);\n\n split(save);\n\n inserted\n\n }\n\n Greater => {\n\n let inserted = insert(&mut save.right, key, value, cmp);\n\n skew(save);\n\n split(save);\n\n inserted\n\n }\n\n Equal => {\n\n save.key = key;\n", "file_path": "src/tree/map.rs", "rank": 47, "score": 25457.03118852192 }, { "content": "collect-rs is intended as an experimental extension of the Rust standard library's\n\nlibcollections. Ideas that are too niche, crazy, or experimental to land in libcollections\n\ncan be gathered here where they can gain the maintenance and network-effect benefits that\n\nlibcollections enjoys, but without worrying about such ivory tower concepts as\n\n\"general usefulness\" and \"consistency\".\n\n\n\nFor the time being, we plan to be highly volatile with a low barrier of entry. We want to\n\nexplore the space of data structuring in Rust. We want to prove out ideas and implementations\n\nthat could one day make their way into the standard library.\n\n\n\nGot a concurrent, immutable, or persistent collection? Awesome! Crazy ideas for collection or\n\niterator adapters? Heck yeah!\n\n\n\nCome on in!\n\n\n\n[Documentation](http://Gankro.github.io/collect-rs/collect)\n\n\n\n-----------\n\n\n\nNote that anything included in collect-rs is theoretically a candidate for inclusion in\n\nlibcollections. 
As such, this project is licensed under the same terms as Rust itself.\n", "file_path": "README.md", "rank": 48, "score": 21304.57696917831 }, { "content": "\n\n\n\n#![feature(unsafe_destructor)]\n\n#![feature(unboxed_closures)]\n\n#![feature(slicing_syntax)]\n\n#![feature(old_orphan_check)]\n\n\n\n#[cfg(test)] extern crate test;\n\nextern crate core;\n\nextern crate traverse;\n\n\n\n\n\n\n\n\n\n// Re-Exports\n\n\n\npub use blist::BList;\n\npub use enum_set::EnumSet;\n\npub use immut_slist::ImmutSList;\n\npub use interval_heap::IntervalHeap;\n", "file_path": "src/lib.rs", "rank": 52, "score": 21.867443066747498 }, { "content": " 0 => None,\n\n 1 => Some((&self.data[0],&self.data[0])),\n\n _ => Some((&self.data[0],&self.data[1]))\n\n }\n\n }\n\n\n\n /// Returns the number of items the interval heap could hold\n\n /// without reallocation.\n\n pub fn capacity(&self) -> uint {\n\n self.data.capacity()\n\n }\n\n\n\n /// Reserves the minimum capacity for exactly `additional` more elements\n\n /// to be inserted in the given `IntervalHeap`. Does nothing if the capacity\n\n /// is already sufficient.\n\n ///\n\n /// Note that the allocator may give the collection more space than it\n\n /// requests. Therefore capacity can not be relied upon to be precisely\n\n /// minimal. 
Prefer `reserve` if future insertions are expected.\n\n pub fn reserve_exact(&mut self, additional: uint) {\n", "file_path": "src/interval_heap.rs", "rank": 53, "score": 15.537523890479699 }, { "content": " ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n ///\n\n /// let mut a = TrieMap::new();\n\n /// assert_eq!(a.len(), 0);\n\n /// a.insert(1, \"a\");\n\n /// assert_eq!(a.len(), 1);\n\n /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn len(&self) -> uint { self.length }\n\n\n\n /// Return true if the map contains no elements.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n ///\n", "file_path": "src/trie/map.rs", "rank": 55, "score": 12.902821609132502 }, { "content": " self.data.reserve_exact(additional);\n\n }\n\n\n\n /// Reserves capacity for at least `additional` more elements to be inserted\n\n /// in the `IntervalHeap`. The collection may reserve more space to avoid\n\n /// frequent reallocations.\n\n pub fn reserve(&mut self, additional: uint) {\n\n self.data.reserve(additional);\n\n }\n\n\n\n /// Discards as much additional capacity as possible.\n\n pub fn shrink_to_fit(&mut self) {\n\n self.data.shrink_to_fit()\n\n }\n\n\n\n /// Removes the smallest item and returns it, or None if is empty.\n\n pub fn pop_min(&mut self) -> Option<T> {\n\n match self.data.len() {\n\n 0 => None,\n\n 1...2 => Some(self.data.swap_remove(0)),\n", "file_path": "src/interval_heap.rs", "rank": 56, "score": 12.792056161439715 }, { "content": "use std::ops::{self, Slice, SliceMut};\n\nuse std::uint;\n\nuse std::iter;\n\nuse std::ptr;\n\nuse std::hash::{Writer, Hash};\n\n\n\nuse std::slice;\n\n\n\n// FIXME(conventions): implement bounded iterators\n\n// FIXME(conventions): implement into_iter\n\n// FIXME(conventions): replace each_reverse by making iter DoubleEnded\n\n\n\n// FIXME: #5244: need to manually update the InternalNode constructor\n\nconst SHIFT: uint = 
4;\n\nconst SIZE: uint = 1 << SHIFT;\n\nconst MASK: uint = SIZE - 1;\n\n// The number of chunks that the key is divided into. Also the maximum depth of the TrieMap.\n\nconst MAX_DEPTH: uint = uint::BITS / SHIFT;\n\n\n\n/// A map implemented as a radix trie.\n", "file_path": "src/trie/map.rs", "rank": 57, "score": 12.474836721334967 }, { "content": " /// use collect::TreeMap;\n\n ///\n\n /// let mut a = TreeMap::new();\n\n /// assert_eq!(a.len(), 0);\n\n /// a.insert(1, \"a\");\n\n /// assert_eq!(a.len(), 1);\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn len(&self) -> uint { self.length }\n\n\n\n /// Return true if the map contains no elements.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TreeMap;\n\n ///\n\n /// let mut a = TreeMap::new();\n\n /// assert!(a.is_empty());\n\n /// a.insert(1, \"a\");\n", "file_path": "src/tree/map.rs", "rank": 58, "score": 12.378686293054422 }, { "content": " try!(write!(f, \": \"));\n\n unsafe {\n\n try!(write!(f, \"{}\", (*cur).value));\n\n }\n\n }\n\n write!(f, r\"}}\")\n\n }\n\n}\n\n\n\nunsafe impl<K: Send, V: Send> Send for LruCache<K, V> {}\n\n\n\nunsafe impl<K: Sync, V: Sync> Sync for LruCache<K, V> {}\n\n\n\n#[unsafe_destructor]\n\nimpl<K, V> Drop for LruCache<K, V> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n let node: Box<LruEntry<K, V>> = mem::transmute(self.head);\n\n // Prevent compiler from trying to drop the un-initialized field in the sigil node.\n\n let box internal_node = node;\n", "file_path": "src/lru_cache.rs", "rank": 59, "score": 12.297076513398624 }, { "content": " #[inline]\n\n pub fn each_reverse<'a, F>(&'a self, mut f: F) -> bool\n\n where F: FnMut(&uint, &'a T) -> bool {\n\n self.root.each_reverse(&mut f)\n\n }\n\n\n\n /// Gets an iterator visiting all keys in ascending order by the keys.\n\n /// The iterator's element type is `uint`.\n\n #[unstable = \"matches collection reform specification, waiting for dust to 
settle\"]\n\n pub fn keys<'r>(&'r self) -> Keys<'r, T> {\n\n fn first<A, B>((a, _): (A, B)) -> A { a }\n\n let first: fn((uint, &'r T)) -> uint = first; // coerce to fn pointer\n\n\n\n self.iter().map(first)\n\n }\n\n\n\n /// Gets an iterator visiting all values in ascending order by the keys.\n\n /// The iterator's element type is `&'r T`.\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn values<'r>(&'r self) -> Values<'r, T> {\n", "file_path": "src/trie/map.rs", "rank": 60, "score": 12.258316720268052 }, { "content": " /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn clear(&mut self) {\n\n self.root = InternalNode::new();\n\n self.length = 0;\n\n }\n\n\n\n /// Deprecated: renamed to `get`.\n\n #[deprecated = \"renamed to `get`\"]\n\n pub fn find(&self, key: &uint) -> Option<&T> {\n\n self.get(key)\n\n }\n\n\n\n /// Returns a reference to the value corresponding to the key.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n", "file_path": "src/trie/map.rs", "rank": 61, "score": 12.132980493889589 }, { "content": " Some(&(true, false)) => format!(\"Fizz\"),\n\n Some(&(false, true)) => format!(\"Buzz\"),\n\n Some(&(true, true)) => format!(\"FizzBuzz\"),\n\n Some(&(false, false)) => panic!(\"Outer join failed...\")\n\n })).collect();\n\n\n\n for i in range(1, 100) {\n\n match (i % 3, i % 5) {\n\n (0, 0) => assert_eq!(\"FizzBuzz\", res[i].as_slice()),\n\n (0, _) => assert_eq!(\"Fizz\", res[i].as_slice()),\n\n (_, 0) => assert_eq!(\"Buzz\", res[i].as_slice()),\n\n _ => assert_eq!(format!(\"{}\", i).as_slice(), res[i].as_slice())\n\n }\n\n }\n\n }\n\n\n\n\n\n #[bench]\n\n pub fn inner_join_map(b: &mut test::Bencher) {\n\n use std::collections::BTreeSet;\n", "file_path": "src/iter/ordered_iter.rs", "rank": 62, "score": 12.052873286525873 }, { "content": " ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n 
/// let a: TrieSet = vec![1, 2, 3].into_iter().collect();\n\n /// let b: TrieSet = vec![3, 4, 5].into_iter().collect();\n\n ///\n\n /// let set: TrieSet = &a - &b;\n\n /// let v: Vec<uint> = set.iter().collect();\n\n /// assert_eq!(v, vec![1, 2]);\n\n /// ```\n\n fn sub(self, rhs: &TrieSet) -> TrieSet {\n\n self.difference(rhs).collect()\n\n }\n\n}\n\n\n\n/// A forward iterator over a set.\n\npub struct Iter<'a> {\n\n iter: trie_map::Iter<'a, ()>\n\n}\n", "file_path": "src/trie/set.rs", "rank": 63, "score": 12.049947884548581 }, { "content": " #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn contains_key(&self, key: &uint) -> bool {\n\n self.get(key).is_some()\n\n }\n\n\n\n /// Deprecated: renamed to `get_mut`.\n\n #[deprecated = \"renamed to `get_mut`\"]\n\n pub fn find_mut(&mut self, key: &uint) -> Option<&mut T> {\n\n self.get_mut(key)\n\n }\n\n\n\n /// Returns a mutable reference to the value corresponding to the key.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n ///\n\n /// let mut map = TrieMap::new();\n\n /// map.insert(1, \"a\");\n", "file_path": "src/trie/map.rs", "rank": 64, "score": 11.91121460482114 }, { "content": " #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn len(&self) -> uint { self.map.len() }\n\n\n\n /// Returns true if the set contains no elements\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TreeSet;\n\n ///\n\n /// let mut v = TreeSet::new();\n\n /// assert!(v.is_empty());\n\n /// v.insert(1);\n\n /// assert!(!v.is_empty());\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn is_empty(&self) -> bool { self.len() == 0 }\n\n\n\n /// Clears the set, removing all values.\n", "file_path": "src/tree/set.rs", "rank": 65, "score": 11.896955787412393 }, { "content": " /// Create an LRU Cache that holds at most 
`capacity` items.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use collect::LruCache;\n\n /// let mut cache: LruCache<int, &str> = LruCache::new(10);\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn new(capacity: uint) -> LruCache<K, V> {\n\n let cache = LruCache {\n\n map: HashMap::new(),\n\n max_size: capacity,\n\n head: unsafe{ mem::transmute(box mem::uninitialized::<LruEntry<K, V>>()) },\n\n };\n\n unsafe {\n\n (*cache.head).next = cache.head;\n\n (*cache.head).prev = cache.head;\n\n }\n\n return cache;\n", "file_path": "src/lru_cache.rs", "rank": 66, "score": 11.796141401919023 }, { "content": " _ => {\n\n let res = self.data.swap_remove(0);\n\n update_min(self.data.as_mut_slice(), &self.cmp);\n\n Some(res)\n\n }\n\n }\n\n }\n\n\n\n /// Removes the greatest item and returns it, or None if is empty.\n\n pub fn pop_max(&mut self) -> Option<T> {\n\n match self.data.len() {\n\n 0...2 => self.data.pop(),\n\n _ => {\n\n let res = self.data.swap_remove(1);\n\n update_max(self.data.as_mut_slice(), &self.cmp);\n\n Some(res)\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/interval_heap.rs", "rank": 67, "score": 11.679945691971811 }, { "content": " /// Return the maximum number of key-value pairs the cache can hold.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use collect::LruCache;\n\n /// let mut cache: LruCache<int, &str> = LruCache::new(2);\n\n /// assert_eq!(cache.capacity(), 2);\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn capacity(&self) -> uint {\n\n self.max_size\n\n }\n\n\n\n /// Deprecated: Renamed to `set_capacity`.\n\n #[deprecated = \"Renamed to `set_capacity`\"]\n\n pub fn change_capacity(&mut self, capacity: uint) {\n\n self.set_capacity(capacity)\n\n }\n\n\n", "file_path": "src/lru_cache.rs", "rank": 68, "score": 11.652764385124666 }, { "content": " ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n 
///\n\n /// let set: TrieSet = [2, 4, 6, 8].iter().map(|&x| x).collect();\n\n /// assert_eq!(set.upper_bound(4).next(), Some(6));\n\n /// assert_eq!(set.upper_bound(5).next(), Some(6));\n\n /// assert_eq!(set.upper_bound(10).next(), None);\n\n /// ```\n\n pub fn upper_bound<'a>(&'a self, val: uint) -> Iter<'a> {\n\n Iter { iter: self.map.upper_bound(val) }\n\n }\n\n\n\n /// Visits the values representing the difference, in ascending order.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n", "file_path": "src/trie/set.rs", "rank": 69, "score": 11.54063236535242 }, { "content": " .map(|&s| s)\n\n .collect();\n\n assert!(list.to_string().as_slice() == \"[just, one, test, more]\");\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod bench{\n\n use super::BList;\n\n use test;\n\n use traverse::Traversal;\n\n\n\n #[bench]\n\n fn bench_collect_into(b: &mut test::Bencher) {\n\n let v = &[0i; 64];\n\n b.iter(|| {\n\n let _: BList<int> = v.iter().map(|x| *x).collect();\n\n })\n\n }\n\n\n", "file_path": "src/blist.rs", "rank": 72, "score": 11.378133299741677 }, { "content": " /// Pushes an item onto the queue.\n\n pub fn push(&mut self, x: T) {\n\n self.data.push(x);\n\n interval_heap_push(self.data.as_mut_slice(), &self.cmp);\n\n }\n\n\n\n /// Consumes the `IntervalHeap` and returns the underlying vector\n\n /// in arbitrary order.\n\n pub fn into_vec(self) -> Vec<T> { self.data }\n\n\n\n /// Consumes the `IntervalHeap` and returns a vector in sorted\n\n /// (ascending) order.\n\n pub fn into_sorted_vec(self) -> Vec<T> {\n\n let mut vec = self.data;\n\n for hsize in range(2, vec.len()).rev() {\n\n vec.swap(1, hsize);\n\n update_max(vec.slice_to_mut(hsize), &self.cmp);\n\n }\n\n vec\n\n }\n", "file_path": "src/interval_heap.rs", "rank": 73, "score": 11.30781228646843 }, { "content": " /// comparator.\n\n pub fn with_capacity_and_comparator(capacity: uint, cmp: C) -> IntervalHeap<T, C> {\n\n IntervalHeap { data: Vec::with_capacity(capacity), cmp: cmp 
}\n\n }\n\n\n\n /// Returns a heap containing all the elements of the given vector and ordered\n\n /// according to the given comparator.\n\n pub fn from_vec_and_comparator(mut vec: Vec<T>, cmp: C) -> IntervalHeap<T, C> {\n\n for to in 2 .. vec.len() + 1 {\n\n interval_heap_push(vec.slice_to_mut(to), &cmp);\n\n }\n\n IntervalHeap { data: vec, cmp: cmp }\n\n }\n\n\n\n /// An iterator visiting all values in underlying vector,\n\n /// in arbitrary order.\n\n pub fn iter(&self) -> Iter<T> {\n\n Iter(self.data.iter())\n\n }\n\n\n", "file_path": "src/interval_heap.rs", "rank": 74, "score": 11.099134059509232 }, { "content": " /// let mut v = TrieSet::new();\n\n /// assert_eq!(v.len(), 0);\n\n /// v.insert(1);\n\n /// assert_eq!(v.len(), 1);\n\n /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn len(&self) -> uint { self.map.len() }\n\n\n\n /// Returns true if the set contains no elements\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n /// let mut v = TrieSet::new();\n\n /// assert!(v.is_empty());\n\n /// v.insert(1);\n\n /// assert!(!v.is_empty());\n", "file_path": "src/trie/set.rs", "rank": 75, "score": 10.79047678849736 }, { "content": "\n\n assert!(a <= b && a >= b);\n\n assert!(a.insert(1u));\n\n assert!(a > b && a >= b);\n\n assert!(b < a && b <= a);\n\n assert!(b.insert(2u));\n\n assert!(b > a && b >= a);\n\n assert!(a < b && a <= b);\n\n }\n\n\n\n struct Counter<'a, 'b> {\n\n i: &'a mut uint,\n\n expected: &'b [uint],\n\n }\n\n\n\n impl<'a, 'b> FnMut(uint) -> bool for Counter<'a, 'b> {\n\n extern \"rust-call\" fn call_mut(&mut self, (x,): (uint,)) -> bool {\n\n assert_eq!(x, self.expected[*self.i]);\n\n *self.i += 1;\n\n true\n", "file_path": "src/trie/set.rs", "rank": 76, "score": 10.736379688714939 }, { "content": " pub fn lower_bound<'a>(&'a self, key: uint) -> Iter<'a, T> {\n\n self.bound(key, false)\n\n }\n\n\n\n /// Gets an iterator pointing 
to the first key-value pair whose key is greater than `key`.\n\n /// If all keys in the map are not greater than `key` an empty iterator is returned.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n /// let map: TrieMap<&str> = [(2, \"a\"), (4, \"b\"), (6, \"c\")].iter().map(|&x| x).collect();\n\n ///\n\n /// assert_eq!(map.upper_bound(4).next(), Some((6, &\"c\")));\n\n /// assert_eq!(map.upper_bound(5).next(), Some((6, &\"c\")));\n\n /// assert_eq!(map.upper_bound(10).next(), None);\n\n /// ```\n\n pub fn upper_bound<'a>(&'a self, key: uint) -> Iter<'a, T> {\n\n self.bound(key, true)\n\n }\n", "file_path": "src/trie/map.rs", "rank": 77, "score": 10.723626429226359 }, { "content": " // everything else is zero'd, as we want.\n\n it.remaining_max = this.length;\n\n\n\n // this addr is necessary for the `Internal` pattern.\n\n addr!(loop {\n\n let children = unsafe {addr!(& $($mut_)* (*node).children)};\n\n // it.length is the current depth in the iterator and the\n\n // current depth through the `uint` key we've traversed.\n\n let child_id = chunk(key, it.length);\n\n let (slice_idx, ret) = match children[child_id] {\n\n Internal(ref $($mut_)* n) => {\n\n node = unsafe {\n\n mem::transmute::<_, uint>(&**n)\n\n as *mut InternalNode<T>\n\n };\n\n (child_id + 1, false)\n\n }\n\n External(stored, _) => {\n\n (if stored < key || ($upper && stored == key) {\n\n child_id + 1\n", "file_path": "src/trie/map.rs", "rank": 78, "score": 10.571584859975601 }, { "content": " /// Returns `true` if the set contains a value.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n /// let set: TrieSet = [1, 2, 3].iter().map(|&x| x).collect();\n\n /// assert_eq!(set.contains(&1), true);\n\n /// assert_eq!(set.contains(&4), false);\n\n /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn contains(&self, value: &uint) -> bool {\n\n 
self.map.contains_key(value)\n\n }\n\n\n\n /// Returns `true` if the set has no elements in common with `other`.\n\n /// This is equivalent to checking for an empty intersection.\n\n ///\n", "file_path": "src/trie/set.rs", "rank": 79, "score": 10.521756996368282 }, { "content": " fn second<A, B>((_, b): (A, B)) -> B { b }\n\n let second: fn((uint, &'r T)) -> &'r T = second; // coerce to fn pointer\n\n\n\n self.iter().map(second)\n\n }\n\n\n\n /// Gets an iterator over the key-value pairs in the map, ordered by keys.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n /// let map: TrieMap<&str> = [(3, \"c\"), (1, \"a\"), (2, \"b\")].iter().map(|&x| x).collect();\n\n ///\n\n /// for (key, value) in map.iter() {\n\n /// println!(\"{}: {}\", key, value);\n\n /// }\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn iter<'a>(&'a self) -> Iter<'a, T> {\n", "file_path": "src/trie/map.rs", "rank": 80, "score": 10.521756996368282 }, { "content": "/// A forward iterator over the key-value pairs of a map, with the\n\n/// values being mutable.\n\npub struct IterMut<'a, T:'a> {\n\n stack: [slice::IterMut<'a, TrieNode<T>>; MAX_DEPTH],\n\n length: uint,\n\n remaining_min: uint,\n\n remaining_max: uint\n\n}\n\n\n\n/// A forward iterator over the keys of a map.\n\npub type Keys<'a, T> = iter::Map<(uint, &'a T), uint, Iter<'a, T>, fn((uint, &'a T)) -> uint>;\n\n\n\n/// A forward iterator over the values of a map.\n\npub type Values<'a, T> =\n\n iter::Map<(uint, &'a T), &'a T, Iter<'a, T>, fn((uint, &'a T)) -> &'a T>;\n\n\n\n// FIXME #5846: see `addr!` above.\n\nmacro_rules! item { ($i:item) => {$i}}\n\n\n\nmacro_rules! 
iterator_impl {\n", "file_path": "src/trie/map.rs", "rank": 81, "score": 10.320317701978828 }, { "content": " /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn insert(&mut self, value: uint) -> bool {\n\n self.map.insert(value, ()).is_none()\n\n }\n\n\n\n /// Removes a value from the set. Returns `true` if the value was\n\n /// present in the set.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n /// let mut set = TrieSet::new();\n\n ///\n\n /// set.insert(2);\n\n /// assert_eq!(set.remove(&2), true);\n\n /// assert_eq!(set.remove(&2), false);\n", "file_path": "src/trie/set.rs", "rank": 82, "score": 10.30153281102054 }, { "content": "\n\n /// Adds an enum to the `EnumSet`, and returns `true` if it wasn't there before\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn insert(&mut self, e: E) -> bool {\n\n let result = !self.contains(&e);\n\n self.bits |= bit(&e);\n\n result\n\n }\n\n\n\n /// Removes an enum from the EnumSet\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn remove(&mut self, e: &E) -> bool {\n\n let result = self.contains(e);\n\n self.bits &= !bit(e);\n\n result\n\n }\n\n\n\n /// Deprecated: use `contains`.\n\n #[deprecated = \"use `contains\"]\n\n pub fn contains_elem(&self, e: E) -> bool {\n", "file_path": "src/enum_set.rs", "rank": 83, "score": 10.224722390354497 }, { "content": " old_val\n\n }\n\n\n\n /// Deprecated: Renamed to `remove`.\n\n #[deprecated = \"Renamed to `remove`\"]\n\n pub fn pop(&mut self, key: &uint) -> Option<T> {\n\n self.remove(key)\n\n }\n\n\n\n /// Removes a key from the map, returning the value at the key if the key\n\n /// was previously in the map.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n ///\n\n /// let mut map = TrieMap::new();\n\n /// map.insert(1, \"a\");\n\n 
/// assert_eq!(map.remove(&1), Some(\"a\"));\n", "file_path": "src/trie/map.rs", "rank": 84, "score": 9.980441616008815 }, { "content": " /// assert!(!a.is_empty());\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n #[inline]\n\n pub fn is_empty(&self) -> bool { self.len() == 0 }\n\n\n\n /// Clears the map, removing all values.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TreeMap;\n\n ///\n\n /// let mut a = TreeMap::new();\n\n /// a.insert(1, \"a\");\n\n /// a.clear();\n\n /// assert!(a.is_empty());\n\n /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn clear(&mut self) {\n", "file_path": "src/tree/map.rs", "rank": 85, "score": 9.966605934317027 }, { "content": " /// ```\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn is_empty(&self) -> bool { self.len() == 0 }\n\n\n\n /// Clears the set, removing all values.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n /// let mut v = TrieSet::new();\n\n /// v.insert(1);\n\n /// v.clear();\n\n /// assert!(v.is_empty());\n\n /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn clear(&mut self) { self.map.clear() }\n\n\n", "file_path": "src/trie/set.rs", "rank": 86, "score": 9.966605934317027 }, { "content": "\n\n /// Returns the number of items in the interval heap\n\n pub fn len(&self) -> uint {\n\n self.data.len()\n\n }\n\n\n\n /// Returns true if the queue contains no items.\n\n pub fn is_empty(&self) -> bool {\n\n self.data.is_empty()\n\n }\n\n\n\n /// Drops all items from the queue.\n\n pub fn clear(&mut self) {\n\n self.data.clear();\n\n }\n\n}\n\n\n\nimpl<T, C: Compare<T> + Default> iter::FromIterator<T> for IntervalHeap<T, C> {\n\n /// Creates an interval heap with all the items from an iterator\n\n fn 
from_iter<Iter: Iterator<Item=T>>(iter: Iter) -> IntervalHeap<T, C> {\n", "file_path": "src/interval_heap.rs", "rank": 87, "score": 9.95197149319186 }, { "content": " fn bound<'a>(&'a self, key: uint, upper: bool) -> Iter<'a, T> {\n\n bound!(Iter, self = self,\n\n key = key, is_upper = upper,\n\n slice_from = slice_from_or_fail, iter = iter,\n\n mutability = )\n\n }\n\n\n\n /// Gets an iterator pointing to the first key-value pair whose key is not less than `key`.\n\n /// If all keys in the map are less than `key` an empty iterator is returned.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieMap;\n\n /// let map: TrieMap<&str> = [(2, \"a\"), (4, \"b\"), (6, \"c\")].iter().map(|&x| x).collect();\n\n ///\n\n /// assert_eq!(map.lower_bound(4).next(), Some((4, &\"b\")));\n\n /// assert_eq!(map.lower_bound(5).next(), Some((6, &\"c\")));\n\n /// assert_eq!(map.lower_bound(10).next(), None);\n\n /// ```\n", "file_path": "src/trie/map.rs", "rank": 88, "score": 9.938887327619694 }, { "content": "/// ```{rust}\n\n/// # use collect::enum_set::CLike;\n\n/// use std::mem;\n\n///\n\n/// #[derive(Copy)]\n\n/// #[repr(u32)]\n\n/// enum Foo {\n\n/// A, B, C\n\n/// }\n\n///\n\n/// impl CLike for Foo {\n\n/// fn to_u32(&self) -> u32 {\n\n/// *self as u32\n\n/// }\n\n/// unsafe fn from_u32(v: u32) -> Foo {\n\n/// mem::transmute(v)\n\n/// }\n\n/// }\n\n/// ```\n", "file_path": "src/enum_set.rs", "rank": 89, "score": 9.908920061622142 }, { "content": "}\n\n\n\n/// `IntervalHeap` iterator.\n\npub struct Iter<'a, T: 'a>(slice::Iter<'a, T>);\n\n\n\nimpl<T: Ord> IntervalHeap<T> {\n\n /// Returns an empty heap ordered according to the natural order of its elements.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::IntervalHeap;\n\n ///\n\n /// let heap = IntervalHeap::<u32>::new();\n\n /// assert!(heap.is_empty());\n\n /// ```\n\n pub fn new() -> IntervalHeap<T> { IntervalHeap::with_comparator(Natural) }\n\n\n\n /// Returns an empty heap with 
the given capacity and ordered according to the\n\n /// natural order of its elements.\n", "file_path": "src/interval_heap.rs", "rank": 90, "score": 9.845755949569424 }, { "content": "\n\n }\n\n\n\n /// Returns an iterator over references to the elements of the list in order\n\n pub fn iter <'a> (&'a self) -> Iter<'a, T> {\n\n Iter{ head: self.front.as_ref().map(|x| &**x), nelem: self.len() }\n\n }\n\n\n\n pub fn len (&self) -> uint {\n\n self.length\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n return self.len() == 0\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<T> Drop for ImmutSList<T> {\n\n fn drop (&mut self) {\n", "file_path": "src/immut_slist.rs", "rank": 91, "score": 9.816466542602399 }, { "content": " ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n /// let mut set = TrieSet::new();\n\n /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn new() -> TrieSet {\n\n TrieSet{map: TrieMap::new()}\n\n }\n\n\n\n /// Visits all values in reverse order. Aborts traversal when `f` returns `false`.\n\n /// Returns `true` if `f` returns `true` for all elements.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n /// let set: TrieSet = [1, 2, 3, 4, 5].iter().map(|&x| x).collect();\n", "file_path": "src/trie/set.rs", "rank": 92, "score": 9.772013565732916 }, { "content": "//! use collect::TreeSet;\n\n//!\n\n//! let mut tree_set = TreeSet::new();\n\n//!\n\n//! tree_set.insert(2);\n\n//! tree_set.insert(1);\n\n//! tree_set.insert(3);\n\n//!\n\n//! for i in tree_set.iter() {\n\n//! println!(\"{}\", i) // prints 1, then 2, then 3\n\n//! }\n\n//! 
```\n\n\n\npub mod map;\n\npub mod set;\n", "file_path": "src/tree/mod.rs", "rank": 93, "score": 9.64908832885708 }, { "content": "pub use lru_cache::LruCache;\n\npub use tree_map::TreeMap;\n\npub use tree_set::TreeSet;\n\npub use trie_map::TrieMap;\n\npub use trie_set::TrieSet;\n\n\n\n\n\n\n\n\n\n// privates\n\n\n\nmod tree;\n\nmod trie;\n\n#[cfg(test)] mod bench;\n\n\n\n\n\n\n\n// publics\n\n\n\npub mod compare;\n", "file_path": "src/lib.rs", "rank": 95, "score": 9.586180472381912 }, { "content": "pub mod iter;\n\n\n\npub mod blist;\n\npub mod enum_set;\n\npub mod immut_slist;\n\npub mod interval_heap;\n\npub mod lru_cache;\n\n\n\npub mod tree_map {\n\n pub use tree::map::*;\n\n}\n\n\n\npub mod tree_set {\n\n pub use tree::set::*;\n\n}\n\n\n\npub mod trie_map {\n\n pub use trie::map::*;\n\n}\n\n\n\npub mod trie_set {\n\n pub use trie::set::*;\n\n}\n\n\n\n\n\n\n\npub mod proto;\n\n\n", "file_path": "src/lib.rs", "rank": 96, "score": 9.527128354690891 }, { "content": " #[inline]\n\n pub fn rev_iter<'a>(&'a self) -> RevIter<'a, T> {\n\n RevIter { iter: self.map.rev_iter() }\n\n }\n\n\n\n /// Creates a consuming iterator, that is, one that moves each value out of the\n\n /// set in ascending order. 
The set cannot be used after calling this.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use collect::TreeSet;\n\n /// let set: TreeSet<int> = [1, 4, 3, 5, 2].iter().map(|&x| x).collect();\n\n ///\n\n /// // Not possible with a regular `.iter()`\n\n /// let v: Vec<int> = set.into_iter().collect();\n\n /// assert_eq!(v, vec![1, 2, 3, 4, 5]);\n\n /// ```\n\n #[inline]\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n", "file_path": "src/tree/set.rs", "rank": 97, "score": 9.400497284884027 }, { "content": " }\n\n }\n\n}\n\n\n\n#[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\nimpl<'a, 'b> ops::BitOr<&'b TrieSet> for &'a TrieSet {\n\n type Output = TrieSet;\n\n\n\n /// Returns the union of `self` and `rhs` as a new `TrieSet`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use collect::TrieSet;\n\n ///\n\n /// let a: TrieSet = vec![1, 2, 3].into_iter().collect();\n\n /// let b: TrieSet = vec![3, 4, 5].into_iter().collect();\n\n ///\n\n /// let set: TrieSet = &a | &b;\n\n /// let v: Vec<uint> = set.iter().collect();\n", "file_path": "src/trie/set.rs", "rank": 98, "score": 9.316600933049331 }, { "content": " (*node).prev = self.head;\n\n (*self.head).next = node;\n\n (*(*node).next).prev = node;\n\n }\n\n }\n\n\n\n /// Return the number of key-value pairs in the cache.\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn len(&self) -> uint { self.map.len() }\n\n\n\n /// Returns whether the cache is currently empty.\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn is_empty(&self) -> bool { self.len() == 0 }\n\n\n\n /// Clear the cache of all key-value pairs.\n\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n\n pub fn clear(&mut self) { self.map.clear(); }\n\n\n\n}\n\n\n", "file_path": "src/lru_cache.rs", "rank": 99, "score": 
9.3031823328872 } ]
Rust
postgres/src/transaction.rs
dvic/rust-postgres
5d08af01ec520cba1a8642cb7c66ce070b03f4ca
use crate::{ CancelToken, CopyInWriter, CopyOutReader, GenericClient, Portal, RowIter, Rt, Statement, ToStatement, }; use tokio::runtime::Runtime; use tokio_postgres::types::{ToSql, Type}; use tokio_postgres::{Error, Row, SimpleQueryMessage}; pub struct Transaction<'a> { runtime: &'a mut Runtime, transaction: tokio_postgres::Transaction<'a>, } impl<'a> Transaction<'a> { pub(crate) fn new( runtime: &'a mut Runtime, transaction: tokio_postgres::Transaction<'a>, ) -> Transaction<'a> { Transaction { runtime, transaction, } } fn rt(&mut self) -> Rt<'_> { Rt(self.runtime) } pub fn commit(self) -> Result<(), Error> { self.runtime.block_on(self.transaction.commit()) } pub fn rollback(self) -> Result<(), Error> { self.runtime.block_on(self.transaction.rollback()) } pub fn prepare(&mut self, query: &str) -> Result<Statement, Error> { self.runtime.block_on(self.transaction.prepare(query)) } pub fn prepare_typed(&mut self, query: &str, types: &[Type]) -> Result<Statement, Error> { self.runtime .block_on(self.transaction.prepare_typed(query, types)) } pub fn execute<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error> where T: ?Sized + ToStatement, { self.runtime .block_on(self.transaction.execute(query, params)) } pub fn query<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Vec<Row>, Error> where T: ?Sized + ToStatement, { self.runtime.block_on(self.transaction.query(query, params)) } pub fn query_one<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Row, Error> where T: ?Sized + ToStatement, { self.runtime .block_on(self.transaction.query_one(query, params)) } pub fn query_opt<T>( &mut self, query: &T, params: &[&(dyn ToSql + Sync)], ) -> Result<Option<Row>, Error> where T: ?Sized + ToStatement, { self.runtime .block_on(self.transaction.query_opt(query, params)) } pub fn query_raw<'b, T, I>(&mut self, query: &T, params: I) -> Result<RowIter<'_>, Error> where T: ?Sized + ToStatement, I: IntoIterator<Item = &'b dyn 
ToSql>, I::IntoIter: ExactSizeIterator, { let stream = self .runtime .block_on(self.transaction.query_raw(query, params))?; Ok(RowIter::new(self.rt(), stream)) } pub fn bind<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Portal, Error> where T: ?Sized + ToStatement, { self.runtime.block_on(self.transaction.bind(query, params)) } pub fn query_portal(&mut self, portal: &Portal, max_rows: i32) -> Result<Vec<Row>, Error> { self.runtime .block_on(self.transaction.query_portal(portal, max_rows)) } pub fn query_portal_raw( &mut self, portal: &Portal, max_rows: i32, ) -> Result<RowIter<'_>, Error> { let stream = self .runtime .block_on(self.transaction.query_portal_raw(portal, max_rows))?; Ok(RowIter::new(self.rt(), stream)) } pub fn copy_in<T>(&mut self, query: &T) -> Result<CopyInWriter<'_>, Error> where T: ?Sized + ToStatement, { let sink = self.runtime.block_on(self.transaction.copy_in(query))?; Ok(CopyInWriter::new(self.rt(), sink)) } pub fn copy_out<T>(&mut self, query: &T) -> Result<CopyOutReader<'_>, Error> where T: ?Sized + ToStatement, { let stream = self.runtime.block_on(self.transaction.copy_out(query))?; Ok(CopyOutReader::new(self.rt(), stream)) } pub fn simple_query(&mut self, query: &str) -> Result<Vec<SimpleQueryMessage>, Error> { self.runtime.block_on(self.transaction.simple_query(query)) } pub fn batch_execute(&mut self, query: &str) -> Result<(), Error> { self.runtime.block_on(self.transaction.batch_execute(query)) } pub fn cancel_token(&self) -> CancelToken { CancelToken::new(self.transaction.cancel_token()) } pub fn transaction(&mut self) -> Result<Transaction<'_>, Error> { let transaction = self.runtime.block_on(self.transaction.transaction())?; Ok(Transaction { runtime: self.runtime, transaction, }) } } impl<'a> GenericClient for Transaction<'a> { fn execute<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error> where T: ?Sized + ToStatement, { self.execute(query, params) } fn query<T>(&mut self, query: &T, 
params: &[&(dyn ToSql + Sync)]) -> Result<Vec<Row>, Error> where T: ?Sized + ToStatement, { self.query(query, params) } fn prepare(&mut self, query: &str) -> Result<Statement, Error> { self.prepare(query) } fn transaction(&mut self) -> Result<Transaction<'_>, Error> { self.transaction() } }
use crate::{ CancelToken, CopyInWriter, CopyOutReader, GenericClient, Portal, RowIter, Rt, Statement, ToStatement, }; use tokio::runtime::Runtime; use tokio_postgres::types::{ToSql, Type}; use tokio_postgres::{Error, Row, SimpleQueryMessage}; pub struct Transaction<'a> { runtime: &'a mut Runtime, transaction: tokio_postgres::Transaction<'a>, } impl<'a> Transaction<'a> { pub(crate) fn new( runtime: &'a mut Runtime, transaction: tokio_postgres::Transaction<'a>, ) -> Transaction<'a> { Transaction { runtime, transaction, } } fn rt(&mut self) -> Rt<'_> { Rt(self.runtime) } pub fn commit(self) -> Result<(), Error> { self.runtime.block_on(self.transaction.commit()) } pub fn rollback(self) -> Result<(), Error> { self.runtime.block_on(self.transaction.rollback()) } pub fn prepare(&mut self, query: &str) -> Result<Statement, Error> { self.runtime.block_on(self.transaction.prepare(query)) } pub fn prepare_typed(&mut self, query: &str, types: &[Type]) -> Result<Statement, Error> { self.runtime .block_on(self.transaction.prepare_typed(query, types)) } pub fn execute<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error> where T: ?Sized + ToStatement, { self.runtime .block_on(self.transaction.execute(query, params)) } pub fn query<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Vec<Row>, Error> where T: ?Sized + ToStatement, { self.runtime.block_on(self.transaction.query(query, params)) } pub fn query_one<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Row, Error> where T: ?Sized + ToStatement, { self.runtime .block_on(self.transaction.query_one(query, params)) } pub fn query_opt<T>( &mut self, query: &T, params: &[&(dyn ToSql + Sync)], ) -> Result<Option<Row>, Error> where T: ?Sized + ToStatement, { self.runtime .block_on(self.transaction.query_opt(query, params)) } pub fn query_raw<'b, T, I>(&mut self, query: &T, params: I) -> Result<RowIter<'_>, Error> where T: ?Sized + ToStatement, I: IntoIterator<Item = &'b dyn 
ToSql>, I::IntoIter: ExactSizeIterator, { let stream = self .runtime .block_on(self.transaction.query_raw(query, params))?; Ok(RowIter::new(self.rt(), stream)) } pub fn bind<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Portal, Error> where T: ?Sized + ToStatement, { self.runtime.block_on(self.transaction.bind(query, params)) } pub fn query_portal(&mut self, portal: &Portal, max_rows: i32) -> Result<Vec<Row>, Error> { self.runtime .block_on(self.transaction.query_portal(portal, max_rows)) } pub fn query_portal_raw( &mut self, portal: &Portal, max_rows: i32, ) -> Result<RowIter<'_>, Error> { let stream = self .runtime .block_on(self.transaction.query_portal_raw(portal, max_rows))?; Ok(RowIter::new(self.rt(), stream)) } pub fn copy_in<T>(&mut self, query: &T) -> Result<CopyInWriter<'_>, Error> where T: ?Sized + ToStatement, { let sink = self.runtime.block_on(self.transaction.copy_in(query))?; Ok(CopyInWriter::new(self.rt(), sink)) } pub fn copy_out<T>(&mut self, query: &T) -> Result<CopyOutReader<'_>, Error> where T: ?Sized + ToStatement, { let stream = self.runtime.block_on(self.transaction.copy_out(query))?; Ok(CopyOutReader::new(self.rt(), stream)) } pub fn simple_query(&mut self, query: &str) -> Result<Vec<SimpleQueryMessage>, Error> { self.runtime.block_on(self.transaction.simple_query(query)) } pub fn batch_execute(&mut self, query: &str) -> Result<(), Error> { self.runtime.block_on(self.transaction.batch_execute(query)) } pub fn cancel_token(&self) -> CancelToken { CancelToken::new(self.transaction.cancel_token()) } pub fn transaction(&mut self) -> Result<Transaction<'_>, Error> { let transaction = self.runtime.block_on(self.transactio
s) } fn query<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<Vec<Row>, Error> where T: ?Sized + ToStatement, { self.query(query, params) } fn prepare(&mut self, query: &str) -> Result<Statement, Error> { self.prepare(query) } fn transaction(&mut self) -> Result<Transaction<'_>, Error> { self.transaction() } }
n.transaction())?; Ok(Transaction { runtime: self.runtime, transaction, }) } } impl<'a> GenericClient for Transaction<'a> { fn execute<T>(&mut self, query: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error> where T: ?Sized + ToStatement, { self.execute(query, param
random
[ { "content": "pub fn read_be_i32(buf: &mut &[u8]) -> Result<i32, Box<dyn Error + Sync + Send>> {\n\n if buf.len() < 4 {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n let mut bytes = [0; 4];\n\n bytes.copy_from_slice(&buf[..4]);\n\n *buf = &buf[4..];\n\n Ok(i32::from_be_bytes(bytes))\n\n}\n\n\n", "file_path": "postgres-types/src/private.rs", "rank": 0, "score": 487243.8452265913 }, { "content": "#[inline]\n\npub fn date_from_sql(mut buf: &[u8]) -> Result<i32, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i32::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid message length: date not drained\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes a `TIME` or `TIMETZ` value.\n\n///\n\n/// The value should represent the number of microseconds since midnight.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 1, "score": 464365.55580535054 }, { "content": "#[inline]\n\npub fn int4_from_sql(mut buf: &[u8]) -> Result<i32, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i32::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes an `OID` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 2, "score": 464365.5558053505 }, { "content": "#[inline]\n\npub fn execute(portal: &str, max_rows: i32, buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'E');\n\n write_body(buf, |buf| {\n\n write_cstr(portal.as_bytes(), buf)?;\n\n buf.put_i32(max_rows);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 3, "score": 434256.932953694 }, { "content": "#[inline]\n\npub fn text_from_sql(buf: &[u8]) -> Result<&str, StdBox<dyn Error + Sync + Send>> {\n\n Ok(str::from_utf8(buf)?)\n\n}\n\n\n\n/// Serializes a `\"char\"` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 4, "score": 420723.4119467739 }, { "content": "fn write_pascal_string(s: &str, buf: &mut BytesMut) -> 
Result<(), StdBox<dyn Error + Sync + Send>> {\n\n let size = i32::from_usize(s.len())?;\n\n buf.put_i32(size);\n\n buf.put_slice(s.as_bytes());\n\n Ok(())\n\n}\n\n\n\n/// Deserializes an `HSTORE` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 5, "score": 417600.88437930145 }, { "content": "#[inline]\n\npub fn float4_from_sql(mut buf: &[u8]) -> Result<f32, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_f32::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes a `FLOAT8` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 6, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn float8_from_sql(mut buf: &[u8]) -> Result<f64, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_f64::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes an `HSTORE` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 7, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn time_from_sql(mut buf: &[u8]) -> Result<i64, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i64::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid message length: time not drained\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes a `MACADDR` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 8, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn point_from_sql(mut buf: &[u8]) -> Result<Point, StdBox<dyn Error + Sync + Send>> {\n\n let x = buf.read_f64::<BigEndian>()?;\n\n let y = buf.read_f64::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(Point { x, y })\n\n}\n\n\n\n/// A Postgres point.\n\n#[derive(Copy, Clone)]\n\npub struct Point {\n\n x: f64,\n\n y: f64,\n\n}\n\n\n\nimpl Point {\n\n /// Returns the x coordinate of the point.\n\n #[inline]\n\n pub fn x(&self) 
-> f64 {\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 9, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn int8_from_sql(mut buf: &[u8]) -> Result<i64, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i64::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes a `FLOAT4` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 10, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn inet_from_sql(mut buf: &[u8]) -> Result<Inet, StdBox<dyn Error + Sync + Send>> {\n\n let family = buf.read_u8()?;\n\n let netmask = buf.read_u8()?;\n\n buf.read_u8()?; // is_cidr\n\n let len = buf.read_u8()?;\n\n\n\n let addr = match family {\n\n PGSQL_AF_INET => {\n\n if netmask > 32 {\n\n return Err(\"invalid IPv4 netmask\".into());\n\n }\n\n if len != 4 {\n\n return Err(\"invalid IPv4 address length\".into());\n\n }\n\n let mut addr = [0; 4];\n\n buf.read_exact(&mut addr)?;\n\n IpAddr::V4(Ipv4Addr::from(addr))\n\n }\n\n PGSQL_AF_INET6 => {\n\n if netmask > 128 {\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 11, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn timestamp_from_sql(mut buf: &[u8]) -> Result<i64, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i64::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid message length: timestamp not drained\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes a `DATE` value.\n\n///\n\n/// The value should represent the number of days since January 1st, 2000.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 12, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn char_from_sql(mut buf: &[u8]) -> Result<i8, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i8()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes an `INT2` value.\n", "file_path": 
"postgres-protocol/src/types/mod.rs", "rank": 13, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn int2_from_sql(mut buf: &[u8]) -> Result<i16, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_i16::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes an `INT4` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 14, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn box_from_sql(mut buf: &[u8]) -> Result<Box, StdBox<dyn Error + Sync + Send>> {\n\n let x1 = buf.read_f64::<BigEndian>()?;\n\n let y1 = buf.read_f64::<BigEndian>()?;\n\n let x2 = buf.read_f64::<BigEndian>()?;\n\n let y2 = buf.read_f64::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(Box {\n\n upper_right: Point { x: x1, y: y1 },\n\n lower_left: Point { x: x2, y: y2 },\n\n })\n\n}\n\n\n\n/// A Postgres box.\n\n#[derive(Copy, Clone)]\n\npub struct Box {\n\n upper_right: Point,\n\n lower_left: Point,\n\n}\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 15, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn oid_from_sql(mut buf: &[u8]) -> Result<Oid, StdBox<dyn Error + Sync + Send>> {\n\n let v = buf.read_u32::<BigEndian>()?;\n\n if !buf.is_empty() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// Serializes an `INT8` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 16, "score": 409086.8345690678 }, { "content": "#[inline]\n\npub fn array_from_sql<'a>(mut buf: &'a [u8]) -> Result<Array<'a>, StdBox<dyn Error + Sync + Send>> {\n\n let dimensions = buf.read_i32::<BigEndian>()?;\n\n if dimensions < 0 {\n\n return Err(\"invalid dimension count\".into());\n\n }\n\n let has_nulls = buf.read_i32::<BigEndian>()? 
!= 0;\n\n let element_type = buf.read_u32::<BigEndian>()?;\n\n\n\n let mut r = buf;\n\n let mut elements = 1i32;\n\n for _ in 0..dimensions {\n\n let len = r.read_i32::<BigEndian>()?;\n\n if len < 0 {\n\n return Err(\"invalid dimension size\".into());\n\n }\n\n let _lower_bound = r.read_i32::<BigEndian>()?;\n\n elements = match elements.checked_mul(len) {\n\n Some(elements) => elements,\n\n None => return Err(\"too many array elements\".into()),\n\n };\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 17, "score": 393884.51048788667 }, { "content": "#[inline]\n\npub fn path_from_sql<'a>(mut buf: &'a [u8]) -> Result<Path<'a>, StdBox<dyn Error + Sync + Send>> {\n\n let closed = buf.read_u8()? != 0;\n\n let points = buf.read_i32::<BigEndian>()?;\n\n\n\n Ok(Path {\n\n closed,\n\n points,\n\n buf,\n\n })\n\n}\n\n\n\n/// A Postgres point.\n\npub struct Path<'a> {\n\n closed: bool,\n\n points: i32,\n\n buf: &'a [u8],\n\n}\n\n\n\nimpl<'a> Path<'a> {\n\n /// Determines if the path is closed or open.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 18, "score": 393884.51048788667 }, { "content": "#[inline]\n\npub fn range_from_sql<'a>(mut buf: &'a [u8]) -> Result<Range<'a>, StdBox<dyn Error + Sync + Send>> {\n\n let tag = buf.read_u8()?;\n\n\n\n if tag == RANGE_EMPTY {\n\n if !buf.is_empty() {\n\n return Err(\"invalid message size\".into());\n\n }\n\n return Ok(Range::Empty);\n\n }\n\n\n\n let lower = read_bound(&mut buf, tag, RANGE_LOWER_UNBOUNDED, RANGE_LOWER_INCLUSIVE)?;\n\n let upper = read_bound(&mut buf, tag, RANGE_UPPER_UNBOUNDED, RANGE_UPPER_INCLUSIVE)?;\n\n\n\n if !buf.is_empty() {\n\n return Err(\"invalid message size\".into());\n\n }\n\n\n\n Ok(Range::Nonempty(lower, upper))\n\n}\n\n\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 19, "score": 393884.5104878866 }, { "content": "fn downcast(len: usize) -> Result<i32, Box<dyn Error + Sync + Send>> {\n\n if len > i32::max_value() as usize {\n\n Err(\"value too large to 
transmit\".into())\n\n } else {\n\n Ok(len as i32)\n\n }\n\n}\n", "file_path": "postgres-types/src/lib.rs", "rank": 20, "score": 390241.79258100735 }, { "content": "#[inline]\n\npub fn parse<I>(name: &str, query: &str, param_types: I, buf: &mut BytesMut) -> io::Result<()>\n\nwhere\n\n I: IntoIterator<Item = Oid>,\n\n{\n\n buf.put_u8(b'P');\n\n write_body(buf, |buf| {\n\n write_cstr(name.as_bytes(), buf)?;\n\n write_cstr(query.as_bytes(), buf)?;\n\n write_counted(\n\n param_types,\n\n |t, buf| {\n\n buf.put_u32(t);\n\n Ok::<_, io::Error>(())\n\n },\n\n buf,\n\n )?;\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 21, "score": 390135.2737072218 }, { "content": "pub fn encode<'a, I>(client: &InnerClient, statement: &Statement, params: I) -> Result<Bytes, Error>\n\nwhere\n\n I: IntoIterator<Item = &'a dyn ToSql>,\n\n I::IntoIter: ExactSizeIterator,\n\n{\n\n client.with_buf(|buf| {\n\n encode_bind(statement, params, \"\", buf)?;\n\n frontend::execute(\"\", 0, buf).map_err(Error::encode)?;\n\n frontend::sync(buf);\n\n Ok(buf.split().freeze())\n\n })\n\n}\n\n\n", "file_path": "tokio-postgres/src/query.rs", "rank": 22, "score": 369921.5106323971 }, { "content": "#[inline]\n\npub fn bool_from_sql(buf: &[u8]) -> Result<bool, StdBox<dyn Error + Sync + Send>> {\n\n if buf.len() != 1 {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n\n\n Ok(buf[0] != 0)\n\n}\n\n\n\n/// Serializes a `BYTEA` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 23, "score": 364187.3253439196 }, { "content": "#[inline]\n\npub fn uuid_from_sql(buf: &[u8]) -> Result<[u8; 16], StdBox<dyn Error + Sync + Send>> {\n\n if buf.len() != 16 {\n\n return Err(\"invalid message length: uuid size mismatch\".into());\n\n }\n\n let mut out = [0; 16];\n\n out.copy_from_slice(buf);\n\n Ok(out)\n\n}\n\n\n\n/// Serializes an array value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 24, "score": 359184.44143655256 }, { 
"content": "#[inline]\n\npub fn macaddr_from_sql(buf: &[u8]) -> Result<[u8; 6], StdBox<dyn Error + Sync + Send>> {\n\n if buf.len() != 6 {\n\n return Err(\"invalid message length: macaddr length mismatch\".into());\n\n }\n\n let mut out = [0; 6];\n\n out.copy_from_slice(buf);\n\n Ok(out)\n\n}\n\n\n\n/// Serializes a `UUID` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 25, "score": 359184.44143655256 }, { "content": "#[inline]\n\npub fn query(query: &str, buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'Q');\n\n write_body(buf, |buf| write_cstr(query.as_bytes(), buf))\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 26, "score": 353019.3373603685 }, { "content": "fn encode(client: &InnerClient, name: &str, query: &str, types: &[Type]) -> Result<Bytes, Error> {\n\n if types.is_empty() {\n\n debug!(\"preparing query {}: {}\", name, query);\n\n } else {\n\n debug!(\"preparing query {} with types {:?}: {}\", name, types, query);\n\n }\n\n\n\n client.with_buf(|buf| {\n\n frontend::parse(name, query, types.iter().map(Type::oid), buf).map_err(Error::encode)?;\n\n frontend::describe(b'S', &name, buf).map_err(Error::encode)?;\n\n frontend::sync(buf);\n\n Ok(buf.split().freeze())\n\n })\n\n}\n\n\n\nasync fn get_type(client: &Arc<InnerClient>, oid: Oid) -> Result<Type, Error> {\n\n if let Some(type_) = Type::from_oid(oid) {\n\n return Ok(type_);\n\n }\n\n\n", "file_path": "tokio-postgres/src/prepare.rs", "rank": 27, "score": 341486.1149530444 }, { "content": "pub fn expand_derive_tosql(input: DeriveInput) -> Result<TokenStream, Error> {\n\n let overrides = Overrides::extract(&input.attrs)?;\n\n\n\n let name = overrides.name.unwrap_or_else(|| input.ident.to_string());\n\n\n\n let (accepts_body, to_sql_body) = match input.data {\n\n Data::Enum(ref data) => {\n\n let variants = data\n\n .variants\n\n .iter()\n\n .map(Variant::parse)\n\n .collect::<Result<Vec<_>, _>>()?;\n\n (\n\n accepts::enum_body(&name, 
&variants),\n\n enum_body(&input.ident, &variants),\n\n )\n\n }\n\n Data::Struct(DataStruct {\n\n fields: Fields::Unnamed(ref fields),\n\n ..\n", "file_path": "postgres-derive/src/tosql.rs", "rank": 28, "score": 325311.49685094657 }, { "content": "#[inline]\n\npub fn copy_fail(message: &str, buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'f');\n\n write_body(buf, |buf| write_cstr(message.as_bytes(), buf))\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 29, "score": 303156.473121075 }, { "content": "#[inline]\n\npub fn int4_to_sql(v: i32, buf: &mut BytesMut) {\n\n buf.put_i32(v);\n\n}\n\n\n\n/// Deserializes an `INT4` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 30, "score": 295943.2159273333 }, { "content": "#[inline]\n\npub fn date_to_sql(v: i32, buf: &mut BytesMut) {\n\n buf.put_i32(v);\n\n}\n\n\n\n/// Deserializes a `DATE` value.\n\n///\n\n/// The value represents the number of days since January 1st, 2000.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 31, "score": 295943.2159273333 }, { "content": "#[inline]\n\npub fn text_to_sql(v: &str, buf: &mut BytesMut) {\n\n buf.put_slice(v.as_bytes());\n\n}\n\n\n\n/// Deserializes a `TEXT`, `VARCHAR`, `CHAR(n)`, `NAME`, or `CITEXT` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 32, "score": 295674.9080698228 }, { "content": "fn encode(client: &InnerClient, query: &str) -> Result<Bytes, Error> {\n\n client.with_buf(|buf| {\n\n frontend::query(query, buf).map_err(Error::encode)?;\n\n Ok(buf.split().freeze())\n\n })\n\n}\n\n\n\npin_project! 
{\n\n /// A stream of simple query results.\n\n pub struct SimpleQueryStream {\n\n responses: Responses,\n\n columns: Option<Arc<[String]>>,\n\n #[pin]\n\n _p: PhantomPinned,\n\n }\n\n}\n\n\n\nimpl Stream for SimpleQueryStream {\n\n type Item = Result<SimpleQueryMessage, Error>;\n\n\n", "file_path": "tokio-postgres/src/simple_query.rs", "rank": 33, "score": 290837.8672955741 }, { "content": "#[inline]\n\npub fn close(variant: u8, name: &str, buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'C');\n\n write_body(buf, |buf| {\n\n buf.put_u8(variant);\n\n write_cstr(name.as_bytes(), buf)\n\n })\n\n}\n\n\n\npub struct CopyData<T> {\n\n buf: T,\n\n len: i32,\n\n}\n\n\n\nimpl<T> CopyData<T>\n\nwhere\n\n T: Buf,\n\n{\n\n pub fn new(buf: T) -> io::Result<CopyData<T>> {\n\n let len = buf\n\n .remaining()\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 34, "score": 290657.90963345196 }, { "content": "#[inline]\n\npub fn describe(variant: u8, name: &str, buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'D');\n\n write_body(buf, |buf| {\n\n buf.put_u8(variant);\n\n write_cstr(name.as_bytes(), buf)\n\n })\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 35, "score": 290657.90963345196 }, { "content": "#[inline]\n\npub fn sasl_initial_response(mechanism: &str, data: &[u8], buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'p');\n\n write_body(buf, |buf| {\n\n write_cstr(mechanism.as_bytes(), buf)?;\n\n let len = i32::from_usize(data.len())?;\n\n buf.put_i32(len);\n\n buf.put_slice(data);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 36, "score": 284522.1644990844 }, { "content": "pub fn expand_derive_fromsql(input: DeriveInput) -> Result<TokenStream, Error> {\n\n let overrides = Overrides::extract(&input.attrs)?;\n\n\n\n let name = overrides.name.unwrap_or_else(|| input.ident.to_string());\n\n\n\n let (accepts_body, to_sql_body) = match input.data {\n\n 
Data::Enum(ref data) => {\n\n let variants = data\n\n .variants\n\n .iter()\n\n .map(Variant::parse)\n\n .collect::<Result<Vec<_>, _>>()?;\n\n (\n\n accepts::enum_body(&name, &variants),\n\n enum_body(&input.ident, &variants),\n\n )\n\n }\n\n Data::Struct(DataStruct {\n\n fields: Fields::Unnamed(ref fields),\n\n ..\n", "file_path": "postgres-derive/src/fromsql.rs", "rank": 37, "score": 278692.74349916936 }, { "content": "#[inline]\n\npub fn sync(buf: &mut BytesMut) {\n\n buf.put_u8(b'S');\n\n write_body(buf, |_| Ok::<(), io::Error>(())).unwrap();\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 38, "score": 277333.97224324744 }, { "content": "#[inline]\n\npub fn cancel_request(process_id: i32, secret_key: i32, buf: &mut BytesMut) {\n\n write_body(buf, |buf| {\n\n buf.put_i32(80_877_102);\n\n buf.put_i32(process_id);\n\n buf.put_i32(secret_key);\n\n Ok::<_, io::Error>(())\n\n })\n\n .unwrap();\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 39, "score": 260502.48923864914 }, { "content": "pub fn test_type<T, S>(conn: &mut Client, sql_type: &str, checks: &[(T, S)])\n\nwhere\n\n T: PartialEq + FromSqlOwned + ToSql + Sync,\n\n S: fmt::Display,\n\n{\n\n for &(ref val, ref repr) in checks.iter() {\n\n let stmt = conn\n\n .prepare(&*format!(\"SELECT {}::{}\", *repr, sql_type))\n\n .unwrap();\n\n let result = conn.query_one(&stmt, &[]).unwrap().get(0);\n\n assert_eq!(val, &result);\n\n\n\n let stmt = conn.prepare(&*format!(\"SELECT $1::{}\", sql_type)).unwrap();\n\n let result = conn.query_one(&stmt, &[val]).unwrap().get(0);\n\n assert_eq!(val, &result);\n\n }\n\n}\n\n\n", "file_path": "postgres-derive-test/src/lib.rs", "rank": 40, "score": 256226.69733876205 }, { "content": "#[inline]\n\nfn write_cstr(s: &[u8], buf: &mut BytesMut) -> Result<(), io::Error> {\n\n if s.contains(&0) {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"string contains embedded null\",\n\n ));\n\n }\n\n 
buf.put_slice(s);\n\n buf.put_u8(0);\n\n Ok(())\n\n}\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 41, "score": 247079.80485320155 }, { "content": "#[inline]\n\npub fn password_message(password: &[u8], buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'p');\n\n write_body(buf, |buf| write_cstr(password, buf))\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 42, "score": 243371.63338950253 }, { "content": "#[inline]\n\npub fn sasl_response(data: &[u8], buf: &mut BytesMut) -> io::Result<()> {\n\n buf.put_u8(b'p');\n\n write_body(buf, |buf| {\n\n buf.put_slice(data);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 43, "score": 243371.63338950253 }, { "content": "#[inline]\n\npub fn empty_range_to_sql(buf: &mut BytesMut) {\n\n buf.put_u8(RANGE_EMPTY);\n\n}\n\n\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 44, "score": 239075.49298499903 }, { "content": "#[inline]\n\npub fn startup_message<'a, I>(parameters: I, buf: &mut BytesMut) -> io::Result<()>\n\nwhere\n\n I: IntoIterator<Item = (&'a str, &'a str)>,\n\n{\n\n write_body(buf, |buf| {\n\n buf.put_i32(196_608);\n\n for (key, value) in parameters {\n\n write_cstr(key.as_bytes(), buf)?;\n\n write_cstr(value.as_bytes(), buf)?;\n\n }\n\n buf.put_u8(0);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 45, "score": 238721.01756159973 }, { "content": "pub fn composite_body(name: &str, trait_: &str, fields: &[Field]) -> TokenStream {\n\n let num_fields = fields.len();\n\n let trait_ = Ident::new(trait_, Span::call_site());\n\n let traits = iter::repeat(&trait_);\n\n let field_names = fields.iter().map(|f| &f.name);\n\n let field_types = fields.iter().map(|f| &f.type_);\n\n\n\n quote! 
{\n\n if type_.name() != #name {\n\n return false;\n\n }\n\n\n\n match *type_.kind() {\n\n ::postgres_types::Kind::Composite(ref fields) => {\n\n if fields.len() != #num_fields {\n\n return false;\n\n }\n\n\n\n fields.iter().all(|f| {\n\n match f.name() {\n", "file_path": "postgres-derive/src/accepts.rs", "rank": 46, "score": 231793.85261329502 }, { "content": "#[inline]\n\npub fn oid_to_sql(v: Oid, buf: &mut BytesMut) {\n\n buf.put_u32(v);\n\n}\n\n\n\n/// Deserializes an `OID` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 47, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn bytea_to_sql(v: &[u8], buf: &mut BytesMut) {\n\n buf.put_slice(v);\n\n}\n\n\n\n/// Deserializes a `BYTEA value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 48, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn int2_to_sql(v: i16, buf: &mut BytesMut) {\n\n buf.put_i16(v);\n\n}\n\n\n\n/// Deserializes an `INT2` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 49, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn bool_to_sql(v: bool, buf: &mut BytesMut) {\n\n buf.put_u8(v as u8);\n\n}\n\n\n\n/// Deserializes a `BOOL` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 50, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn int8_to_sql(v: i64, buf: &mut BytesMut) {\n\n buf.put_i64(v);\n\n}\n\n\n\n/// Deserializes an `INT8` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 51, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn timestamp_to_sql(v: i64, buf: &mut BytesMut) {\n\n buf.put_i64(v);\n\n}\n\n\n\n/// Deserializes a `TIMESTAMP` or `TIMESTAMPTZ` value.\n\n///\n\n/// The value represents the number of microseconds since midnight, January 1st, 2000.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 52, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn time_to_sql(v: i64, buf: &mut BytesMut) {\n\n 
buf.put_i64(v);\n\n}\n\n\n\n/// Deserializes a `TIME` or `TIMETZ` value.\n\n///\n\n/// The value represents the number of microseconds since midnight.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 53, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn float8_to_sql(v: f64, buf: &mut BytesMut) {\n\n buf.put_f64(v);\n\n}\n\n\n\n/// Deserializes a `FLOAT8` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 54, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn char_to_sql(v: i8, buf: &mut BytesMut) {\n\n buf.put_i8(v);\n\n}\n\n\n\n/// Deserializes a `\"char\"` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 55, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn float4_to_sql(v: f32, buf: &mut BytesMut) {\n\n buf.put_f32(v);\n\n}\n\n\n\n/// Deserializes a `FLOAT4` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 56, "score": 231431.6906307917 }, { "content": "#[inline]\n\npub fn uuid_to_sql(v: [u8; 16], buf: &mut BytesMut) {\n\n buf.put_slice(&v);\n\n}\n\n\n\n/// Deserializes a `UUID` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 57, "score": 228103.39449558363 }, { "content": "#[inline]\n\npub fn macaddr_to_sql(v: [u8; 6], buf: &mut BytesMut) {\n\n buf.put_slice(&v);\n\n}\n\n\n\n/// Deserializes a `MACADDR` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 58, "score": 228103.39449558363 }, { "content": "fn make_consts(w: &mut BufWriter<File>, types: &BTreeMap<u32, Type>) {\n\n write!(w, \"impl Type {{\").unwrap();\n\n for type_ in types.values() {\n\n write!(\n\n w,\n\n \"\n\n /// {docs}\n\n pub const {ident}: Type = Type(Inner::{variant});\n\n\",\n\n docs = type_.doc,\n\n ident = type_.ident,\n\n variant = type_.variant\n\n )\n\n .unwrap();\n\n }\n\n\n\n write!(w, \"}}\").unwrap();\n\n}\n", "file_path": "codegen/src/type_gen.rs", "rank": 59, "score": 223899.67923311197 }, { "content": "fn make_enum(w: &mut 
BufWriter<File>, types: &BTreeMap<u32, Type>) {\n\n write!(\n\n w,\n\n \"\n\n#[derive(PartialEq, Eq, Clone, Debug, Hash)]\n\npub enum Inner {{\"\n\n )\n\n .unwrap();\n\n\n\n for type_ in types.values() {\n\n write!(\n\n w,\n\n \"\n\n {},\",\n\n type_.variant\n\n )\n\n .unwrap();\n\n }\n\n\n\n write!(\n\n w,\n\n r\"\n\n Other(Arc<Other>),\n\n}}\n\n\n\n\"\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 60, "score": 223899.67923311197 }, { "content": "fn make_impl(w: &mut BufWriter<File>, types: &BTreeMap<u32, Type>) {\n\n write!(\n\n w,\n\n \"impl Inner {{\n\n pub fn from_oid(oid: Oid) -> Option<Inner> {{\n\n match oid {{\n\n\",\n\n )\n\n .unwrap();\n\n\n\n for (oid, type_) in types {\n\n write!(\n\n w,\n\n \" {} => Some(Inner::{}),\n\n\",\n\n oid, type_.variant\n\n )\n\n .unwrap();\n\n }\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 61, "score": 223899.67923311197 }, { "content": "pub fn enum_body(name: &str, variants: &[Variant]) -> TokenStream {\n\n let num_variants = variants.len();\n\n let variant_names = variants.iter().map(|v| &v.name);\n\n\n\n quote! {\n\n if type_.name() != #name {\n\n return false;\n\n }\n\n\n\n match *type_.kind() {\n\n ::postgres_types::Kind::Enum(ref variants) => {\n\n if variants.len() != #num_variants {\n\n return false;\n\n }\n\n\n\n variants.iter().all(|v| {\n\n match &**v {\n\n #(\n\n #variant_names => true,\n\n )*\n\n _ => false,\n\n }\n\n })\n\n }\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "postgres-derive/src/accepts.rs", "rank": 62, "score": 222965.3410985672 }, { "content": "#[inline]\n\npub fn point_to_sql(x: f64, y: f64, buf: &mut BytesMut) {\n\n buf.put_f64(x);\n\n buf.put_f64(y);\n\n}\n\n\n\n/// Deserializes a point value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 63, "score": 222205.54834576373 }, { "content": "pub fn domain_body(name: &str, field: &syn::Field) -> TokenStream {\n\n let ty = &field.ty;\n\n\n\n quote! 
{\n\n if type_.name() != #name {\n\n return false;\n\n }\n\n\n\n match *type_.kind() {\n\n ::postgres_types::Kind::Domain(ref type_) => {\n\n <#ty as ::postgres_types::ToSql>::accepts(type_)\n\n }\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "postgres-derive/src/accepts.rs", "rank": 64, "score": 216615.15105323045 }, { "content": "#[proc_macro_derive(ToSql, attributes(postgres))]\n\npub fn derive_tosql(input: TokenStream) -> TokenStream {\n\n let input = syn::parse(input).unwrap();\n\n tosql::expand_derive_tosql(input)\n\n .unwrap_or_else(|e| e.to_compile_error())\n\n .into()\n\n}\n\n\n", "file_path": "postgres-derive/src/lib.rs", "rank": 65, "score": 214592.59082097563 }, { "content": "#[inline]\n\npub fn inet_to_sql(addr: IpAddr, netmask: u8, buf: &mut BytesMut) {\n\n let family = match addr {\n\n IpAddr::V4(_) => PGSQL_AF_INET,\n\n IpAddr::V6(_) => PGSQL_AF_INET6,\n\n };\n\n buf.put_u8(family);\n\n buf.put_u8(netmask);\n\n buf.put_u8(0); // is_cidr\n\n match addr {\n\n IpAddr::V4(addr) => {\n\n buf.put_u8(4);\n\n buf.put_slice(&addr.octets());\n\n }\n\n IpAddr::V6(addr) => {\n\n buf.put_u8(16);\n\n buf.put_slice(&addr.octets());\n\n }\n\n }\n\n}\n\n\n\n/// Deserializes a Postgres inet.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 66, "score": 214286.15486367163 }, { "content": "#[inline]\n\npub fn terminate(buf: &mut BytesMut) {\n\n buf.put_u8(b'X');\n\n write_body(buf, |_| Ok::<(), io::Error>(())).unwrap();\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 67, "score": 208792.89754381482 }, { "content": "#[inline]\n\npub fn ssl_request(buf: &mut BytesMut) {\n\n write_body(buf, |buf| {\n\n buf.put_i32(80_877_103);\n\n Ok::<_, io::Error>(())\n\n })\n\n .unwrap();\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 68, "score": 206497.33588928464 }, { "content": "#[inline]\n\npub fn copy_done(buf: &mut BytesMut) {\n\n buf.put_u8(b'c');\n\n write_body(buf, |_| Ok::<(), 
io::Error>(())).unwrap();\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 69, "score": 206497.33588928464 }, { "content": "#[inline]\n\nfn get_str(buf: &[u8]) -> io::Result<&str> {\n\n str::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))\n\n}\n", "file_path": "postgres-protocol/src/message/backend.rs", "rank": 70, "score": 204957.1142329249 }, { "content": "#[derive(ToSql)]\n\nstruct ToSqlTuple(i32, i32);\n\n\n", "file_path": "postgres-derive-test/src/compile-fail/invalid-types.rs", "rank": 71, "score": 197875.45456235827 }, { "content": "#[derive(FromSql)]\n\nstruct FromSqlTuple(i32, i32);\n\n\n", "file_path": "postgres-derive-test/src/compile-fail/invalid-types.rs", "rank": 72, "score": 197875.45456235827 }, { "content": "#[inline]\n\npub fn box_to_sql(x1: f64, y1: f64, x2: f64, y2: f64, buf: &mut BytesMut) {\n\n buf.put_f64(x1);\n\n buf.put_f64(y1);\n\n buf.put_f64(x2);\n\n buf.put_f64(y2);\n\n}\n\n\n\n/// Deserializes a box value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 73, "score": 197124.18128365965 }, { "content": "fn can_skip_channel_binding(config: &Config) -> Result<(), Error> {\n\n match config.channel_binding {\n\n config::ChannelBinding::Disable | config::ChannelBinding::Prefer => Ok(()),\n\n config::ChannelBinding::Require => Err(Error::authentication(\n\n \"server did not use channel binding\".into(),\n\n )),\n\n }\n\n}\n\n\n\nasync fn authenticate_password<S, T>(\n\n stream: &mut StartupStream<S, T>,\n\n password: &[u8],\n\n) -> Result<(), Error>\n\nwhere\n\n S: AsyncRead + AsyncWrite + Unpin,\n\n T: AsyncRead + AsyncWrite + Unpin,\n\n{\n\n let mut buf = BytesMut::new();\n\n frontend::password_message(password, &mut buf).map_err(Error::encode)?;\n\n\n", "file_path": "tokio-postgres/src/connect_raw.rs", "rank": 74, "score": 194842.9419948942 }, { "content": "pub fn build() {\n\n let mut file = 
BufWriter::new(File::create(\"../postgres-types/src/type_gen.rs\").unwrap());\n\n let types = parse_types();\n\n\n\n make_header(&mut file);\n\n make_enum(&mut file, &types);\n\n make_impl(&mut file, &types);\n\n make_consts(&mut file, &types);\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 75, "score": 190058.42593167108 }, { "content": "fn parse_types() -> BTreeMap<u32, Type> {\n\n let raw_types = DatParser::new(PG_TYPE_DAT).parse_array();\n\n let raw_ranges = DatParser::new(PG_RANGE_DAT).parse_array();\n\n\n\n let oids_by_name = raw_types\n\n .iter()\n\n .map(|m| (m[\"typname\"].clone(), m[\"oid\"].parse::<u32>().unwrap()))\n\n .collect::<HashMap<_, _>>();\n\n\n\n let range_elements = raw_ranges\n\n .iter()\n\n .map(|m| {\n\n (\n\n oids_by_name[&*m[\"rngtypid\"]],\n\n oids_by_name[&*m[\"rngsubtype\"]],\n\n )\n\n })\n\n .collect::<HashMap<_, _>>();\n\n\n\n let range_vector_re = Regex::new(\"(range|vector)$\").unwrap();\n", "file_path": "codegen/src/type_gen.rs", "rank": 76, "score": 187585.5541531709 }, { "content": "// spawned: 249us 252us 255us\n\n// local: 214us 216us 219us\n\nfn query_prepared(c: &mut Criterion) {\n\n let mut client = Client::connect(\"host=localhost port=5433 user=postgres\", NoTls).unwrap();\n\n\n\n let stmt = client.prepare(\"SELECT $1::INT8\").unwrap();\n\n\n\n c.bench_function(\"query_prepared\", move |b| {\n\n b.iter(|| client.query(&stmt, &[&1i64]).unwrap())\n\n });\n\n}\n\n\n\ncriterion_group!(group, query_prepared);\n\ncriterion_main!(group);\n", "file_path": "postgres/benches/bench.rs", "rank": 77, "score": 183030.27326364815 }, { "content": "#[inline]\n\nfn write_body<F, E>(buf: &mut BytesMut, f: F) -> Result<(), E>\n\nwhere\n\n F: FnOnce(&mut BytesMut) -> Result<(), E>,\n\n E: From<io::Error>,\n\n{\n\n let base = buf.len();\n\n buf.extend_from_slice(&[0; 4]);\n\n\n\n f(buf)?;\n\n\n\n let size = i32::from_usize(buf.len() - base)?;\n\n BigEndian::write_i32(&mut buf[base..], size);\n\n Ok(())\n\n}\n\n\n\npub enum 
BindError {\n\n Conversion(Box<dyn Error + marker::Sync + Send>),\n\n Serialization(io::Error),\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 78, "score": 182389.2256106147 }, { "content": "fn write_nullable<F, E>(serializer: F, buf: &mut BytesMut) -> Result<(), E>\n\nwhere\n\n F: FnOnce(&mut BytesMut) -> Result<IsNull, E>,\n\n E: From<io::Error>,\n\n{\n\n let base = buf.len();\n\n buf.put_i32(0);\n\n let size = match serializer(buf)? {\n\n IsNull::No => i32::from_usize(buf.len() - base - 4)?,\n\n IsNull::Yes => -1,\n\n };\n\n BigEndian::write_i32(&mut buf[base..], size);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "postgres-protocol/src/lib.rs", "rank": 79, "score": 182389.2256106147 }, { "content": "pub fn encode_bind<'a, I>(\n\n statement: &Statement,\n\n params: I,\n\n portal: &str,\n\n buf: &mut BytesMut,\n\n) -> Result<(), Error>\n\nwhere\n\n I: IntoIterator<Item = &'a dyn ToSql>,\n\n I::IntoIter: ExactSizeIterator,\n\n{\n\n let params = params.into_iter();\n\n\n\n assert!(\n\n statement.params().len() == params.len(),\n\n \"expected {} parameters but got {}\",\n\n statement.params().len(),\n\n params.len()\n\n );\n\n\n\n let mut error_idx = 0;\n", "file_path": "tokio-postgres/src/query.rs", "rank": 80, "score": 182204.3402129819 }, { "content": "fn check_remaining(buf: &Cursor<Bytes>, len: usize) -> Result<(), Error> {\n\n if buf.remaining() < len {\n\n Err(Error::parse(io::Error::new(\n\n io::ErrorKind::UnexpectedEof,\n\n \"unexpected EOF\",\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A row of data parsed from a binary copy out stream.\n\npub struct BinaryCopyOutRow {\n\n buf: Bytes,\n\n ranges: Vec<Option<Range<usize>>>,\n\n types: Arc<Vec<Type>>,\n\n}\n\n\n\nimpl BinaryCopyOutRow {\n\n /// Like `get`, but returns a `Result` rather than panicking.\n", "file_path": "tokio-postgres/src/binary_copy.rs", "rank": 81, "score": 181407.69493253337 }, { "content": "#[doc(hidden)]\n\npub fn __to_sql_checked<T>(\n\n v: &T,\n\n 
ty: &Type,\n\n out: &mut BytesMut,\n\n) -> Result<IsNull, Box<dyn Error + Sync + Send>>\n\nwhere\n\n T: ToSql,\n\n{\n\n if !T::accepts(ty) {\n\n return Err(Box::new(WrongType::new::<T>(ty.clone())));\n\n }\n\n v.to_sql(ty, out)\n\n}\n\n\n\n#[cfg(feature = \"with-bit-vec-0_6\")]\n\nmod bit_vec_06;\n\n#[cfg(feature = \"with-chrono-0_4\")]\n\nmod chrono_04;\n\n#[cfg(feature = \"with-eui48-0_4\")]\n\nmod eui48_04;\n", "file_path": "postgres-types/src/lib.rs", "rank": 82, "score": 180607.56911851704 }, { "content": "#[inline]\n\npub fn varbit_from_sql<'a>(\n\n mut buf: &'a [u8],\n\n) -> Result<Varbit<'a>, StdBox<dyn Error + Sync + Send>> {\n\n let len = buf.read_i32::<BigEndian>()?;\n\n if len < 0 {\n\n return Err(\"invalid varbit length: varbit < 0\".into());\n\n }\n\n let bytes = (len as usize + 7) / 8;\n\n if buf.len() != bytes {\n\n return Err(\"invalid message length: varbit mismatch\".into());\n\n }\n\n\n\n Ok(Varbit {\n\n len: len as usize,\n\n bytes: buf,\n\n })\n\n}\n\n\n\n/// A `VARBIT` value.\n\npub struct Varbit<'a> {\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 83, "score": 180607.56911851704 }, { "content": "#[inline]\n\npub fn path_to_sql<I>(\n\n closed: bool,\n\n points: I,\n\n buf: &mut BytesMut,\n\n) -> Result<(), StdBox<dyn Error + Sync + Send>>\n\nwhere\n\n I: IntoIterator<Item = (f64, f64)>,\n\n{\n\n buf.put_u8(closed as u8);\n\n let points_idx = buf.len();\n\n buf.put_i32(0);\n\n\n\n let mut num_points = 0;\n\n for (x, y) in points {\n\n num_points += 1;\n\n buf.put_f64(x);\n\n buf.put_f64(y);\n\n }\n\n\n\n let num_points = i32::from_usize(num_points)?;\n\n BigEndian::write_i32(&mut buf[points_idx..], num_points);\n\n\n\n Ok(())\n\n}\n\n\n\n/// Deserializes a Postgres path.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 84, "score": 180607.56911851704 }, { "content": "#[inline]\n\npub fn hstore_from_sql<'a>(\n\n mut buf: &'a [u8],\n\n) -> Result<HstoreEntries<'a>, StdBox<dyn Error + Sync + Send>> {\n\n let count = 
buf.read_i32::<BigEndian>()?;\n\n if count < 0 {\n\n return Err(\"invalid entry count\".into());\n\n }\n\n\n\n Ok(HstoreEntries {\n\n remaining: count,\n\n buf,\n\n })\n\n}\n\n\n\n/// A fallible iterator over `HSTORE` entries.\n\npub struct HstoreEntries<'a> {\n\n remaining: i32,\n\n buf: &'a [u8],\n\n}\n\n\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 85, "score": 180607.56911851704 }, { "content": "#[inline]\n\npub fn varbit_to_sql<I>(\n\n len: usize,\n\n v: I,\n\n buf: &mut BytesMut,\n\n) -> Result<(), StdBox<dyn Error + Sync + Send>>\n\nwhere\n\n I: Iterator<Item = u8>,\n\n{\n\n let len = i32::from_usize(len)?;\n\n buf.put_i32(len);\n\n\n\n for byte in v {\n\n buf.put_u8(byte);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Deserializes a `VARBIT` or `BIT` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 86, "score": 180607.56911851704 }, { "content": "fn query_prepared(c: &mut Criterion) {\n\n let (client, mut runtime) = setup();\n\n let statement = runtime.block_on(client.prepare(\"SELECT $1::INT8\")).unwrap();\n\n c.bench_function(\"runtime_block_on\", move |b| {\n\n b.iter(|| {\n\n runtime\n\n .block_on(client.query(&statement, &[&1i64]))\n\n .unwrap()\n\n })\n\n });\n\n\n\n let (client, mut runtime) = setup();\n\n let statement = runtime.block_on(client.prepare(\"SELECT $1::INT8\")).unwrap();\n\n c.bench_function(\"executor_block_on\", move |b| {\n\n b.iter(|| executor::block_on(client.query(&statement, &[&1i64])).unwrap())\n\n });\n\n\n\n let (client, mut runtime) = setup();\n\n let client = Arc::new(client);\n\n let statement = runtime.block_on(client.prepare(\"SELECT $1::INT8\")).unwrap();\n", "file_path": "tokio-postgres/benches/bench.rs", "rank": 87, "score": 180110.6219665034 }, { "content": "#[inline]\n\nfn write_counted<I, T, F, E>(items: I, mut serializer: F, buf: &mut BytesMut) -> Result<(), E>\n\nwhere\n\n I: IntoIterator<Item = T>,\n\n F: FnMut(T, &mut BytesMut) -> Result<(), E>,\n\n E: From<io::Error>,\n\n{\n\n let 
base = buf.len();\n\n buf.extend_from_slice(&[0; 2]);\n\n let mut count = 0;\n\n for item in items {\n\n serializer(item, buf)?;\n\n count += 1;\n\n }\n\n let count = i16::from_usize(count)?;\n\n BigEndian::write_i16(&mut buf[base..], count);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "postgres-protocol/src/message/frontend.rs", "rank": 88, "score": 179092.02605735607 }, { "content": "pub fn read_value<'a, T>(\n\n type_: &Type,\n\n buf: &mut &'a [u8],\n\n) -> Result<T, Box<dyn Error + Sync + Send>>\n\nwhere\n\n T: FromSql<'a>,\n\n{\n\n let len = read_be_i32(buf)?;\n\n let value = if len < 0 {\n\n None\n\n } else {\n\n if len as usize > buf.len() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n let (head, tail) = buf.split_at(len as usize);\n\n *buf = tail;\n\n Some(head)\n\n };\n\n T::from_sql_nullable(type_, value)\n\n}\n", "file_path": "postgres-types/src/private.rs", "rank": 89, "score": 177410.78305072308 }, { "content": "#[inline]\n\npub fn hstore_to_sql<'a, I>(\n\n values: I,\n\n buf: &mut BytesMut,\n\n) -> Result<(), StdBox<dyn Error + Sync + Send>>\n\nwhere\n\n I: IntoIterator<Item = (&'a str, Option<&'a str>)>,\n\n{\n\n let base = buf.len();\n\n buf.put_i32(0);\n\n\n\n let mut count = 0;\n\n for (key, value) in values {\n\n count += 1;\n\n\n\n write_pascal_string(key, buf)?;\n\n\n\n match value {\n\n Some(value) => {\n\n write_pascal_string(value, buf)?;\n\n }\n\n None => buf.put_i32(-1),\n\n }\n\n }\n\n\n\n let count = i32::from_usize(count)?;\n\n BigEndian::write_i32(&mut buf[base..], count);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 90, "score": 177410.78305072308 }, { "content": "#[proc_macro_derive(FromSql, attributes(postgres))]\n\npub fn derive_fromsql(input: TokenStream) -> TokenStream {\n\n let input = syn::parse(input).unwrap();\n\n fromsql::expand_derive_fromsql(input)\n\n .unwrap_or_else(|e| e.to_compile_error())\n\n .into()\n\n}\n", "file_path": "postgres-derive/src/lib.rs", "rank": 91, 
"score": 177247.22958889639 }, { "content": "/// Serializes a range value.\n\npub fn range_to_sql<F, G>(\n\n lower: F,\n\n upper: G,\n\n buf: &mut BytesMut,\n\n) -> Result<(), StdBox<dyn Error + Sync + Send>>\n\nwhere\n\n F: FnOnce(&mut BytesMut) -> Result<RangeBound<IsNull>, StdBox<dyn Error + Sync + Send>>,\n\n G: FnOnce(&mut BytesMut) -> Result<RangeBound<IsNull>, StdBox<dyn Error + Sync + Send>>,\n\n{\n\n let tag_idx = buf.len();\n\n buf.put_u8(0);\n\n let mut tag = 0;\n\n\n\n match write_bound(lower, buf)? {\n\n RangeBound::Inclusive(()) => tag |= RANGE_LOWER_INCLUSIVE,\n\n RangeBound::Exclusive(()) => {}\n\n RangeBound::Unbounded => tag |= RANGE_LOWER_UNBOUNDED,\n\n }\n\n\n\n match write_bound(upper, buf)? {\n\n RangeBound::Inclusive(()) => tag |= RANGE_UPPER_INCLUSIVE,\n\n RangeBound::Exclusive(()) => {}\n\n RangeBound::Unbounded => tag |= RANGE_UPPER_UNBOUNDED,\n\n }\n\n\n\n buf[tag_idx] = tag;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 92, "score": 172056.58995987807 }, { "content": "struct Type {\n\n name: String,\n\n variant: String,\n\n ident: String,\n\n kind: String,\n\n element: u32,\n\n doc: String,\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 93, "score": 171568.09553966665 }, { "content": "fn make_type(file: &mut BufWriter<File>) {\n\n write!(\n\n file,\n\n \"// Autogenerated file - DO NOT EDIT\n\nuse std::borrow::Cow;\n\n\n\n/// A SQLSTATE error code\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub struct SqlState(Cow<'static, str>);\n\n\n\nimpl SqlState {{\n\n /// Creates a `SqlState` from its error code.\n\n pub fn from_code(s: &str) -> SqlState {{\n\n match SQLSTATE_MAP.get(s) {{\n\n Some(state) => state.clone(),\n\n None => SqlState(Cow::Owned(s.to_string())),\n\n }}\n\n }}\n\n\n\n /// Returns the error code corresponding to the `SqlState`.\n\n pub fn code(&self) -> &str {{\n\n &self.0\n\n }}\n\n\"\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "codegen/src/sqlstate.rs", "rank": 94, 
"score": 169958.07124708948 }, { "content": "fn make_header(w: &mut BufWriter<File>) {\n\n write!(\n\n w,\n\n \"// Autogenerated file - DO NOT EDIT\n\nuse std::sync::Arc;\n\n\n\nuse crate::{{Type, Oid, Kind}};\n\n\n\n#[derive(PartialEq, Eq, Debug, Hash)]\n\npub struct Other {{\n\n pub name: String,\n\n pub oid: Oid,\n\n pub kind: Kind,\n\n pub schema: String,\n\n}}\n\n\"\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 95, "score": 169958.07124708948 }, { "content": "#[inline]\n\npub fn bytea_from_sql(buf: &[u8]) -> &[u8] {\n\n buf\n\n}\n\n\n\n/// Serializes a `TEXT`, `VARCHAR`, `CHAR(n)`, `NAME`, or `CITEXT` value.\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 96, "score": 166675.27608632768 }, { "content": "#[inline]\n\npub fn array_to_sql<T, I, J, F>(\n\n dimensions: I,\n\n element_type: Oid,\n\n elements: J,\n\n mut serializer: F,\n\n buf: &mut BytesMut,\n\n) -> Result<(), StdBox<dyn Error + Sync + Send>>\n\nwhere\n\n I: IntoIterator<Item = ArrayDimension>,\n\n J: IntoIterator<Item = T>,\n\n F: FnMut(T, &mut BytesMut) -> Result<IsNull, StdBox<dyn Error + Sync + Send>>,\n\n{\n\n let dimensions_idx = buf.len();\n\n buf.put_i32(0);\n\n let flags_idx = buf.len();\n\n buf.put_i32(0);\n\n buf.put_u32(element_type);\n\n\n\n let mut num_dimensions = 0;\n\n for dimension in dimensions {\n", "file_path": "postgres-protocol/src/types/mod.rs", "rank": 97, "score": 164065.95866248285 }, { "content": "#[test]\n\nfn portal() {\n\n let mut client = Client::connect(\"host=localhost port=5433 user=postgres\", NoTls).unwrap();\n\n\n\n client\n\n .simple_query(\n\n \"CREATE TEMPORARY TABLE foo (id INT);\n\n INSERT INTO foo (id) VALUES (1), (2), (3);\",\n\n )\n\n .unwrap();\n\n\n\n let mut transaction = client.transaction().unwrap();\n\n\n\n let portal = transaction\n\n .bind(\"SELECT * FROM foo ORDER BY id\", &[])\n\n .unwrap();\n\n\n\n let rows = transaction.query_portal(&portal, 2).unwrap();\n\n assert_eq!(rows.len(), 2);\n\n 
assert_eq!(rows[0].get::<_, i32>(0), 1);\n\n assert_eq!(rows[1].get::<_, i32>(0), 2);\n\n\n\n let rows = transaction.query_portal(&portal, 2).unwrap();\n\n assert_eq!(rows.len(), 1);\n\n assert_eq!(rows[0].get::<_, i32>(0), 3);\n\n}\n\n\n", "file_path": "postgres/src/test.rs", "rank": 98, "score": 160617.12120047986 }, { "content": "// Domains are sometimes but not always just represented by the bare type (!?)\n\nfn domain_accepts_body(name: &str, field: &syn::Field) -> TokenStream {\n\n let ty = &field.ty;\n\n let normal_body = accepts::domain_body(name, field);\n\n\n\n quote! {\n\n if <#ty as postgres_types::FromSql>::accepts(type_) {\n\n return true;\n\n }\n\n\n\n #normal_body\n\n }\n\n}\n\n\n", "file_path": "postgres-derive/src/fromsql.rs", "rank": 99, "score": 158274.42610769896 } ]
Rust
termwiz/src/widgets/mod.rs
bcully/wezterm
ea401e1f58ca5a088ac5d5e1d7963f36269afb76
#![allow(clippy::new_without_default)] use crate::color::ColorAttribute; use crate::input::InputEvent; use crate::surface::{Change, CursorShape, Position, SequenceNo, Surface}; use anyhow::Error; use fnv::FnvHasher; use std::collections::{HashMap, VecDeque}; use std::hash::BuildHasherDefault; type FnvHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FnvHasher>>; pub mod layout; pub enum WidgetEvent { Input(InputEvent), } #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct CursorShapeAndPosition { pub shape: CursorShape, pub coords: ParentRelativeCoords, pub color: ColorAttribute, } #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Rect { pub x: usize, pub y: usize, pub width: usize, pub height: usize, } pub struct RenderArgs<'a> { pub id: WidgetId, pub is_focused: bool, pub cursor: &'a mut CursorShapeAndPosition, pub surface: &'a mut Surface, } pub struct UpdateArgs<'a> { pub id: WidgetId, pub cursor: &'a mut CursorShapeAndPosition, } pub trait Widget { fn render(&mut self, args: &mut RenderArgs); fn get_size_constraints(&self) -> layout::Constraints { Default::default() } fn process_event(&mut self, _event: &WidgetEvent, _args: &mut UpdateArgs) -> bool { false } } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub struct ParentRelativeCoords { pub x: usize, pub y: usize, } impl ParentRelativeCoords { pub fn new(x: usize, y: usize) -> Self { Self { x, y } } } impl From<(usize, usize)> for ParentRelativeCoords { fn from(coords: (usize, usize)) -> ParentRelativeCoords { ParentRelativeCoords::new(coords.0, coords.1) } } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub struct ScreenRelativeCoords { pub x: usize, pub y: usize, } impl ScreenRelativeCoords { pub fn new(x: usize, y: usize) -> Self { Self { x, y } } pub fn offset_by(&self, rel: &ParentRelativeCoords) -> Self { Self { x: self.x + rel.x, y: self.y + rel.y, } } } static WIDGET_ID: ::std::sync::atomic::AtomicUsize = ::std::sync::atomic::AtomicUsize::new(0); #[derive(Copy, Clone, 
Hash, PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct WidgetId(usize); impl WidgetId { pub fn new() -> Self { WidgetId(WIDGET_ID.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)) } } impl Default for WidgetId { fn default() -> Self { Self::new() } } struct RenderData<'widget> { surface: Surface, cursor: CursorShapeAndPosition, coordinates: ParentRelativeCoords, widget: Box<dyn Widget + 'widget>, } #[derive(Default)] struct Graph { root: Option<WidgetId>, children: FnvHashMap<WidgetId, Vec<WidgetId>>, parent: FnvHashMap<WidgetId, WidgetId>, } impl Graph { fn add(&mut self, parent: Option<WidgetId>) -> WidgetId { let id = WidgetId::new(); if self.root.is_none() { self.root = Some(id); } self.children.insert(id, Vec::new()); if let Some(parent) = parent { self.parent.insert(id, parent); self.children.get_mut(&parent).unwrap().push(id); } id } fn children(&self, id: WidgetId) -> &[WidgetId] { self.children .get(&id) .map(|v| v.as_slice()) .unwrap_or_else(|| &[]) } } #[derive(Default)] pub struct Ui<'widget> { graph: Graph, render: FnvHashMap<WidgetId, RenderData<'widget>>, input_queue: VecDeque<WidgetEvent>, focused: Option<WidgetId>, } impl<'widget> Ui<'widget> { pub fn new() -> Self { Default::default() } pub fn add<W: Widget + 'widget>(&mut self, parent: Option<WidgetId>, w: W) -> WidgetId { let id = self.graph.add(parent); self.render.insert( id, RenderData { surface: Surface::new(1, 1), cursor: Default::default(), coordinates: Default::default(), widget: Box::new(w), }, ); if parent.is_none() && self.focused.is_none() { self.focused = Some(id); } id } pub fn set_root<W: Widget + 'widget>(&mut self, w: W) -> WidgetId { self.add(None, w) } pub fn add_child<W: Widget + 'widget>(&mut self, parent: WidgetId, w: W) -> WidgetId { self.add(Some(parent), w) } fn do_deliver(&mut self, id: WidgetId, event: &WidgetEvent) -> bool { let render_data = self.render.get_mut(&id).unwrap(); let mut args = UpdateArgs { id, cursor: &mut render_data.cursor, }; 
render_data.widget.process_event(event, &mut args) } fn deliver_event(&mut self, mut id: WidgetId, event: &WidgetEvent) { loop { let handled = match event { WidgetEvent::Input(InputEvent::Resized { .. }) => true, WidgetEvent::Input(InputEvent::Mouse(m)) => { let mut m = m.clone(); let coords = self.to_widget_coords( id, &ScreenRelativeCoords::new(m.x as usize, m.y as usize), ); m.x = coords.x as u16; m.y = coords.y as u16; self.do_deliver(id, &WidgetEvent::Input(InputEvent::Mouse(m))) } WidgetEvent::Input(InputEvent::Paste(_)) | WidgetEvent::Input(InputEvent::Key(_)) | WidgetEvent::Input(InputEvent::Wake) => self.do_deliver(id, event), }; if handled { return; } id = match self.graph.parent.get(&id) { Some(parent) => *parent, None => return, }; } } fn hovered_widget(&self, coords: &ScreenRelativeCoords) -> Option<WidgetId> { let root = match self.graph.root { Some(id) => id, _ => return None, }; let depth = 0; let mut best = (depth, root); self.hovered_recursive(root, depth, coords.x, coords.y, &mut best); Some(best.1) } fn hovered_recursive( &self, widget: WidgetId, depth: usize, x: usize, y: usize, best: &mut (usize, WidgetId), ) { let render = &self.render[&widget]; if depth >= best.0 && x >= render.coordinates.x && y >= render.coordinates.y { let (width, height) = render.surface.dimensions(); if (x - render.coordinates.x < width) && (y - render.coordinates.y < height) { *best = (depth, widget); } } for child in self.graph.children(widget) { self.hovered_recursive( *child, depth + 1, x + render.coordinates.x, y + render.coordinates.y, best, ); } } pub fn process_event_queue(&mut self) -> Result<(), Error> { while let Some(event) = self.input_queue.pop_front() { match event { WidgetEvent::Input(InputEvent::Resized { rows, cols }) => { self.compute_layout(cols, rows)?; } WidgetEvent::Input(InputEvent::Mouse(ref m)) => { if let Some(hover) = self.hovered_widget(&ScreenRelativeCoords::new(m.x as usize, m.y as usize)) { self.deliver_event(hover, &event); } } 
WidgetEvent::Input(InputEvent::Key(_)) | WidgetEvent::Input(InputEvent::Paste(_)) | WidgetEvent::Input(InputEvent::Wake) => { if let Some(focus) = self.focused { self.deliver_event(focus, &event); } } } } Ok(()) } pub fn queue_event(&mut self, event: WidgetEvent) { self.input_queue.push_back(event); } pub fn set_focus(&mut self, id: WidgetId) { self.focused = Some(id); } fn render_recursive( &mut self, id: WidgetId, screen: &mut Surface, abs_coords: &ScreenRelativeCoords, ) -> Result<(), Error> { let (x, y) = { let render_data = self.render.get_mut(&id).unwrap(); let surface = &mut render_data.surface; { let mut args = RenderArgs { id, cursor: &mut render_data.cursor, surface, is_focused: self.focused.map(|f| f == id).unwrap_or(false), }; render_data.widget.render(&mut args); } screen.draw_from_screen(surface, abs_coords.x, abs_coords.y); surface.flush_changes_older_than(SequenceNo::max_value()); (render_data.coordinates.x, render_data.coordinates.y) }; for child in self.graph.children(id).to_vec() { self.render_recursive( child, screen, &ScreenRelativeCoords::new(x + abs_coords.x, y + abs_coords.y), )?; } Ok(()) } fn compute_layout(&mut self, width: usize, height: usize) -> Result<bool, Error> { let mut layout = layout::LayoutState::new(); let root = self.graph.root.unwrap(); self.add_widget_to_layout(&mut layout, root)?; let mut changed = false; #[cfg_attr(feature = "cargo-clippy", allow(clippy::identity_conversion))] for result in layout.compute_constraints(width, height, root)? 
{ let render_data = self.render.get_mut(&result.widget).unwrap(); let coords = ParentRelativeCoords::new(result.rect.x, result.rect.y); if coords != render_data.coordinates { render_data.coordinates = coords; changed = true; } if (result.rect.width, result.rect.height) != render_data.surface.dimensions() { render_data .surface .resize(result.rect.width, result.rect.height); changed = true; } } Ok(changed) } fn add_widget_to_layout( &mut self, layout: &mut layout::LayoutState, widget: WidgetId, ) -> Result<(), Error> { let constraints = self.render[&widget].widget.get_size_constraints(); let children = self.graph.children(widget).to_vec(); layout.add_widget(widget, &constraints, &children); for child in children { self.add_widget_to_layout(layout, child)?; } Ok(()) } pub fn render_to_screen(&mut self, screen: &mut Surface) -> Result<bool, Error> { if let Some(root) = self.graph.root { self.render_recursive(root, screen, &ScreenRelativeCoords::new(0, 0))?; } if let Some(id) = self.focused { let cursor = &self.render[&id].cursor; let coords = self.to_screen_coords(id, &cursor.coords); screen.add_changes(vec![ Change::CursorShape(cursor.shape), Change::CursorColor(cursor.color), Change::CursorPosition { x: Position::Absolute(coords.x), y: Position::Absolute(coords.y), }, ]); } let (width, height) = screen.dimensions(); self.compute_layout(width, height) } fn coord_walk<F: Fn(usize, usize) -> usize>( &self, widget: WidgetId, mut x: usize, mut y: usize, f: F, ) -> (usize, usize) { let mut widget = widget; loop { let render = &self.render[&widget]; x = f(x, render.coordinates.x); y = f(y, render.coordinates.y); widget = match self.graph.parent.get(&widget) { Some(parent) => *parent, None => break, }; } (x, y) } pub fn to_screen_coords( &self, widget: WidgetId, coords: &ParentRelativeCoords, ) -> ScreenRelativeCoords { let (x, y) = self.coord_walk(widget, coords.x, coords.y, |a, b| a + b); ScreenRelativeCoords { x, y } } pub fn to_widget_coords( &self, widget: WidgetId, 
coords: &ScreenRelativeCoords, ) -> ParentRelativeCoords { let (x, y) = self.coord_walk(widget, coords.x, coords.y, |a, b| a - b); ParentRelativeCoords { x, y } } }
#![allow(clippy::new_without_default)] use crate::color::ColorAttribute; use crate::input::InputEvent; use crate::surface::{Change, CursorShape, Position, SequenceNo, Surface}; use anyhow::Error; use fnv::FnvHasher; use std::collections::{HashMap, VecDeque}; use std::hash::BuildHasherDefault; type FnvHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FnvHasher>>; pub mod layout; pub enum WidgetEvent { Input(InputEvent), } #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct CursorShapeAndPosition { pub shape: CursorShape, pub coords: ParentRelativeCoords, pub color: ColorAttribute, } #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Rect { pub x: usize, pub y: usize, pub width: usize, pub height: usize, } pub struct RenderArgs<'a> { pub id: WidgetId, pub is_focused: bool, pub cursor: &'a mut CursorShapeAndPosition, pub surface: &'a mut Surface, } pub struct UpdateArgs<'a> { pub id: WidgetId, pub cursor: &'a mut CursorShapeAndPosition, } pub trait Widget { fn render(&mut self, args: &mut RenderArgs); fn get_size_constraints(&self) -> layout::Constraints { Default::default() } fn process_event(&mut self, _event: &WidgetEvent, _args: &mut UpdateArgs) -> bool { false } } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub struct ParentRelativeCoords { pub x: usize, pub y: usize, } impl ParentRelativeCoords { pub fn new(x: usize, y: usize) -> Self { Self { x, y } } } impl From<(usize, usize)> for ParentRelativeCoords { fn from(coords: (usize, usize)) -> ParentRelativeCoords { ParentRelativeCoords::new(coords.0, coords.1) } } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub struct ScreenRelativeCoords { pub x: usize, pub y: usize, } impl ScreenRelativeCoords { pub fn new(x: usize, y: usize) -> Self { Self { x, y } } pub fn offset_by(&self, rel: &ParentRelativeCoords) -> Self { Self { x: self.x + rel.x, y: self.y + rel.y, } } } static WIDGET_ID: ::std::sync::atomic::AtomicUsize = ::std::sync::atomic::AtomicUsize::new(0); #[derive(Copy, Clone, 
Hash, PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct WidgetId(usize); impl WidgetId { pub fn new() -> Self { WidgetId(WIDGET_ID.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)) } } impl Default for WidgetId { fn default() -> Self { Self::new() } } struct RenderData<'widget> { surface: Surface, cursor: CursorShapeAndPosition, coordinates: ParentRelativeCoords, widget: Box<dyn Widget + 'widget>, } #[derive(Default)] struct Graph { root: Option<WidgetId>, children: FnvHashMap<WidgetId, Vec<WidgetId>>, parent: FnvHashMap<WidgetId, WidgetId>, } impl Graph { fn add(&mut self, parent: Option<WidgetId>) -> WidgetId { let id = WidgetId::new(); if self.root.is_none() { self.root = Some(id); } self.children.insert(id, Vec::new()); if let Some(parent) = parent { self.parent.insert(id, parent); self.children.get_mut(&parent).unwrap().push(id); } id } fn children(&self, id: WidgetId) -> &[WidgetId] { self.children .get(&id) .map(|v| v.as_slice()) .unwrap_or_else(|| &[]) } } #[derive(Default)] pub struct Ui<'widget> { graph: Graph, render: FnvHashMap<WidgetId, RenderData<'widget>>, input_queue: VecDeque<WidgetEvent>, focused: Option<WidgetId>, } impl<'widget> Ui<'widget> { pub fn new() -> Self { Default::default() } pub fn add<W: Widget + 'widget>(&mut self, parent: Option<WidgetId>, w: W) -> WidgetId { let id = self.graph.add(parent); self.render.insert( id, RenderData { surface: Surface::new(1, 1), cursor: Default::default(), coordinates: Default::default(), widget: Box::new(w), }, ); if parent.is_none() && self.focused.is_none() { self.focused = Some(id); } id } pub fn set_root<W: Widget + 'widget>(&mut self, w: W) -> WidgetId { self.add(None, w) } pub fn add_child<W: Widget + 'widget>(&mut self, parent: WidgetId, w: W) -> WidgetId { self.add(Some(parent), w) } fn do_deliver(&mut self, id: WidgetId, event: &WidgetEvent) -> bool { let render_data = self.render.get_mut(&id).unwrap(); let mut args = UpdateArgs { id, cursor: &mut render_data.cursor, }; 
render_data.widget.process_event(event, &mut args) } fn deliver_event(&mut self, mut id: WidgetId, event: &WidgetEvent) { loop { let handled = match event { WidgetEvent::Input(InputEvent::Resized { .. }) => true, WidgetEvent::Input(InputEvent::Mouse(m)) => { let mut m = m.clone(); let coords = self.to_widget_coords( id, &ScreenRelativeCoords::new(m.x as usize, m.y as usize), ); m.x = coords.x as u16; m.y = coords.y as u16; self.do_deliver(id, &WidgetEvent::Input(InputEvent::Mouse(m))) } WidgetEvent::Input(InputEvent::Paste(_)) | WidgetEvent::Input(InputEvent::Key(_)) | WidgetEvent::Input(InputEvent::Wake) => self.do_deliver(id, event), }; if handled { return; } id = match self.graph.parent.get(&id) { Some(parent) => *parent, None => return, }; } } fn hovered_widget(&self, coords: &ScreenRelativeCoords) -> Option<WidgetId> {
fn hovered_recursive( &self, widget: WidgetId, depth: usize, x: usize, y: usize, best: &mut (usize, WidgetId), ) { let render = &self.render[&widget]; if depth >= best.0 && x >= render.coordinates.x && y >= render.coordinates.y { let (width, height) = render.surface.dimensions(); if (x - render.coordinates.x < width) && (y - render.coordinates.y < height) { *best = (depth, widget); } } for child in self.graph.children(widget) { self.hovered_recursive( *child, depth + 1, x + render.coordinates.x, y + render.coordinates.y, best, ); } } pub fn process_event_queue(&mut self) -> Result<(), Error> { while let Some(event) = self.input_queue.pop_front() { match event { WidgetEvent::Input(InputEvent::Resized { rows, cols }) => { self.compute_layout(cols, rows)?; } WidgetEvent::Input(InputEvent::Mouse(ref m)) => { if let Some(hover) = self.hovered_widget(&ScreenRelativeCoords::new(m.x as usize, m.y as usize)) { self.deliver_event(hover, &event); } } WidgetEvent::Input(InputEvent::Key(_)) | WidgetEvent::Input(InputEvent::Paste(_)) | WidgetEvent::Input(InputEvent::Wake) => { if let Some(focus) = self.focused { self.deliver_event(focus, &event); } } } } Ok(()) } pub fn queue_event(&mut self, event: WidgetEvent) { self.input_queue.push_back(event); } pub fn set_focus(&mut self, id: WidgetId) { self.focused = Some(id); } fn render_recursive( &mut self, id: WidgetId, screen: &mut Surface, abs_coords: &ScreenRelativeCoords, ) -> Result<(), Error> { let (x, y) = { let render_data = self.render.get_mut(&id).unwrap(); let surface = &mut render_data.surface; { let mut args = RenderArgs { id, cursor: &mut render_data.cursor, surface, is_focused: self.focused.map(|f| f == id).unwrap_or(false), }; render_data.widget.render(&mut args); } screen.draw_from_screen(surface, abs_coords.x, abs_coords.y); surface.flush_changes_older_than(SequenceNo::max_value()); (render_data.coordinates.x, render_data.coordinates.y) }; for child in self.graph.children(id).to_vec() { self.render_recursive( child, 
screen, &ScreenRelativeCoords::new(x + abs_coords.x, y + abs_coords.y), )?; } Ok(()) } fn compute_layout(&mut self, width: usize, height: usize) -> Result<bool, Error> { let mut layout = layout::LayoutState::new(); let root = self.graph.root.unwrap(); self.add_widget_to_layout(&mut layout, root)?; let mut changed = false; #[cfg_attr(feature = "cargo-clippy", allow(clippy::identity_conversion))] for result in layout.compute_constraints(width, height, root)? { let render_data = self.render.get_mut(&result.widget).unwrap(); let coords = ParentRelativeCoords::new(result.rect.x, result.rect.y); if coords != render_data.coordinates { render_data.coordinates = coords; changed = true; } if (result.rect.width, result.rect.height) != render_data.surface.dimensions() { render_data .surface .resize(result.rect.width, result.rect.height); changed = true; } } Ok(changed) } fn add_widget_to_layout( &mut self, layout: &mut layout::LayoutState, widget: WidgetId, ) -> Result<(), Error> { let constraints = self.render[&widget].widget.get_size_constraints(); let children = self.graph.children(widget).to_vec(); layout.add_widget(widget, &constraints, &children); for child in children { self.add_widget_to_layout(layout, child)?; } Ok(()) } pub fn render_to_screen(&mut self, screen: &mut Surface) -> Result<bool, Error> { if let Some(root) = self.graph.root { self.render_recursive(root, screen, &ScreenRelativeCoords::new(0, 0))?; } if let Some(id) = self.focused { let cursor = &self.render[&id].cursor; let coords = self.to_screen_coords(id, &cursor.coords); screen.add_changes(vec![ Change::CursorShape(cursor.shape), Change::CursorColor(cursor.color), Change::CursorPosition { x: Position::Absolute(coords.x), y: Position::Absolute(coords.y), }, ]); } let (width, height) = screen.dimensions(); self.compute_layout(width, height) } fn coord_walk<F: Fn(usize, usize) -> usize>( &self, widget: WidgetId, mut x: usize, mut y: usize, f: F, ) -> (usize, usize) { let mut widget = widget; loop { let 
render = &self.render[&widget]; x = f(x, render.coordinates.x); y = f(y, render.coordinates.y); widget = match self.graph.parent.get(&widget) { Some(parent) => *parent, None => break, }; } (x, y) } pub fn to_screen_coords( &self, widget: WidgetId, coords: &ParentRelativeCoords, ) -> ScreenRelativeCoords { let (x, y) = self.coord_walk(widget, coords.x, coords.y, |a, b| a + b); ScreenRelativeCoords { x, y } } pub fn to_widget_coords( &self, widget: WidgetId, coords: &ScreenRelativeCoords, ) -> ParentRelativeCoords { let (x, y) = self.coord_walk(widget, coords.x, coords.y, |a, b| a - b); ParentRelativeCoords { x, y } } }
let root = match self.graph.root { Some(id) => id, _ => return None, }; let depth = 0; let mut best = (depth, root); self.hovered_recursive(root, depth, coords.x, coords.y, &mut best); Some(best.1) }
function_block-function_prefix_line
[]
Rust
arci-ros/src/ros_localization_client.rs
OpenRR/OpenRR
bfafe3707164cf8ca1143b5daa039f60b1831fdb
use std::borrow::Borrow; use arci::*; use nalgebra as na; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use crate::{msg, rosrust_utils::*}; rosrust::rosmsg_include! { std_srvs / Empty } const AMCL_POSE_TOPIC: &str = "/amcl_pose"; const NO_MOTION_UPDATE_SERVICE: &str = "request_nomotion_update"; #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] #[serde(deny_unknown_fields)] pub struct RosLocalizationClientConfig { pub request_final_nomotion_update_hack: bool, #[serde(default = "default_nomotion_update_service_name")] pub nomotion_update_service_name: String, #[serde(default = "default_amcl_pose_topic_name")] pub amcl_pose_topic_name: String, } #[derive(Clone, Debug)] pub struct RosLocalizationClientBuilder { amcl_pose_topic_name: String, nomotion_update_service_name: String, request_final_nomotion_update_hack: bool, } impl RosLocalizationClientBuilder { pub fn new() -> Self { Self { amcl_pose_topic_name: AMCL_POSE_TOPIC.to_string(), nomotion_update_service_name: NO_MOTION_UPDATE_SERVICE.to_string(), request_final_nomotion_update_hack: false, } } pub fn request_final_nomotion_update_hack(mut self, val: bool) -> Self { self.request_final_nomotion_update_hack = val; self } pub fn finalize(self) -> RosLocalizationClient { RosLocalizationClient::new( self.request_final_nomotion_update_hack, self.nomotion_update_service_name, self.amcl_pose_topic_name, ) } } impl Default for RosLocalizationClientBuilder { fn default() -> Self { Self::new() } } pub struct RosLocalizationClient { pose_subscriber: SubscriberHandler<msg::geometry_msgs::PoseWithCovarianceStamped>, nomotion_update_client: Option<rosrust::Client<msg::std_srvs::Empty>>, amcl_pose_topic_name: String, } impl RosLocalizationClient { pub fn new( request_final_nomotion_update_hack: bool, nomotion_update_service_name: String, amcl_pose_topic_name: String, ) -> Self { let pose_subscriber = SubscriberHandler::new(&amcl_pose_topic_name, 1); let nomotion_update_client = if 
request_final_nomotion_update_hack { rosrust::wait_for_service( &nomotion_update_service_name, Some(std::time::Duration::from_secs(10)), ) .unwrap(); Some(rosrust::client::<msg::std_srvs::Empty>(&nomotion_update_service_name).unwrap()) } else { None }; Self { pose_subscriber, nomotion_update_client, amcl_pose_topic_name, } } pub fn new_from_config(config: RosLocalizationClientConfig) -> Self { Self::new( config.request_final_nomotion_update_hack, config.nomotion_update_service_name, config.amcl_pose_topic_name, ) } pub fn request_nomotion_update(&self) { self.nomotion_update_client .borrow() .as_ref() .unwrap() .req(&msg::std_srvs::EmptyReq {}) .unwrap() .unwrap(); } } impl Localization for RosLocalizationClient { fn current_pose(&self, _frame_id: &str) -> Result<na::Isometry2<f64>, Error> { self.pose_subscriber.wait_message(100); let pose_with_cov_stamped = self.pose_subscriber .get()? .ok_or_else(|| Error::Connection { message: format!("Failed to get pose from {}", self.amcl_pose_topic_name), })?; let pose: na::Isometry3<f64> = pose_with_cov_stamped.pose.pose.into(); Ok(na::Isometry2::new( na::Vector2::new(pose.translation.vector[0], pose.translation.vector[1]), pose.rotation.euler_angles().2, )) } } fn default_nomotion_update_service_name() -> String { NO_MOTION_UPDATE_SERVICE.to_string() } fn default_amcl_pose_topic_name() -> String { AMCL_POSE_TOPIC.to_string() }
use std::borrow::Borrow; use arci::*; use nalgebra as na; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use crate::{msg, rosrust_utils::*}; rosrust::rosmsg_include! { std_srvs / Empty } const AMCL_POSE_TOPIC: &str = "/amcl_pose"; const NO_MOTION_UPDATE_SERVICE: &str = "request_nomotion_update"; #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] #[serde(deny_unknown_fields)] pub struct RosLocalizationClientConfig { pub request_final_nomotion_update_hack: bool, #[serde(default = "default_nomotion_update_service_name")] pub nomotion_update_service_name: String, #[serde(default = "default_amcl_pose_topic_name")] pub amcl_pose_topic_name: String, } #[derive(Clone, Debug)] pub struct RosLocalizationClientBuilder { amcl_pose_topic_name: String, nomotion_update_service_name: String, request_final_nomotion_update_hack: bool, } impl RosLocalizationClientBuilder { pub fn new() -> Self { Self { amcl_pose_topic_name: AMCL_POSE_TOPIC.to_string(), nomotion_update_service_name: NO_MOTION_UPDATE_SERVICE.to_string(), request_final_nomotion_update_hack: false, } } pub fn request_final_nomotion_update_hack(mut self, val: bool) -> Self { self.request_final_nomotion_update_hack = val; self } pub fn finalize(self) -> RosLocalizationClient { RosLocalizationClient::new( self.request_final_nomotion_update_hack, self.nomotion_update_service_name, self.amcl_pose_topic_name, ) } } impl Default for RosLocalizationClientBuilder { fn default() -> Self { Self::new() } } pub struct RosLocalizationClient { pose_subscriber: SubscriberHandler<msg::geometry_msgs::PoseWithCovarianceStamped>, nomotion_update_client: Option<rosrust::Client<msg::std_srvs::Empty>>, amcl_pose_topic_name: String, } impl RosLocalizationClient { pub fn new( request_final_nomotion_update_hack: bool, nomotion_update_service_name: String, amcl_pose_topic_name: String, ) -> Self { let pose_subscriber = SubscriberHandler::new(&amcl_pose_topic_name, 1); let nomotion_update_client = if 
request_final_nomotion_update_hack { rosrust::wait_for_service( &nomotion_update_service_name, Some(std::time::Duration::from_secs(10)), ) .unwrap(); Some(rosrust::client::<msg::std_srvs::Empty>(&nomotion_update_service_name).unwrap()) } else { None }; Self { pose_subscriber, nomotion_update_client, amcl_pose_topic_name, } } pub fn new_from_config(config: RosLocalizationClientConfig) -> Self { Self::new( config.request_final_nomotion_update_hack, config.nomotion_update_service_name, config.amcl_pose_topic_name, ) } pub fn request_nomotion_update(&self) { self.nomotion_update_client .borrow() .as_ref() .unwrap() .req(&msg::std_srvs::EmptyReq {}) .unwrap() .unwrap(); } } impl Localization for RosLocalizationClient {
} fn default_nomotion_update_service_name() -> String { NO_MOTION_UPDATE_SERVICE.to_string() } fn default_amcl_pose_topic_name() -> String { AMCL_POSE_TOPIC.to_string() }
fn current_pose(&self, _frame_id: &str) -> Result<na::Isometry2<f64>, Error> { self.pose_subscriber.wait_message(100); let pose_with_cov_stamped = self.pose_subscriber .get()? .ok_or_else(|| Error::Connection { message: format!("Failed to get pose from {}", self.amcl_pose_topic_name), })?; let pose: na::Isometry3<f64> = pose_with_cov_stamped.pose.pose.into(); Ok(na::Isometry2::new( na::Vector2::new(pose.translation.vector[0], pose.translation.vector[1]), pose.rotation.euler_angles().2, )) }
function_block-function_prefix_line
[ { "content": "/// Replaces the contents of the specified TOML document based on the specified scripts,\n\n/// returning edited document as string.\n\n///\n\n/// See [`overwrite`] for more.\n\npub fn overwrite_str(doc: &str, scripts: &str) -> Result<String> {\n\n let mut doc: toml::Value = toml::from_str(doc)?;\n\n overwrite(&mut doc, scripts)?;\n\n Ok(toml::to_string(&doc)?)\n\n}\n\n\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 0, "score": 265520.0204061151 }, { "content": "/// Evaluates the given string and returns a concatenated string of the results.\n\n///\n\n/// # Syntax\n\n///\n\n/// Command:\n\n///\n\n/// ```text\n\n/// $(...)\n\n/// ```\n\n///\n\n/// Environment variable:\n\n///\n\n/// ```text\n\n/// ${...}\n\n/// ```\n\n///\n\n/// # Note\n\n///\n\n/// Nesting and escaping are not supported yet.\n\npub fn evaluate(mut s: &str, current_dir: Option<&Path>) -> Result<String> {\n\n let mut out = String::new();\n\n\n\n loop {\n\n match s.find('$') {\n\n Some(pos) => {\n\n out.push_str(&s[..pos]);\n\n s = &s[pos..];\n\n }\n\n None => {\n\n out.push_str(s);\n\n break;\n\n }\n\n }\n\n match s.as_bytes().get(1) {\n\n Some(b'(') => {\n\n let end = match s.find(')') {\n\n Some(end) => end,\n\n None => bail!(\"unclosed command literal {s:?}\"),\n\n };\n", "file_path": "openrr-config/src/evaluate.rs", "rank": 3, "score": 226726.86893478542 }, { "content": "#[allow(dead_code)]\n\npub fn assert_success_and_output_containing(output: Output, expected: &str) {\n\n assert!(\n\n output.status.success(),\n\n \"STDERR: {}\",\n\n from_utf8(&output.stderr).unwrap_or(\"not valid UTF-8\"),\n\n );\n\n let stdout = output.stdout;\n\n assert!(\n\n bytes_contain(&stdout, expected.as_bytes()),\n\n \"expected: {}, STDOUT: {}\",\n\n expected,\n\n from_utf8(&stdout).unwrap_or(\"not valid UTF-8\")\n\n );\n\n}\n\n\n\n/// # initialize roscore, rosrust\n\n///\n\n/// ``roscore`` and rosrust is running only one.\n\n/// This function enable to run test using roscore.\n\n/// To 
strict call once its parts.\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 4, "score": 215548.60528707554 }, { "content": "pub fn bytes_contain(sequence: &[u8], subsequence: &[u8]) -> bool {\n\n sequence\n\n .windows(subsequence.len())\n\n .any(|window| window == subsequence)\n\n}\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 5, "score": 215127.44454236198 }, { "content": "fn default_clear_costmap_service_name() -> String {\n\n CLEAR_COSTMAP_SERVICE.to_string()\n\n}\n", "file_path": "arci-ros/src/ros_nav_client.rs", "rank": 6, "score": 209635.7946600134 }, { "content": "fn default_nomotion_update_service_name() -> String {\n\n NO_MOTION_UPDATE_SERVICE.to_string()\n\n}\n\n\n", "file_path": "arci-ros/src/ros_nav_client.rs", "rank": 7, "score": 209635.7946600134 }, { "content": "pub fn run_roscore_and_rosrust_init_once(init_name: &str) -> Arc<ChildProcessTerminator> {\n\n use once_cell::sync::{Lazy, OnceCell};\n\n\n\n static ONCE: Once = Once::new();\n\n static PORT: Lazy<u32> = Lazy::new(|| {\n\n portpicker::pick_unused_port()\n\n .expect(\"No ports free\")\n\n .into()\n\n });\n\n\n\n // static memory is not guaranteed to be dropped.\n\n // if it isn't be dropped, ``roscore`` do not down and is running after test.\n\n // Therefore, having weak reference(which cannot live without strong reference).\n\n static ROSCORE_STATIC: OnceCell<RwLock<Weak<ChildProcessTerminator>>> = OnceCell::new();\n\n // keep strong reference at least one\n\n let mut roscore_strong: Option<Arc<ChildProcessTerminator>> = None;\n\n\n\n ONCE.call_once(|| {\n\n let roscore_terminator = run_roscore(*PORT);\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 8, "score": 206864.1995769607 }, { "content": "fn default_move_base_action_base_name() -> String {\n\n MOVE_BASE_ACTION.to_string()\n\n}\n\n\n", "file_path": "arci-ros/src/ros_nav_client.rs", "rank": 9, "score": 206339.94652330672 }, { "content": "fn from_nalgebra(na_q: &na::UnitQuaternion<f64>) -> [f64; 4] {\n\n let 
mut q = [0.0; 4];\n\n q[0] = na_q.w;\n\n q[1] = na_q.i;\n\n q[2] = na_q.j;\n\n q[3] = na_q.k;\n\n q\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/utils.rs", "rank": 10, "score": 206258.9089507749 }, { "content": "fn convert_query(s: &str) -> Result<String> {\n\n let mut out = String::with_capacity(s.len());\n\n let mut chars = s.char_indices().peekable();\n\n while let Some((_, ch)) = chars.next() {\n\n match ch {\n\n '\"' | '\\'' => {\n\n let end = parse_string_literal(&mut out, ch, &mut chars);\n\n assert!(end);\n\n }\n\n '[' => {\n\n if !out.ends_with('.') {\n\n out.push('.');\n\n }\n\n out.push(ch);\n\n }\n\n _ => out.push(ch),\n\n }\n\n }\n\n\n\n Ok(out)\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 11, "score": 200096.66118647225 }, { "content": "#[cfg(windows)]\n\nfn run_local_command(message: &str) -> io::Result<()> {\n\n // TODO: Ideally, it would be more efficient to use SAPI directly via winapi or something.\n\n // https://stackoverflow.com/questions/1040655/ms-speech-from-command-line\n\n let cmd = format!(\"PowerShell -Command \\\"Add-Type –AssemblyName System.Speech; (New-Object System.Speech.Synthesis.SpeechSynthesizer).Speak('{message}');\\\"\");\n\n let status = Command::new(\"powershell\").arg(cmd).status()?;\n\n\n\n if status.success() {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"failed to run `powershell` with message {message:?}\"),\n\n ))\n\n }\n\n}\n", "file_path": "arci-speak-cmd/src/lib.rs", "rank": 12, "score": 199250.56538863367 }, { "content": "/// Parse args to get self collision pair\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// let pairs = openrr_planner::collision::parse_colon_separated_pairs(&vec![\"ab:cd\".to_owned(), \"ab:ef\".to_owned()]).unwrap();\n\n/// assert_eq!(pairs.len(), 2);\n\n/// assert_eq!(pairs[0].0, \"ab\");\n\n/// assert_eq!(pairs[0].1, \"cd\");\n\n/// assert_eq!(pairs[1].0, \"ab\");\n\n/// assert_eq!(pairs[1].1, \"ef\");\n\n/// ```\n\npub fn 
parse_colon_separated_pairs(pair_strs: &[String]) -> Result<Vec<(String, String)>> {\n\n let mut pairs = Vec::new();\n\n for pair_str in pair_strs {\n\n let mut sp = pair_str.split(':');\n\n if let Some(p1) = sp.next() {\n\n if let Some(p2) = sp.next() {\n\n pairs.push((p1.to_owned(), p2.to_owned()));\n\n } else {\n\n return Err(Error::ParseError(pair_str.to_owned()));\n\n }\n\n } else {\n\n return Err(Error::ParseError(pair_str.to_owned()));\n\n }\n\n }\n\n Ok(pairs)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::parse_colon_separated_pairs;\n", "file_path": "openrr-planner/src/collision/collision_detector.rs", "rank": 13, "score": 199112.97182958794 }, { "content": "fn to_nalgebra(q: &[f64; 4]) -> na::UnitQuaternion<f64> {\n\n na::UnitQuaternion::from_quaternion(na::Quaternion::new(q[0], q[1], q[2], q[3]))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use assert_approx_eq::assert_approx_eq;\n\n\n\n use super::*;\n\n #[test]\n\n fn test_euler_quaternion() {\n\n const R: f64 = 0.5;\n\n const P: f64 = -0.2;\n\n const Y: f64 = 1.0;\n\n let q = quaternion_from_euler_angles(R, P, Y);\n\n let angles = euler_angles_from_quaternion(&q);\n\n assert_approx_eq!(angles.0, R);\n\n assert_approx_eq!(angles.1, P);\n\n assert_approx_eq!(angles.2, Y);\n\n let q2 = quaternion_from_euler_angles(angles.0, angles.1, angles.2);\n\n assert_approx_eq!(q[0], q2[0]);\n\n assert_approx_eq!(q[1], q2[1]);\n\n assert_approx_eq!(q[2], q2[2]);\n\n assert_approx_eq!(q[3], q2[3]);\n\n }\n\n}\n", "file_path": "arci-urdf-viz/src/utils.rs", "rank": 14, "score": 197835.8056104098 }, { "content": "// (node_name, action_name)\n\nfn node_and_action_name() -> (String, String) {\n\n static COUNT: AtomicUsize = AtomicUsize::new(0);\n\n let n = COUNT.fetch_add(1, Ordering::SeqCst);\n\n let node_name = format!(\"test_nav2_node_{n}\");\n\n let action_name = format!(\"/test_nav2_{n}\");\n\n (node_name, action_name)\n\n}\n\n\n", "file_path": "arci-ros2/tests/test_navigation.rs", "rank": 15, "score": 
193135.45615900538 }, { "content": "fn evaluate_overwrite_str(doc: &str, scripts: &str, path: Option<&Path>) -> Result<String> {\n\n if path.and_then(Path::extension).and_then(OsStr::to_str) == Some(\"toml\") {\n\n if let Err(e) = toml::from_str::<toml::Value>(doc) {\n\n warn!(\n\n \"config {} is not valid toml: {}\",\n\n path.unwrap().display(),\n\n e\n\n );\n\n }\n\n }\n\n openrr_config::overwrite_str(\n\n &openrr_config::evaluate(doc, None)?,\n\n &openrr_config::evaluate(scripts, None)?,\n\n )\n\n}\n\n\n", "file_path": "openrr-apps/src/utils.rs", "rank": 16, "score": 192920.2426835539 }, { "content": "pub fn get_joint_index<J>(joint_trajectory_client: &J, joint_name: &str) -> Result<usize, Error>\n\nwhere\n\n J: JointTrajectoryClient,\n\n{\n\n joint_trajectory_client\n\n .joint_names()\n\n .iter()\n\n .position(|name| name == joint_name)\n\n .ok_or_else(|| Error::NoJoint(joint_name.to_owned()))\n\n}\n\n\n\n#[allow(clippy::too_many_arguments)]\n\npub async fn move_joint_until_stop<J>(\n\n joint_trajectory_client: &J,\n\n joint_name: &str,\n\n target_position: f64,\n\n target_duration: Duration,\n\n diff_threshold_for_stop: f64,\n\n stopped_duration: Duration,\n\n monitor_interval: Duration,\n", "file_path": "arci/src/utils.rs", "rank": 17, "score": 191083.04801957606 }, { "content": "// (node_name, action_name)\n\nfn node_and_action_name() -> (String, String) {\n\n static COUNT: AtomicUsize = AtomicUsize::new(0);\n\n let n = COUNT.fetch_add(1, Ordering::SeqCst);\n\n let node_name = format!(\"test_ros2_control_node_{n}\");\n\n let action_name = format!(\"/test_ros2_control_{n}\");\n\n (node_name, action_name)\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\")]\n\nasync fn test_control() {\n\n let (node_name, action_name) = &node_and_action_name();\n\n let ctx = r2r::Context::create().unwrap();\n\n\n\n let node = Arc::new(Mutex::new(\n\n r2r::Node::create(ctx.clone(), node_name, \"arci_ros2\").unwrap(),\n\n ));\n\n let server_requests = node\n\n .lock()\n\n 
.create_action_server::<FollowJointTrajectory::Action>(&format!(\n\n \"{action_name}/follow_joint_trajectory\"\n", "file_path": "arci-ros2/tests/test_ros2_control.rs", "rank": 18, "score": 190596.466800985 }, { "content": "/// Do something needed to start the program\n\npub fn init(name: &str, config: &RobotConfig) {\n\n #[cfg(feature = \"ros\")]\n\n if config.has_ros_clients() {\n\n arci_ros::init(name);\n\n }\n\n debug!(\"init {name} with {config:?}\");\n\n}\n\n\n", "file_path": "openrr-apps/src/utils.rs", "rank": 19, "score": 189072.66794681543 }, { "content": "/// Do something needed to start the program for multiple\n\npub fn init_with_anonymize(name: &str, config: &RobotConfig) {\n\n let suffix: u64 = rand::thread_rng().gen();\n\n let anon_name = format!(\"{name}_{suffix}\");\n\n init(&anon_name, config);\n\n}\n\n\n", "file_path": "openrr-apps/src/utils.rs", "rank": 20, "score": 186448.91215796856 }, { "content": "fn default_mode() -> String {\n\n \"pose\".to_string()\n\n}\n\n\n", "file_path": "openrr-teleop/src/joints_pose_sender.rs", "rank": 21, "score": 182355.3191974618 }, { "content": "fn evaluate(doc: &str, path: Option<&Path>) -> Result<String> {\n\n if path.and_then(Path::extension).and_then(OsStr::to_str) == Some(\"toml\") {\n\n if let Err(e) = toml::from_str::<toml::Value>(doc) {\n\n warn!(\n\n \"config {} is not valid toml: {}\",\n\n path.unwrap().display(),\n\n e\n\n );\n\n }\n\n }\n\n openrr_config::evaluate(doc, None)\n\n}\n\n\n", "file_path": "openrr-apps/src/utils.rs", "rank": 22, "score": 180249.19635498308 }, { "content": "fn rostopic_listing_succeeds() -> bool {\n\n Command::new(\"rostopic\")\n\n .arg(\"list\")\n\n .output()\n\n .unwrap()\n\n .status\n\n .success()\n\n}\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 23, "score": 180007.02997466282 }, { "content": "/// Replaces the contents of the specified TOML document based on the specified scripts.\n\n///\n\n/// You can specify multiple scripts at once (newline-separated or 
semicolon-separated).\n\n/// Empty scripts, and leading and trailing separators will be ignored.\n\n///\n\n/// # Set operation\n\n///\n\n/// Syntax:\n\n///\n\n/// ```text\n\n/// <key> = <value>\n\n/// ```\n\n///\n\n/// - If the specified key or array index exists, replace its value.\n\n/// - If the specified key does not exist, create the specified key and value.\n\n/// - If the specified array index does not exist, append the specified value to the array.\n\n/// - If the intermediate data structures do not exist, create them.\n\n///\n\n/// # Delete operation\n\n///\n\n/// Syntax:\n\n///\n\n/// ```text\n\n/// <key> =\n\n/// ```\n\n///\n\n/// - Deletes the specified key and its value or specified array element.\n\n/// - If the specified key or array index does not exist, it will be ignored.\n\npub fn overwrite(doc: &mut Value, scripts: &str) -> Result<()> {\n\n let scripts = parse_scripts(scripts)?;\n\n\n\n for script in scripts {\n\n let query = &script.query;\n\n let old = doc.read_mut(query)?;\n\n let exists = old.is_some();\n\n let is_structure = matches!(&old, Some(r) if r.is_table() || r.is_array());\n\n match script.operation {\n\n Operation::Set(value) => {\n\n // TODO:\n\n // - Workaround for toml-query bug: https://docs.rs/toml-query/0.10/toml_query/insert/trait.TomlValueInsertExt.html#known-bugs\n\n // - Validate that the query points to a valid configuration.\n\n debug!(?query, ?value, \"executing insert operation\");\n\n doc.insert(query, value)?;\n\n }\n\n Operation::Delete => {\n\n if !exists {\n\n debug!(\n\n ?query,\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 24, "score": 179245.21457258845 }, { "content": "fn map_connection_error<E: fmt::Display>(url: &Url) -> impl FnOnce(E) -> arci::Error + '_ {\n\n move |e: E| arci::Error::Connection {\n\n message: format!(\"url:{url}: {e}\"),\n\n }\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/utils.rs", "rank": 25, "score": 177271.64147596288 }, { "content": "/// Returns a map of clients for each 
config.\n\n///\n\n/// The key for the map is [the name of the client](UrdfVizWebClientConfig::name),\n\n/// and in case of conflict, it becomes an error.\n\n///\n\n/// Returns empty map when `configs` are empty.\n\npub fn create_joint_trajectory_clients(\n\n configs: Vec<UrdfVizWebClientConfig>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error> {\n\n create_joint_trajectory_clients_inner(configs, urdf_robot, false)\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 26, "score": 175387.3598608282 }, { "content": "fn post<T: Serialize, U: DeserializeOwned>(url: Url, msg: T) -> Result<U, arci::Error> {\n\n ureq::post(url.as_str())\n\n .send_json(serde_json::to_value(msg).unwrap())\n\n .map_err(map_connection_error(&url))?\n\n .into_json()\n\n .map_err(map_connection_error(&url))\n\n}\n\n\n\npub(crate) fn get_joint_positions(base_url: &Url) -> Result<JointState, arci::Error> {\n\n get(base_url.join(\"get_joint_positions\").unwrap())?\n\n .into_json()\n\n .map_err(map_connection_error(base_url))\n\n}\n\n\n\npub(crate) fn send_joint_positions(\n\n base_url: &Url,\n\n joint_state: JointState,\n\n) -> Result<(), arci::Error> {\n\n let res: RpcResult = post(base_url.join(\"set_joint_positions\").unwrap(), joint_state)?;\n\n if !res.is_ok {\n", "file_path": "arci-urdf-viz/src/utils.rs", "rank": 27, "score": 174821.0613471502 }, { "content": "pub fn init_tracing_with_file_appender(config: LogConfig, name: String) -> WorkerGuard {\n\n let default_level = match config.level {\n\n LogLevel::TRACE => Level::TRACE,\n\n LogLevel::DEBUG => Level::DEBUG,\n\n LogLevel::INFO => Level::INFO,\n\n LogLevel::WARN => Level::WARN,\n\n LogLevel::ERROR => Level::ERROR,\n\n };\n\n let rotation = match config.rotation {\n\n LogRotation::MINUTELY => Rotation::MINUTELY,\n\n LogRotation::HOURLY => Rotation::HOURLY,\n\n LogRotation::DAILY => Rotation::DAILY,\n\n LogRotation::NEVER => Rotation::NEVER,\n\n };\n\n let 
formatter = OpenrrFormatter::new(name);\n\n let file_appender = RollingFileAppender::new(rotation, config.directory, config.prefix);\n\n let (file_writer, guard) = tracing_appender::non_blocking(file_appender);\n\n let subscriber = tracing_subscriber::registry()\n\n .with(EnvFilter::from_default_env().add_directive(default_level.into()))\n\n .with(\n", "file_path": "openrr-apps/src/utils.rs", "rank": 28, "score": 173131.43494645192 }, { "content": "/// # To copy joint name and position between `from` and `to`\n\n///\n\n/// Copy position of same joint name.\n\n/// This function returns Ok() or Err().\n\n///\n\n/// # When this function through Error?\n\n///\n\n/// length of joint names and positions is difference.\n\n///\n\n/// # Sample code\n\n///\n\n/// ```\n\n/// use arci::copy_joint_positions;\n\n///\n\n/// let from_positions = vec![2.1_f64, 4.8, 1.0, 6.5];\n\n/// let from_joint_names = vec![\n\n/// String::from(\"part1\"),\n\n/// String::from(\"part2\"),\n\n/// String::from(\"part3\"),\n\n/// String::from(\"part4\"),\n\n/// ];\n\n///\n\n/// let mut to_positions = vec![3.3_f64, 8.1];\n\n/// let to_joint_names = vec![\n\n/// String::from(\"part4\"),\n\n/// String::from(\"part1\"),\n\n/// ];\n\n///\n\n/// copy_joint_positions(\n\n/// &from_joint_names,\n\n/// &from_positions,\n\n/// &to_joint_names,\n\n/// &mut to_positions,\n\n/// ).unwrap();\n\n/// ```\n\npub fn copy_joint_positions(\n\n from_joint_names: &[String],\n\n from_positions: &[f64],\n\n to_joint_names: &[String],\n\n to_positions: &mut [f64],\n\n) -> Result<(), Error> {\n\n if from_joint_names.len() != from_positions.len() || to_joint_names.len() != to_positions.len()\n\n {\n\n return Err(Error::CopyJointError(\n\n from_joint_names.to_vec(),\n\n from_positions.to_vec(),\n\n to_joint_names.to_vec(),\n\n to_positions.to_vec(),\n\n ));\n\n }\n\n for (to_index, to_joint_name) in to_joint_names.iter().enumerate() {\n\n if let Some(from_index) = from_joint_names.iter().position(|x| x == to_joint_name) {\n\n 
to_positions[to_index] = from_positions[from_index];\n\n }\n\n }\n", "file_path": "arci/src/clients/partial_joint_trajectory_client.rs", "rank": 29, "score": 172718.4818910883 }, { "content": "/// Returns a map of clients that will be created lazily for each config.\n\n///\n\n/// See [create_joint_trajectory_clients] for more.\n\npub fn create_joint_trajectory_clients_lazy(\n\n configs: Vec<UrdfVizWebClientConfig>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error> {\n\n create_joint_trajectory_clients_inner(configs, urdf_robot, true)\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 30, "score": 172697.98797835893 }, { "content": "fn is_str(ty: &syn::Type) -> bool {\n\n if let syn::Type::Reference(ty) = ty {\n\n if let Some(path) = get_ty_path(&ty.elem) {\n\n if path.is_ident(\"str\") {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "tools/codegen/src/main.rs", "rank": 31, "score": 172259.26892091625 }, { "content": "#[derive(Default)]\n\nstruct DebugLocalization {}\n\n\n\nimpl arci::Localization for DebugLocalization {\n\n fn current_pose(&self, frame_id: &str) -> Result<arci::Isometry2<f64>, arci::Error> {\n\n println!(\"Server received Localization::current_pose (frame_id: {frame_id:?})\");\n\n Ok(arci::Isometry2::new(arci::Vector2::new(0.0, 0.0), 0.0))\n\n }\n\n}\n\n\n", "file_path": "openrr-remote/examples/server.rs", "rank": 32, "score": 168332.078049941 }, { "content": "pub fn create_self_collision_checker<P: AsRef<Path>>(\n\n urdf_path: P,\n\n self_collision_check_pairs: &[String],\n\n config: &SelfCollisionCheckerConfig,\n\n full_chain: Arc<k::Chain<f64>>,\n\n) -> SelfCollisionChecker<f64> {\n\n SelfCollisionChecker::new(\n\n full_chain,\n\n CollisionDetector::from_urdf_robot(\n\n &urdf_rs::utils::read_urdf_or_xacro(urdf_path).unwrap(),\n\n config.prediction,\n\n ),\n\n parse_colon_separated_pairs(self_collision_check_pairs).unwrap(),\n\n 
config.time_interpolate_rate,\n\n )\n\n}\n\n\n", "file_path": "openrr-planner/src/collision/self_collision_checker.rs", "rank": 33, "score": 167438.6492657533 }, { "content": "/// Returns a map of clients for each builder.\n\n///\n\n/// The key for the map is [the name of the client](RosControlClientBuilder::name),\n\n/// and in case of conflict, it becomes an error.\n\n///\n\n/// Returns empty map when `builders` are empty.\n\npub fn create_joint_trajectory_clients<B>(\n\n builders: Vec<B>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error>\n\nwhere\n\n B: RosControlClientBuilder,\n\n{\n\n create_joint_trajectory_clients_inner(builders, urdf_robot, false)\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/utils.rs", "rank": 34, "score": 166252.06067305309 }, { "content": "pub fn port_and_url() -> (u16, Url) {\n\n let port = portpicker::pick_unused_port().expect(\"No ports free\");\n\n let url = Url::parse(&format!(\"http://127.0.0.1:{port}\")).unwrap();\n\n (port, url)\n\n}\n\n\n\n#[easy_ext::ext]\n\npub impl WebServer {\n\n fn start_background(self) {\n\n let handle = self.handle();\n\n std::thread::spawn(move || {\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(async move { self.bind().await.unwrap() })\n\n });\n\n std::thread::spawn(move || loop {\n\n if let Some(positions) = handle.take_target_joint_positions() {\n\n *handle.current_joint_positions() = positions;\n", "file_path": "arci-urdf-viz/tests/util/mod.rs", "rank": 35, "score": 165399.87818759496 }, { "content": "/// Returns a map of clients that will be created lazily for each builder.\n\n///\n\n/// See [create_joint_trajectory_clients] for more.\n\npub fn create_joint_trajectory_clients_lazy<B>(\n\n builders: Vec<B>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error>\n\nwhere\n\n B: 
RosControlClientBuilder,\n\n{\n\n create_joint_trajectory_clients_inner(builders, urdf_robot, true)\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/utils.rs", "rank": 36, "score": 163797.07988020987 }, { "content": "pub fn wrap_joint_trajectory_client(\n\n config: JointTrajectoryClientWrapperConfig,\n\n client: Arc<dyn JointTrajectoryClient>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<Arc<dyn JointTrajectoryClient>, arci::Error> {\n\n Ok(if config.wrap_with_joint_velocity_limiter {\n\n if config.wrap_with_joint_position_limiter {\n\n if config.wrap_with_joint_position_difference_limiter {\n\n let client = new_joint_position_difference_limiter(\n\n client,\n\n config.joint_position_difference_limits,\n\n )?;\n\n let client =\n\n new_joint_velocity_limiter(client, config.joint_velocity_limits, urdf_robot)?;\n\n let client = new_joint_position_limiter(\n\n client,\n\n config.joint_position_limits,\n\n config.joint_position_limiter_strategy,\n\n urdf_robot,\n\n )?;\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 37, "score": 163097.55117302458 }, { "content": "#[auto_impl(Box, Arc)]\n\npub trait Localization: Send + Sync {\n\n fn current_pose(&self, frame_id: &str) -> Result<Isometry2<f64>, Error>;\n\n}\n", "file_path": "arci/src/traits/localization.rs", "rank": 38, "score": 162774.2031822803 }, { "content": "/// # subscribe ROS message helper\n\n///\n\n/// using for inspect specific massage type.\n\n/// Message is displayed on screen and sent to `mpsc receiver`\n\n///\n\n/// # Panic!\n\n///\n\n/// If subscriber can't be construct, this function is panic.\n\n/// Or if ``Roscore`` is not up, could be panic.\n\n///\n\npub fn subscribe_with_channel<T: rosrust::Message>(\n\n topic_name: &str,\n\n queue_size: usize,\n\n) -> (flume::Receiver<T>, rosrust::Subscriber) {\n\n let (tx, rx) = flume::unbounded::<T>();\n\n\n\n let sub = rosrust::subscribe(topic_name, queue_size, move |v: T| {\n\n 
tx.send(v).unwrap();\n\n })\n\n .unwrap();\n\n\n\n (rx, sub)\n\n}\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 39, "score": 162390.46053543268 }, { "content": "#[derive(Debug, Parser)]\n\nstruct Args {\n\n message: String,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), arci::Error> {\n\n tracing_subscriber::fmt::init();\n\n let args = Args::parse();\n\n\n\n let speaker = LocalCommand::default();\n\n let wait = speaker.speak(&args.message)?;\n\n wait.await\n\n}\n", "file_path": "arci-speak-cmd/examples/local_command.rs", "rank": 40, "score": 161963.62713692855 }, { "content": "pub fn load_command_file_and_filter(file_path: PathBuf) -> Result<Vec<String>, OpenrrCommandError> {\n\n let file = File::open(&file_path)\n\n .map_err(|e| OpenrrCommandError::CommandFileOpenFailure(file_path, e.to_string()))?;\n\n let buf = BufReader::new(file);\n\n Ok(buf\n\n .lines()\n\n .map(|line| line.expect(\"Could not parse line\"))\n\n .filter(|command| {\n\n let command_parsed_iter = command.split_whitespace();\n\n // Ignore empty lines and comment lines\n\n command_parsed_iter.clone().count() > 0\n\n && command_parsed_iter.clone().next().unwrap().find('#') == None\n\n })\n\n .collect())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn test_parse_joints() {\n\n let val: (usize, usize) = parse_joints(\"0=2\").unwrap();\n\n assert_eq!(val.0, 0);\n\n assert_eq!(val.1, 2);\n\n }\n\n}\n", "file_path": "openrr-command/src/robot_command.rs", "rank": 41, "score": 161753.39851823222 }, { "content": "fn should_interpolate_joint_trajectory(trajectory: &[TrajectoryPoint]) -> bool {\n\n if trajectory.is_empty() {\n\n return false;\n\n };\n\n match trajectory.iter().position(|p| p.velocities.is_some()) {\n\n Some(first_index_of_valid_velocity) => {\n\n let last_index = trajectory.len() - 1;\n\n if first_index_of_valid_velocity != last_index {\n\n false\n\n } else {\n\n return !trajectory[last_index]\n\n .velocities\n\n .as_ref()\n\n 
.unwrap()\n\n .iter()\n\n .any(|x| x.abs() > ZERO_VELOCITY_THRESHOLD);\n\n }\n\n }\n\n None => true,\n\n }\n\n}\n\n\n", "file_path": "arci/src/clients/joint_position_difference_limiter.rs", "rank": 42, "score": 159635.87923646282 }, { "content": "pub fn run_roscore(port: u32) -> ChildProcessTerminator {\n\n println!(\"Running roscore on port: {port}\");\n\n env::set_var(\"ROS_MASTER_URI\", format!(\"http://localhost:{port}\"));\n\n while !portpicker::is_free(port as u16) {\n\n println!(\"Waiting port={port}\");\n\n sleep(Duration::from_millis(100));\n\n }\n\n let roscore =\n\n ChildProcessTerminator::spawn(Command::new(\"roscore\").arg(\"-p\").arg(format!(\"{port}\")));\n\n await_roscore();\n\n roscore\n\n}\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 43, "score": 157373.97033481137 }, { "content": "pub fn find_nodes(joint_names: &[String], chain: &k::Chain<f64>) -> Option<Vec<k::Node<f64>>> {\n\n let mut nodes = vec![];\n\n for name in joint_names {\n\n if let Some(node) = chain.find(name) {\n\n nodes.push(node.clone());\n\n } else {\n\n return None;\n\n }\n\n }\n\n Some(nodes)\n\n}\n", "file_path": "openrr-client/src/utils.rs", "rank": 44, "score": 154607.9116837965 }, { "content": "#[test]\n\nfn test_each_condition_clone() {\n\n let c1 = EachJointDiffCondition::new(vec![1.0, 0.1], 0.1);\n\n let c2 = c1.clone();\n\n assert_approx_eq!(c2.allowable_errors[0], 1.0);\n\n assert_approx_eq!(c2.allowable_errors[1], 0.1);\n\n assert_approx_eq!(c2.timeout_sec, 0.1);\n\n assert_approx_eq!(c1.allowable_errors[0], 1.0);\n\n assert_approx_eq!(c1.allowable_errors[1], 0.1);\n\n assert_approx_eq!(c1.timeout_sec, 0.1);\n\n}\n", "file_path": "arci/tests/test_waits.rs", "rank": 45, "score": 153675.39224428937 }, { "content": "#[test]\n\nfn test_each_condition_debug() {\n\n let c1 = EachJointDiffCondition::new(vec![1.0, 0.1], 0.1);\n\n assert_eq!(\n\n format!(\"{c1:?}\"),\n\n \"EachJointDiffCondition { allowable_errors: [1.0, 0.1], timeout_sec: 0.1 }\"\n\n 
);\n\n}\n\n\n", "file_path": "arci/tests/test_waits.rs", "rank": 46, "score": 153641.80845820004 }, { "content": "pub fn convert_ros_time_to_system_time(time: &Time) -> SystemTime {\n\n let ros_now = rosrust::now();\n\n let system_now = SystemTime::now();\n\n let ros_time_nanos = time.nanos() as u64;\n\n let ros_now_nanos = ros_now.nanos() as u64;\n\n // from_nanos needs u64 as input\n\n // https://doc.rust-lang.org/stable/std/time/struct.Duration.html#method.from_nanos\n\n if ros_now_nanos < ros_time_nanos {\n\n system_now\n\n .checked_add(std::time::Duration::from_nanos(\n\n ros_time_nanos - ros_now_nanos,\n\n ))\n\n .unwrap()\n\n } else {\n\n system_now\n\n .checked_sub(std::time::Duration::from_nanos(\n\n ros_now_nanos - ros_time_nanos,\n\n ))\n\n .unwrap()\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 47, "score": 152785.77561173376 }, { "content": "pub fn convert_system_time_to_ros_time(time: &SystemTime) -> Time {\n\n let ros_now = rosrust::now();\n\n let system_now = SystemTime::now();\n\n\n\n // compare time to avoid SystemTimeError\n\n // https://doc.rust-lang.org/std/time/struct.SystemTime.html#method.duration_since\n\n if system_now < *time {\n\n Time::from_nanos(\n\n time.duration_since(system_now).unwrap().as_nanos() as i64 + ros_now.nanos() as i64,\n\n )\n\n } else {\n\n Time::from_nanos(\n\n ros_now.nanos() as i64 - system_now.duration_since(*time).unwrap().as_nanos() as i64,\n\n )\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 48, "score": 152785.77561173376 }, { "content": "pub fn wait_subscriber<T>(publisher: &rosrust::Publisher<T>)\n\nwhere\n\n T: rosrust::Message,\n\n{\n\n let rate = rosrust::rate(10.0);\n\n while rosrust::is_ok() && publisher.subscriber_count() == 0 {\n\n rate.sleep();\n\n }\n\n // one more to avoid `rostopic echo`\n\n rate.sleep();\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 49, "score": 152286.985528029 }, { "content": "#[test]\n\nfn 
test_total_condition_clone() {\n\n let c1 = TotalJointDiffCondition::new(1.0, 0.1);\n\n let c2 = c1.clone();\n\n assert_approx_eq!(c2.allowable_error, 1.0);\n\n assert_approx_eq!(c2.timeout_sec, 0.1);\n\n assert_approx_eq!(c1.allowable_error, 1.0);\n\n assert_approx_eq!(c1.timeout_sec, 0.1);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_each_condition() {\n\n let client = DummyJointTrajectoryClient::new(vec![\"j1\".to_owned(), \"j2\".to_owned()]);\n\n let c1 = EachJointDiffCondition::new(vec![1.0, 0.1], 0.1);\n\n assert!(c1.wait(&client, &[0.0, 0.0], 1.0).await.is_ok());\n\n assert!(c1.wait(&client, &[0.5, 0.0], 1.0).await.is_ok());\n\n assert!(c1.wait(&client, &[-0.5, 0.0], 1.0).await.is_ok());\n\n assert!(c1.wait(&client, &[-1.5, 0.0], 1.0).await.is_err());\n\n assert!(c1.wait(&client, &[-0.5, 0.2], 1.0).await.is_err());\n\n client\n\n .send_joint_positions(vec![3.0, -10.0], std::time::Duration::from_millis(100))\n", "file_path": "arci/tests/test_waits.rs", "rank": 50, "score": 150564.34890079405 }, { "content": "#[test]\n\nfn test_total_condition_debug() {\n\n let c1 = TotalJointDiffCondition::new(1.0, 0.1);\n\n assert_eq!(\n\n format!(\"{c1:?}\"),\n\n \"TotalJointDiffCondition { allowable_error: 1.0, timeout_sec: 0.1 }\"\n\n );\n\n}\n\n\n", "file_path": "arci/tests/test_waits.rs", "rank": 51, "score": 150531.59887260012 }, { "content": "fn header() -> String {\n\n concat!(\n\n \"// This file is @generated by \",\n\n env!(\"CARGO_BIN_NAME\"),\n\n \".\\n\",\n\n \"// It is not intended for manual editing.\\n\",\n\n \"\\n\",\n\n \"#![allow(unused_variables)]\\n\",\n\n \"#![allow(clippy::useless_conversion, clippy::unit_arg)]\\n\",\n\n \"\\n\",\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "tools/codegen/src/main.rs", "rank": 52, "score": 147790.47463726957 }, { "content": "#[allow(dead_code)]\n\npub fn run_roscore_for(language: Language, feature: Feature) -> ChildProcessTerminator {\n\n run_roscore(generate_port(language, 
feature))\n\n}\n\n\n\n#[allow(dead_code)]\n\npub enum Language {\n\n None,\n\n Cpp,\n\n Python,\n\n Rust,\n\n Shell,\n\n Multi,\n\n}\n\n\n\nimpl Language {\n\n #[allow(dead_code)]\n\n fn get_offset(&self) -> u32 {\n\n match self {\n\n Language::None => 1,\n\n Language::Cpp => 2,\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 53, "score": 147698.79080495142 }, { "content": "fn default_prediction() -> f64 {\n\n 0.001\n\n}\n\n\n", "file_path": "openrr-planner/src/collision/self_collision_checker.rs", "rank": 54, "score": 145483.0893080782 }, { "content": "pub fn init_tracing() {\n\n tracing_subscriber::fmt()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .with_writer(io::stderr)\n\n .init();\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct OpenrrFormatter {\n\n formatter: Format,\n\n name: String,\n\n}\n\n\n\nimpl OpenrrFormatter {\n\n fn new(name: String) -> Self {\n\n Self {\n\n formatter: tracing_subscriber::fmt::format(),\n\n name,\n\n }\n\n }\n", "file_path": "openrr-apps/src/utils.rs", "rank": 55, "score": 144186.3442570003 }, { "content": "fn new_joint_velocity_limiter<C>(\n\n client: C,\n\n velocity_limits: Option<Vec<f64>>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointVelocityLimiter<C>, arci::Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match velocity_limits {\n\n Some(velocity_limits) => Ok(JointVelocityLimiter::new(client, velocity_limits)),\n\n None => JointVelocityLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 56, "score": 142770.91090272952 }, { "content": "fn new_joint_position_limiter<C>(\n\n client: C,\n\n position_limits: Option<Vec<JointPositionLimit>>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointPositionLimiter<C>, arci::Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match position_limits {\n\n Some(position_limits) => Ok(JointPositionLimiter::new(client, position_limits)),\n\n None => 
JointPositionLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 57, "score": 142770.91090272952 }, { "content": "pub fn resolve_teleop_config(\n\n config_path: Option<&Path>,\n\n overwrite: Option<&str>,\n\n) -> Result<RobotTeleopConfig> {\n\n match (config_path, overwrite) {\n\n (Some(teleop_config_path), Some(overwrite)) => {\n\n let s = &fs::read_to_string(teleop_config_path)?;\n\n let s = &evaluate_overwrite_str(s, overwrite, Some(teleop_config_path))?;\n\n Ok(RobotTeleopConfig::from_str(s, teleop_config_path)?)\n\n }\n\n (Some(teleop_config_path), None) => {\n\n let s = &evaluate(\n\n &fs::read_to_string(teleop_config_path)?,\n\n Some(teleop_config_path),\n\n )?;\n\n Ok(RobotTeleopConfig::from_str(s, teleop_config_path)?)\n\n }\n\n (None, overwrite) => {\n\n let mut config = RobotTeleopConfig::default();\n\n config.control_nodes_config.move_base_mode = Some(\"base\".into());\n", "file_path": "openrr-apps/src/utils.rs", "rank": 58, "score": 142076.97172431566 }, { "content": "pub fn wait_joint_positions(\n\n client: &dyn JointTrajectoryClient,\n\n target_positions: &[f64],\n\n timeout: std::time::Duration,\n\n allowable_total_diff: f64,\n\n) -> Result<(), Error> {\n\n let sleep_unit = std::time::Duration::from_millis(100);\n\n let max_num = timeout.as_micros() / sleep_unit.as_micros();\n\n let dof = target_positions.len();\n\n let mut sum_err = 0.0;\n\n for _iteration in 0..max_num {\n\n let cur = client.current_joint_positions()?;\n\n sum_err = 0.0;\n\n for i in 0..dof {\n\n sum_err += (target_positions[i] - cur[i]).abs();\n\n }\n\n if sum_err < allowable_total_diff {\n\n return Ok(());\n\n }\n\n std::thread::sleep(sleep_unit);\n\n }\n\n Err(Error::Timeout {\n\n timeout,\n\n allowable_total_diff,\n\n err: sum_err,\n\n })\n\n}\n\n\n", "file_path": "openrr-client/src/utils.rs", "rank": 59, "score": 142076.97172431566 }, { "content": "pub fn resolve_robot_config(\n\n config_path: 
Option<&Path>,\n\n overwrite: Option<&str>,\n\n) -> Result<RobotConfig> {\n\n match (config_path, overwrite) {\n\n (Some(config_path), Some(overwrite)) => {\n\n let s = &fs::read_to_string(config_path)?;\n\n let s = &evaluate_overwrite_str(s, overwrite, Some(config_path))?;\n\n Ok(RobotConfig::from_str(s, config_path)?)\n\n }\n\n (Some(config_path), None) => {\n\n let s = &evaluate(&fs::read_to_string(config_path)?, Some(config_path))?;\n\n Ok(RobotConfig::from_str(s, config_path)?)\n\n }\n\n (None, overwrite) => {\n\n let mut config = RobotConfig::default();\n\n config\n\n .urdf_viz_clients_configs\n\n .push(arci_urdf_viz::UrdfVizWebClientConfig {\n\n name: DEFAULT_JOINT_CLIENT_NAME.into(),\n", "file_path": "openrr-apps/src/utils.rs", "rank": 60, "score": 142076.97172431566 }, { "content": "/// Interpolate position vectors\n\n///\n\n/// returns vector of (position, velocity, acceleration)\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// let points = openrr_planner::interpolate(&[vec![0.0, 1.0], vec![2.0, 0.0]], 1.0, 0.1).unwrap();\n\n/// assert_eq!(points.len(), 12);\n\n/// assert_eq!(points[0].position[0], 0.0);\n\n/// assert_eq!(points[0].position[1], 1.0);\n\n/// assert_eq!(points[1].position[0], 0.2);\n\n/// assert_eq!(points[1].position[1], 0.9);\n\n/// ```\n\npub fn interpolate<T>(\n\n points: &[Vec<T>],\n\n total_duration: T,\n\n unit_duration: T,\n\n) -> Option<Vec<TrajectoryPoint<T>>>\n\nwhere\n\n T: Float,\n\n{\n\n let key_frame_unit_duration = total_duration / (T::from(points.len())? 
- T::one());\n\n let times = (0_usize..points.len())\n\n .map(|i| T::from(i).unwrap() * key_frame_unit_duration)\n\n .collect::<Vec<T>>();\n\n assert_eq!(times.len(), points.len());\n\n\n\n let spline = CubicSpline::new(times, points.to_vec())?;\n\n let mut t = T::zero();\n\n let mut ret = Vec::with_capacity(points.len());\n\n while t < total_duration {\n\n ret.push(TrajectoryPoint {\n\n position: spline.position(t)?,\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 61, "score": 140305.4199082055 }, { "content": "fn default_time_interpolate_rate() -> f64 {\n\n 0.5\n\n}\n\n\n\nimpl Default for SelfCollisionCheckerConfig {\n\n fn default() -> Self {\n\n Self {\n\n prediction: default_prediction(),\n\n time_interpolate_rate: default_time_interpolate_rate(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "openrr-planner/src/collision/self_collision_checker.rs", "rank": 62, "score": 140103.0617415799 }, { "content": "pub fn create_ik_clients(\n\n configs: &[IkClientConfig],\n\n name_to_joint_trajectory_client: &HashMap<String, ArcJointTrajectoryClient>,\n\n name_to_ik_solvers: &HashMap<String, Arc<IkSolverWithChain>>,\n\n) -> HashMap<String, Arc<IkClient<ArcJointTrajectoryClient>>> {\n\n let mut clients = HashMap::new();\n\n for config in configs {\n\n clients.insert(\n\n config.name.clone(),\n\n Arc::new(IkClient::new(\n\n name_to_joint_trajectory_client[&config.client_name].clone(),\n\n name_to_ik_solvers[&config.solver_name].clone(),\n\n )),\n\n );\n\n }\n\n clients\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema)]\n\n#[serde(deny_unknown_fields)]\n\npub struct CollisionCheckClientConfig {\n\n pub name: String,\n\n pub client_name: String,\n\n #[serde(default)]\n\n pub self_collision_checker_config: SelfCollisionCheckerConfig,\n\n}\n\n\n", "file_path": "openrr-client/src/robot_client.rs", "rank": 63, "score": 140069.79741961628 }, { "content": "fn endpoint() -> (SocketAddr, String) {\n\n static PORT: AtomicU16 = AtomicU16::new(50061);\n\n let addr 
= format!(\"[::1]:{}\", PORT.fetch_add(1, Ordering::SeqCst));\n\n (addr.parse().unwrap(), format!(\"http://{addr}\"))\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\")]\n\nasync fn joint_trajectory_client() -> Result<()> {\n\n let (addr, endpoint) = endpoint();\n\n\n\n // Launch server\n\n {\n\n let client =\n\n RemoteJointTrajectoryClientReceiver::new(DummyJointTrajectoryClient::new(vec![\n\n \"a\".to_owned()\n\n ]));\n\n tokio::spawn(client.serve(addr));\n\n tokio::time::sleep(Duration::from_secs(1)).await;\n\n }\n\n\n", "file_path": "openrr-remote/tests/test.rs", "rank": 64, "score": 139485.23313053476 }, { "content": "#[allow(clippy::redundant_clone)] // This is intentional.\n\n#[test]\n\nfn test_urdf_viz_web_client_config_clone() {\n\n let config1 = UrdfVizWebClientConfig {\n\n name: \"test\".to_owned(),\n\n joint_names: Some(vec![\"j1\".to_owned(), \"j2\".to_owned()]),\n\n wrap_with_joint_position_limiter: true,\n\n joint_position_limits: None,\n\n wrap_with_joint_velocity_limiter: true,\n\n joint_velocity_limits: Some(vec![1.0, 2.0]),\n\n };\n\n let config2 = config1.clone();\n\n assert_eq!(config2.name, \"test\");\n\n assert_eq!(config2.joint_names.as_ref().unwrap()[0], \"j1\");\n\n assert_eq!(config2.joint_names.as_ref().unwrap()[1], \"j2\");\n\n assert!(config2.wrap_with_joint_position_limiter);\n\n assert!(config2.joint_position_limits.is_none());\n\n assert!(config2.wrap_with_joint_velocity_limiter);\n\n assert_approx_eq!(config2.joint_velocity_limits.as_ref().unwrap()[0], 1.0);\n\n assert_approx_eq!(config2.joint_velocity_limits.unwrap()[1], 2.0);\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 65, "score": 137075.3430530471 }, { "content": "#[test]\n\nfn test_urdf_viz_web_client_config_debug() {\n\n let config = UrdfVizWebClientConfig {\n\n name: \"test\".to_owned(),\n\n joint_names: Some(vec![\"j1\".to_owned(), \"j2\".to_owned()]),\n\n wrap_with_joint_position_limiter: true,\n\n joint_position_limits: None,\n\n 
wrap_with_joint_velocity_limiter: true,\n\n joint_velocity_limits: Some(vec![1.0, 2.0]),\n\n };\n\n assert_eq!(\n\n format!(\"{config:?}\"),\n\n \"UrdfVizWebClientConfig { name: \\\"test\\\", \\\n\n joint_names: Some([\\\"j1\\\", \\\"j2\\\"]), \\\n\n wrap_with_joint_position_limiter: true, \\\n\n wrap_with_joint_velocity_limiter: true, \\\n\n joint_velocity_limits: Some([1.0, 2.0]), \\\n\n joint_position_limits: None \\\n\n }\"\n\n )\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 66, "score": 137041.20118584964 }, { "content": "pub fn create_ik_solver_with_chain(\n\n full_chain: &k::Chain<f64>,\n\n config: &IkSolverConfig,\n\n) -> IkSolverWithChain {\n\n let chain = if let Some(root_node_name) = &config.root_node_name {\n\n k::SerialChain::from_end_to_root(\n\n full_chain\n\n .find(&config.ik_target)\n\n .unwrap_or_else(|| panic!(\"ik_target: {} not found\", config.ik_target)),\n\n full_chain\n\n .find(root_node_name)\n\n .unwrap_or_else(|| panic!(\"root_node_name: {root_node_name} not found\")),\n\n )\n\n } else {\n\n k::SerialChain::from_end(\n\n full_chain\n\n .find(&config.ik_target)\n\n .unwrap_or_else(|| panic!(\"ik_target: {} not found\", config.ik_target)),\n\n )\n\n };\n", "file_path": "openrr-client/src/clients/ik_client.rs", "rank": 67, "score": 136333.70758562622 }, { "content": "pub fn create_random_jacobian_ik_solver(\n\n parameters: &IkSolverParameters,\n\n) -> openrr_planner::RandomInitializeIkSolver<f64, k::JacobianIkSolver<f64>> {\n\n openrr_planner::RandomInitializeIkSolver::new(\n\n create_jacobian_ik_solver(parameters),\n\n parameters.num_max_try,\n\n )\n\n}\n\n\n\npub struct IkSolverWithChain {\n\n ik_arm: k::SerialChain<f64>,\n\n ik_solver: Arc<dyn k::InverseKinematicsSolver<f64> + Send + Sync>,\n\n constraints: Constraints,\n\n}\n\n\n\nimpl IkSolverWithChain {\n\n pub fn end_transform(&self) -> k::Isometry3<f64> {\n\n self.ik_arm.end_transform()\n\n }\n\n\n", "file_path": "openrr-client/src/clients/ik_client.rs", 
"rank": 68, "score": 134592.22391544734 }, { "content": "/// Check the poses which can be reached by the robot arm\n\npub fn get_reachable_region<T, I>(\n\n ik_solver: &I,\n\n arm: &k::SerialChain<T>,\n\n initial_pose: &na::Isometry3<T>,\n\n constraints: &k::Constraints,\n\n max_point: na::Vector3<T>,\n\n min_point: na::Vector3<T>,\n\n unit_check_length: T,\n\n) -> Vec<na::Isometry3<T>>\n\nwhere\n\n T: RealField + Copy + k::SubsetOf<f64> + Send + Sync,\n\n I: InverseKinematicsSolver<T> + Send + Sync,\n\n{\n\n let initial_angles = arm.joint_positions();\n\n let solved_poses = Mutex::new(Vec::new());\n\n let target_pose = *initial_pose;\n\n\n\n let mut z_points = vec![];\n\n let mut z = min_point[2];\n\n while z < max_point[2] {\n", "file_path": "openrr-planner/src/ik.rs", "rank": 69, "score": 132771.68762885232 }, { "content": "/// Clamp joint angles to set angles safely\n\npub fn generate_clamped_joint_positions_from_limits<T>(\n\n angles: &[T],\n\n limits: &Limits<T>,\n\n) -> Result<Vec<T>>\n\nwhere\n\n T: RealField + Copy,\n\n{\n\n if angles.len() != limits.len() {\n\n return Err(Error::DofMismatch(angles.len(), limits.len()));\n\n }\n\n Ok(limits\n\n .iter()\n\n .zip(angles.iter())\n\n .map(|(range, angle)| match *range {\n\n Some(ref range) => {\n\n if *angle > range.max {\n\n range.max\n\n } else if *angle < range.min {\n\n range.min\n\n } else {\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 70, "score": 132445.0415697339 }, { "content": "fn default_axis_value_map() -> HashMap<Axis, f64> {\n\n let mut axis_value_map = HashMap::new();\n\n axis_value_map.insert(Axis::RightStickX, -1.0);\n\n axis_value_map.insert(Axis::LeftStickX, -1.0);\n\n axis_value_map\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct GilGamepad {\n\n rx: flume::Receiver<GamepadEvent>,\n\n is_running: Arc<AtomicBool>,\n\n}\n\n\n\nimpl GilGamepad {\n\n pub fn new(id: usize, map: Map) -> Self {\n\n let (tx, rx) = flume::unbounded();\n\n let is_running = Arc::new(AtomicBool::new(true));\n\n 
let is_running_cloned = is_running.clone();\n\n std::thread::spawn(move || {\n\n let mut gil = gilrs::Gilrs::new().unwrap();\n", "file_path": "arci-gamepad-gilrs/src/lib.rs", "rank": 71, "score": 131740.1610271266 }, { "content": "fn new_joint_position_limiter<C>(\n\n client: C,\n\n position_limits: Option<Vec<JointPositionLimit>>,\n\n strategy: JointPositionLimiterStrategy,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointPositionLimiter<C>, Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match position_limits {\n\n Some(position_limits) => Ok(JointPositionLimiter::new_with_strategy(\n\n client,\n\n position_limits,\n\n strategy,\n\n )),\n\n None => JointPositionLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 72, "score": 130461.09313013445 }, { "content": "fn new_joint_velocity_limiter<C>(\n\n client: C,\n\n velocity_limits: Option<Vec<f64>>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointVelocityLimiter<C>, Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match velocity_limits {\n\n Some(velocity_limits) => Ok(JointVelocityLimiter::new(client, velocity_limits)),\n\n None => JointVelocityLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]\n\n#[serde(deny_unknown_fields)]\n\npub struct JointTrajectoryClientWrapperConfig {\n\n #[serde(default)]\n\n pub wrap_with_joint_position_limiter: bool,\n\n #[serde(default)]\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 73, "score": 130461.09313013445 }, { "content": "fn default_axis_map() -> HashMap<gilrs::Axis, Axis> {\n\n let mut axis_map = HashMap::new();\n\n axis_map.insert(gilrs::Axis::LeftStickX, Axis::LeftStickX);\n\n axis_map.insert(gilrs::Axis::LeftStickY, Axis::LeftStickY);\n\n axis_map.insert(gilrs::Axis::RightStickX, 
Axis::RightStickX);\n\n axis_map.insert(gilrs::Axis::RightStickY, Axis::RightStickY);\n\n axis_map.insert(gilrs::Axis::DPadX, Axis::DPadX);\n\n axis_map.insert(gilrs::Axis::DPadY, Axis::DPadY);\n\n axis_map\n\n}\n\n\n", "file_path": "arci-gamepad-gilrs/src/lib.rs", "rank": 74, "score": 130398.26672396058 }, { "content": "fn default_button_map() -> HashMap<gilrs::Button, Button> {\n\n let mut button_map = HashMap::new();\n\n button_map.insert(gilrs::Button::South, Button::South);\n\n button_map.insert(gilrs::Button::East, Button::East);\n\n button_map.insert(gilrs::Button::North, Button::North);\n\n button_map.insert(gilrs::Button::West, Button::West);\n\n button_map.insert(gilrs::Button::LeftTrigger, Button::LeftTrigger);\n\n button_map.insert(gilrs::Button::LeftTrigger2, Button::LeftTrigger2);\n\n button_map.insert(gilrs::Button::RightTrigger, Button::RightTrigger);\n\n button_map.insert(gilrs::Button::RightTrigger2, Button::RightTrigger2);\n\n button_map.insert(gilrs::Button::Select, Button::Select);\n\n button_map.insert(gilrs::Button::Start, Button::Start);\n\n button_map.insert(gilrs::Button::Mode, Button::Mode);\n\n button_map.insert(gilrs::Button::LeftThumb, Button::LeftThumb);\n\n button_map.insert(gilrs::Button::RightThumb, Button::RightThumb);\n\n button_map.insert(gilrs::Button::DPadUp, Button::DPadUp);\n\n button_map.insert(gilrs::Button::DPadDown, Button::DPadDown);\n\n button_map.insert(gilrs::Button::DPadLeft, Button::DPadLeft);\n\n button_map.insert(gilrs::Button::DPadRight, Button::DPadRight);\n\n button_map\n\n}\n\n\n", "file_path": "arci-gamepad-gilrs/src/lib.rs", "rank": 75, "score": 130398.26672396058 }, { "content": "fn new_joint_position_difference_limiter<C>(\n\n client: C,\n\n position_difference_limits: Option<Vec<f64>>,\n\n) -> Result<JointPositionDifferenceLimiter<C>, Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match position_difference_limits {\n\n Some(position_difference_limits) => 
Ok(JointPositionDifferenceLimiter::new(\n\n client,\n\n position_difference_limits,\n\n )?),\n\n None => Err(Error::Other(anyhow::format_err!(\n\n \"No position_difference_limits is specified\"\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 76, "score": 128305.09011203717 }, { "content": "pub fn gen(workspace_root: &Path) -> Result<()> {\n\n const FULLY_IGNORE: &[&str] = &[\"SetCompleteCondition\"];\n\n const IGNORE: &[&str] = &[\"JointTrajectoryClient\", \"SetCompleteCondition\", \"Gamepad\"];\n\n const USE_TRY_INTO: &[&str] = &[\"SystemTime\"];\n\n\n\n let out_dir = &workspace_root.join(\"openrr-plugin/src/gen\");\n\n fs::create_dir_all(out_dir)?;\n\n let mut api_items = TokenStream::new();\n\n let mut proxy_impls = TokenStream::new();\n\n let mut traits = vec![];\n\n for item in arci_traits(workspace_root)? {\n\n let name = &&*item.ident.to_string();\n\n if FULLY_IGNORE.contains(name) {\n\n continue;\n\n }\n\n traits.push(item.ident.clone());\n\n if IGNORE.contains(name) {\n\n continue;\n\n }\n\n\n", "file_path": "tools/codegen/src/plugin.rs", "rank": 77, "score": 127845.79425543232 }, { "content": "pub fn gen(workspace_root: &Path) -> Result<()> {\n\n const FULLY_IGNORE: &[&str] = &[\"SetCompleteCondition\"];\n\n const IGNORE: &[&str] = &[\"JointTrajectoryClient\", \"SetCompleteCondition\", \"Gamepad\"];\n\n\n\n let out_dir = &workspace_root.join(\"openrr-remote/src/gen\");\n\n fs::create_dir_all(out_dir)?;\n\n let mut items = TokenStream::new();\n\n let mut traits = vec![];\n\n\n\n let mut pb_traits = vec![];\n\n let pb_file = fs::read_to_string(&workspace_root.join(\"openrr-remote/src/generated/arci.rs\"))?;\n\n CollectTrait(&mut pb_traits).visit_file_mut(&mut syn::parse_file(&pb_file)?);\n\n\n\n for item in arci_traits(workspace_root)? 
{\n\n let name = &&*item.ident.to_string();\n\n if FULLY_IGNORE.contains(name) {\n\n continue;\n\n }\n\n traits.push(item.ident.clone());\n\n\n", "file_path": "tools/codegen/src/rpc.rs", "rank": 78, "score": 127845.79425543232 }, { "content": "fn parse_scripts(s: &str) -> Result<Vec<Script>> {\n\n fn push_script(\n\n cur_query: &mut Option<String>,\n\n buf: &mut String,\n\n scripts: &mut Vec<Script>,\n\n i: usize,\n\n ) -> Result<()> {\n\n let query = cur_query.take().unwrap();\n\n let value = mem::take(buf);\n\n let value = value.trim();\n\n let operation = if value.is_empty() {\n\n Operation::Delete\n\n } else {\n\n let value: Value = toml::from_str(&format!(r#\"a = {value}\"#))\n\n .with_context(|| format!(\"invalid script syntax at {}: {value}\", i + 1))?;\n\n Operation::Set(value[\"a\"].clone())\n\n };\n\n\n\n scripts.push(Script {\n\n query: convert_query(&query)?,\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 79, "score": 126396.04982228295 }, { "content": "fn instance_create_error<T: fmt::Debug, U>(\n\n res: Result<T, arci::Error>,\n\n instance_kind: PluginInstanceKind,\n\n instance_name: String,\n\n plugin_name: String,\n\n) -> Result<U, arci::Error> {\n\n error!(\n\n \"failed to create `{instance_kind:?}` instance `{instance_name}` from plugin `{plugin_name}`: {res:?}\",\n\n );\n\n res.and_then(|_| {\n\n // TODO: error msg\n\n Err(format_err!(\n\n \"failed to create `{instance_kind:?}` instance `{instance_name}` from plugin `{plugin_name}`: None\",\n\n )\n\n .into())\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "openrr-apps/src/robot_config.rs", "rank": 80, "score": 125833.67361457966 }, { "content": "fn get(url: Url) -> Result<ureq::Response, arci::Error> {\n\n ureq::get(url.as_str())\n\n .call()\n\n .map_err(map_connection_error(&url))\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/utils.rs", "rank": 81, "score": 125224.64380117678 }, { "content": "#[derive(Default)]\n\nstruct DebugGamepad 
{}\n\n\n\n#[arci::async_trait]\n\nimpl arci::Gamepad for DebugGamepad {\n\n async fn next_event(&self) -> arci::gamepad::GamepadEvent {\n\n println!(\"Server received Gamepad::next_event\",);\n\n arci::gamepad::GamepadEvent::Unknown\n\n }\n\n\n\n fn stop(&self) {\n\n println!(\"Server received Gamepad::stop\");\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> anyhow::Result<()> {\n\n let addr = \"[::1]:50051\".parse()?;\n\n println!(\"Server listening on {addr}\");\n\n\n\n let client = RemoteJointTrajectoryClientReceiver::new(DebugJointTrajectoryClient::default());\n", "file_path": "openrr-remote/examples/server.rs", "rank": 82, "score": 124209.59002971958 }, { "content": "#[derive(Default)]\n\nstruct DebugSpeaker {}\n\n\n\nimpl arci::Speaker for DebugSpeaker {\n\n fn speak(&self, message: &str) -> Result<arci::WaitFuture, arci::Error> {\n\n println!(\"Server received Speaker::speak (message: {message:?})\");\n\n Ok(arci::WaitFuture::ready())\n\n }\n\n}\n\n\n", "file_path": "openrr-remote/examples/server.rs", "rank": 83, "score": 124209.59002971958 }, { "content": "#[derive(Default)]\n\nstruct DebugNavigation {}\n\n\n\nimpl arci::Navigation for DebugNavigation {\n\n fn send_goal_pose(\n\n &self,\n\n goal: arci::Isometry2<f64>,\n\n frame_id: &str,\n\n timeout: std::time::Duration,\n\n ) -> Result<arci::WaitFuture, arci::Error> {\n\n println!(\n\n \"Server received Navigation::send_goal_pose (goal: {goal:?}, frame_id: {frame_id:?}, timeout: {timeout:?})\"\n\n );\n\n Ok(arci::WaitFuture::ready())\n\n }\n\n\n\n fn cancel(&self) -> Result<(), arci::Error> {\n\n println!(\"Server received Navigation::cancel\");\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "openrr-remote/examples/server.rs", "rank": 84, "score": 124209.59002971958 }, { "content": "pub fn create_collision_check_clients<P: AsRef<Path>>(\n\n urdf_path: P,\n\n self_collision_check_pairs: &[String],\n\n configs: &[CollisionCheckClientConfig],\n\n name_to_joint_trajectory_client: &HashMap<String, Arc<dyn 
JointTrajectoryClient>>,\n\n full_chain: Arc<k::Chain<f64>>,\n\n) -> HashMap<String, Arc<CollisionCheckClient<Arc<dyn JointTrajectoryClient>>>> {\n\n let mut clients = HashMap::new();\n\n for config in configs {\n\n clients.insert(\n\n config.name.clone(),\n\n Arc::new(create_collision_check_client(\n\n &urdf_path,\n\n self_collision_check_pairs,\n\n &config.self_collision_checker_config,\n\n name_to_joint_trajectory_client[&config.client_name].clone(),\n\n full_chain.clone(),\n\n )),\n\n );\n\n }\n\n clients\n\n}\n", "file_path": "openrr-client/src/robot_client.rs", "rank": 85, "score": 122615.88530398348 }, { "content": "/// Launches GUI that send joint positions from GUI to the given `robot_client`.\n\npub fn joint_position_sender<L, M, N>(\n\n robot_client: RobotClient<L, M, N>,\n\n robot: urdf_rs::Robot,\n\n) -> Result<(), Error>\n\nwhere\n\n L: Localization + 'static,\n\n M: MoveBase + 'static,\n\n N: Navigation + 'static,\n\n{\n\n let joints = joint_map(robot);\n\n validate_joints(&joints, &robot_client)?;\n\n\n\n let gui = JointPositionSender::new(robot_client, joints)?;\n\n\n\n // Should we expose some of the settings to the user?\n\n let settings = Settings {\n\n flags: Some(gui),\n\n window: window::Settings {\n\n size: (400, 550),\n\n ..window::Settings::default()\n\n },\n\n ..Settings::default()\n\n };\n\n\n\n JointPositionSender::run(settings)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 86, "score": 122615.88530398348 }, { "content": "#[derive(Default)]\n\nstruct DebugTransformResolver {}\n\n\n\nimpl arci::TransformResolver for DebugTransformResolver {\n\n fn resolve_transformation(\n\n &self,\n\n from: &str,\n\n to: &str,\n\n time: std::time::SystemTime,\n\n ) -> Result<arci::Isometry3<f64>, arci::Error> {\n\n println!(\n\n \"Server received TransformResolver::resolve_transformation (from: {from:?}, to: {to:?}, time: {time:?})\",\n\n );\n\n Ok(arci::Isometry3::new(\n\n arci::Vector3::new(0.0, 0.0, 
0.0),\n\n arci::Vector3::new(0.0, 0.0, 0.0),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "openrr-remote/examples/server.rs", "rank": 87, "score": 121963.38049479132 }, { "content": "#[derive(Default)]\n\nstruct DebugMoveBase {}\n\n\n\nimpl arci::MoveBase for DebugMoveBase {\n\n fn send_velocity(&self, velocity: &arci::BaseVelocity) -> Result<(), arci::Error> {\n\n println!(\"Server received MoveBase::send_velocity (velocity: {velocity:?})\");\n\n Ok(())\n\n }\n\n\n\n fn current_velocity(&self) -> Result<arci::BaseVelocity, arci::Error> {\n\n println!(\"Server received MoveBase::current_velocity\");\n\n Ok(arci::BaseVelocity::default())\n\n }\n\n}\n\n\n", "file_path": "openrr-remote/examples/server.rs", "rank": 88, "score": 121963.38049479132 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n sender: flume::Sender<GamepadEvent>,\n\n key_state: HashMap<char, bool>,\n\n button_map: HashMap<char, Button>,\n\n}\n\n\n\n#[rustfmt::skip]\n\nconst LEFT_STICK_KEYS: &[char] = &[\n\n 'q', 'w', 'e',\n\n 'a', 's', 'd',\n\n 'z', 'x', 'c',\n\n];\n\n#[rustfmt::skip]\n\nconst RIGHT_STICK_KEYS: &[char] = &[\n\n 'u', 'i', 'o',\n\n 'j', 'k', 'l',\n\n 'm', ',', '.',\n\n];\n\nconst DEFAULT_AXIS_VALUE: f64 = 0.3;\n\n\n", "file_path": "arci-gamepad-keyboard/src/lib.rs", "rank": 89, "score": 121132.34417080354 }, { "content": "struct Ros2Plugin {}\n\n\n\nimpl openrr_plugin::Plugin for Ros2Plugin {\n\n fn new_joint_trajectory_client(\n\n &self,\n\n args: String,\n\n ) -> Result<Option<Box<dyn arci::JointTrajectoryClient>>, arci::Error> {\n\n let config: Ros2ControlConfig = toml::from_str(&args).map_err(anyhow::Error::from)?;\n\n let ctx = r2r::Context::create().unwrap();\n\n let all_client = Ros2ControlClient::new(ctx, &config.action_name);\n\n if config.joint_names.is_empty() {\n\n Ok(Some(Box::new(all_client)))\n\n } else {\n\n Ok(Some(Box::new(arci::PartialJointTrajectoryClient::new(\n\n config.joint_names,\n\n all_client,\n\n )?)))\n\n }\n\n }\n\n\n", "file_path": 
"arci-ros2/src/plugin.rs", "rank": 90, "score": 121127.0664446402 }, { "content": "#[derive(Default)]\n\nstruct DebugJointTrajectoryClient {}\n\n\n\nimpl arci::JointTrajectoryClient for DebugJointTrajectoryClient {\n\n fn joint_names(&self) -> Vec<String> {\n\n println!(\"Server received JointTrajectoryClient::joint_names\");\n\n vec![\"a\".into(), \"b\".into()]\n\n }\n\n\n\n fn current_joint_positions(&self) -> Result<Vec<f64>, arci::Error> {\n\n println!(\"Server received JointTrajectoryClient::current_joint_positions\");\n\n Ok(vec![0.0, 0.0])\n\n }\n\n\n\n fn send_joint_positions(\n\n &self,\n\n positions: Vec<f64>,\n\n duration: std::time::Duration,\n\n ) -> Result<arci::WaitFuture, arci::Error> {\n\n println!(\n\n \"Server received JointTrajectoryClient::send_joint_positions (position: {positions:?}, duration: {duration:?})\",\n", "file_path": "openrr-remote/examples/server.rs", "rank": 91, "score": 119828.70066824318 }, { "content": "struct PanicLocalization;\n\n\n\nimpl Localization for PanicLocalization {\n\n #[track_caller]\n\n fn current_pose(&self, frame_id: &str) -> Result<arci::Isometry2<f64>, arci::Error> {\n\n panic!(\"PanicLocalization::current_pose frame_id={frame_id:?}\")\n\n }\n\n}\n\n\n", "file_path": "openrr-client/tests/test_robot_client.rs", "rank": 92, "score": 119758.0263417413 }, { "content": "pub fn create_robot_collision_detector<P: AsRef<Path>>(\n\n urdf_path: P,\n\n config: RobotCollisionDetectorConfig,\n\n self_collision_pairs: Vec<(String, String)>,\n\n) -> RobotCollisionDetector<f64> {\n\n let urdf_robot = urdf_rs::read_file(urdf_path).unwrap();\n\n let robot = k::Chain::<f64>::from(&urdf_robot);\n\n let collision_detector = CollisionDetector::from_urdf_robot(&urdf_robot, config.prediction);\n\n\n\n RobotCollisionDetector::new(robot, collision_detector, self_collision_pairs)\n\n}\n\n\n", "file_path": "openrr-planner/src/collision/robot_collision_detector.rs", "rank": 93, "score": 119497.9980725994 }, { "content": "pub fn 
create_joint_path_planner<P: AsRef<Path>>(\n\n urdf_path: P,\n\n self_collision_check_pairs: Vec<(String, String)>,\n\n config: &JointPathPlannerConfig,\n\n) -> JointPathPlanner<f64> {\n\n JointPathPlannerBuilder::from_urdf_file(urdf_path)\n\n .unwrap()\n\n .step_length(config.step_length)\n\n .max_try(config.max_try)\n\n .num_smoothing(config.num_smoothing)\n\n .collision_check_margin(config.margin)\n\n .self_collision_pairs(self_collision_check_pairs)\n\n .finalize()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "openrr-planner/src/planner/joint_path_planner.rs", "rank": 94, "score": 119497.9980725994 }, { "content": "pub fn create_collision_avoidance_client<P: AsRef<Path>>(\n\n urdf_path: P,\n\n self_collision_check_pairs: Vec<(String, String)>,\n\n joint_path_planner_config: &JointPathPlannerConfig,\n\n client: Arc<dyn JointTrajectoryClient>,\n\n) -> CollisionAvoidanceClient<Arc<dyn JointTrajectoryClient>> {\n\n let planner = create_joint_path_planner(\n\n urdf_path,\n\n self_collision_check_pairs,\n\n joint_path_planner_config,\n\n );\n\n\n\n let nodes = planner\n\n .robot_collision_detector\n\n .robot\n\n .iter()\n\n .map(|node| (*node).clone())\n\n .collect();\n\n let using_joints = k::Chain::<f64>::from_nodes(nodes);\n\n\n", "file_path": "openrr-client/src/clients/collision_avoidance_client.rs", "rank": 95, "score": 119497.9980725994 }, { "content": "pub fn create_collision_check_client<P: AsRef<Path>>(\n\n urdf_path: P,\n\n self_collision_check_pairs: &[String],\n\n config: &SelfCollisionCheckerConfig,\n\n client: Arc<dyn JointTrajectoryClient>,\n\n full_chain: Arc<k::Chain<f64>>,\n\n) -> CollisionCheckClient<Arc<dyn JointTrajectoryClient>> {\n\n let joint_names = client.joint_names();\n\n let nodes = joint_names\n\n .iter()\n\n .map(|joint_name| (*full_chain.find(joint_name).unwrap()).clone())\n\n .collect();\n\n let using_joints = k::Chain::<f64>::from_nodes(nodes);\n\n CollisionCheckClient::new(\n\n 
client,\n\n using_joints,\n\n Arc::new(create_self_collision_checker(\n\n urdf_path,\n\n self_collision_check_pairs,\n\n config,\n", "file_path": "openrr-client/src/clients/collision_check_client.rs", "rank": 96, "score": 119497.9980725994 }, { "content": "#[derive(Debug, Default)]\n\nstruct ThreadState {\n\n has_send_joint_positions_thread: bool,\n\n has_send_velocity_thread: bool,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct UrdfVizWebClient(Arc<UrdfVizWebClientInner>);\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 97, "score": 118969.68827469765 }, { "content": "#[derive(Debug, Parser)]\n\nstruct Args {\n\n file_path: String,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), arci::Error> {\n\n tracing_subscriber::fmt::init();\n\n let args = Args::parse();\n\n let mut hash_map = HashMap::new();\n\n hash_map.insert(\"test\".to_string(), PathBuf::from(args.file_path));\n\n let speaker = AudioSpeaker::new(hash_map);\n\n let wait = speaker.speak(\"test\")?;\n\n wait.await\n\n}\n", "file_path": "arci-speak-audio/examples/audio_speak.rs", "rank": 98, "score": 118964.41976039889 } ]
Rust
src/build/windows.rs
kungfoo/boon
788d1265e9e6edd822cf651e5b3be8f2d12483c8
#![allow(clippy::too_many_lines)] use crate::build::{Iterator, collect_zip_directory, get_love_file_name, get_love_version_path, get_output_filename, get_zip_output_filename}; use crate::types::{Bitness, BuildSettings, BuildStatistics, LoveVersion, Platform, Project}; use glob::glob; use remove_dir_all::remove_dir_all; use anyhow::{anyhow, ensure, Context, Result}; use std::collections::HashSet; use std::fs::File; use std::io::{Read, Write}; use std::path::PathBuf; pub fn create_exe( project: &Project, build_settings: &BuildSettings, version: LoveVersion, bitness: Bitness, ) -> Result<BuildStatistics> { let start = std::time::Instant::now(); let app_dir_path = get_love_version_path(version, Platform::Windows, bitness)?; let mut app_dir_path_clone = PathBuf::new(); app_dir_path_clone.clone_from(&app_dir_path); let mut love_exe_path = app_dir_path; love_exe_path.push("love.exe"); ensure!(love_exe_path.exists(), format!("love.exe not found at '{}'\nhint: You may need to download LÖVE first: `boon love download {}`", love_exe_path.display(), version.to_string())); let exe_file_name = get_output_filename(project, Platform::Windows, bitness); let zip_output_file_name = &get_zip_output_filename(project, Platform::Windows, bitness); let mut output_path = project.get_release_path(build_settings); output_path.push(zip_output_file_name); if output_path.exists() { println!("Removing existing directory {}", output_path.display()); std::fs::remove_dir_all(&output_path).with_context(|| { format!( "Could not remove output directory '{}'", output_path.display() ) })?; } std::fs::create_dir(&output_path).with_context(|| { format!( "Could not create build directory '{}'", output_path.display() ) })?; output_path.push(exe_file_name); println!("Copying love from {}", love_exe_path.display()); println!("Outputting exe to {}", output_path.display()); let mut output_file = File::create(&output_path) .with_context(|| format!("Could not create output file '{}'", output_path.display()))?; 
let love_file_name = get_love_file_name(project); let mut local_love_file_path = project.get_release_path(build_settings); local_love_file_path.push(love_file_name); println!( "Copying project .love from {}", local_love_file_path.display() ); let mut copy_options = fs_extra::file::CopyOptions::new(); copy_options.overwrite = true; let dll_glob = glob( app_dir_path_clone .join("*.dll") .to_str() .context("Could not convert string")?, )?; let txt_glob = glob( app_dir_path_clone .join("*.txt") .to_str() .context("Could not convert string")?, )?; let ico_glob = glob( app_dir_path_clone .join("*.ico") .to_str() .context("Could not convert string")?, )?; for entry in dll_glob.chain(txt_glob).chain(ico_glob) { match entry { Ok(path) => { let local_file_name = path .file_name() .with_context(|| { format!("Could not get file name from path '{}'", path.display()) })? .to_str() .context("Could not do string conversion")?; fs_extra::file::copy( &path, &project .get_release_path(build_settings) .join(zip_output_file_name) .join(local_file_name), &copy_options, )?; } Err(e) => { return Err(anyhow!( "Path matched for '{}' but file was unreadable: {}", e.path().display(), e.error() )) } } } let paths = &[love_exe_path.as_path(), local_love_file_path.as_path()]; let mut buffer = Vec::new(); for path in paths { if path.is_file() { let mut file = File::open(path)?; file.read_to_end(&mut buffer)?; output_file.write_all(&buffer)?; buffer.clear(); } } let zip_output_file_name = get_zip_output_filename(project, Platform::Windows, bitness); let output_path = project .get_release_path(build_settings) .join(zip_output_file_name); let src_dir = output_path.clone(); let src_dir = src_dir.to_str().context("Could not do string conversion")?; let mut dst_file_path = output_path; dst_file_path.set_extension("zip"); let dst_file = dst_file_path .to_str() .context("Could not do string conversion")?; collect_zip_directory( src_dir, dst_file, zip::CompressionMethod::Deflated, &HashSet::new(), ) 
.with_context(|| { format!( "Error while zipping files from `{}` to `{}`", src_dir, dst_file ) })??; let path = PathBuf::new().join(src_dir); println!("Removing {}", path.display()); remove_dir_all(&path)?; let build_metadata = std::fs::metadata(dst_file) .with_context(|| format!("Failed to read file metadata for '{}'", dst_file))?; Ok(BuildStatistics { name: format!("Windows {}", bitness.to_string()), file_name: dst_file_path .file_name() .unwrap() .to_str() .unwrap() .to_string(), time: start.elapsed(), size: build_metadata.len(), }) }
#![allow(clippy::too_many_lines)] use crate::build::{Iterator, collect_zip_directory, get_love_file_name, get_love_version_path, get_output_filename, get_zip_output_filename}; use crate::types::{Bitness, BuildSettings, BuildStatistics, LoveVersion, Platform, Project}; use glob::glob; use remove_dir_all::remove_dir_all; use anyhow::{anyhow, ensure, Context, Result}; use std::collections::HashSet; use std::fs::File; use std::io::{Read, Write}; use std::path::PathBuf; pub fn create_exe( project: &Project, build_settings: &BuildSettings, version: LoveVersion, bitness: Bitness, ) -> Result<BuildStatistics> { let start = std::time::Instant::now(); let app_dir_path = get_love_version_path(version, Platform::Windows, bitness)?; let mut app_dir_path_clone = PathBuf::new(); app_dir_path_clone.clone_from(&app_dir_path); let mut love_exe_path = app_dir_path; love_exe_path.push("love.exe"); ensure!(love_exe_path.exists(), format!("love.exe not found at '{}'\nhint: You may need to download LÖVE first: `boon love download {}`", love_exe_path.display(), version.to_string())); let exe_file_name = get_output_filename(project, Platform::Windows, bitness); let zip_output_file_name = &get_zip_output_filename(project, Platform::Windows, bitness); let mut output_path = project.get_release_path(build_settings); output_path.push(zip_output_file_name); if output_path.exists() { println!("Removing existing directory {}", output_path.display()); std::fs::remove_dir_all(&output_path).with_context(|| { format!( "Could not remove output directory '{}'", output_path.display() ) })?; } std::fs::create_dir(&output_path).with_context(|| { format!( "Could not create build directory '{}'", output_path.display() ) })?; output_path.push(exe_file_name); println!("Copying love from {}", love_exe_path.display()); println!("Outputting exe to {}", output_path.display()); let mut output_file = File::create(&output_path) .with_context(|| format!("Could not create output file '{}'", output_path.display()))?; 
let love_file_name = get_love_file_name(project); let mut local_love_file_path = project.get_release_path(build_settings); local_love_file_path.push(love_file_name); println!( "Copying project .love from {}", local_love_file_path.display() ); let mut copy_options = fs_extra::file::CopyOptions::new(); copy_options.overwrite = true; let dll_glob = glob( app_dir_path_clone .join("*.dll") .to_str() .context("Could not convert string")?, )?; let txt_glob = glob( app_dir_path_clone .join("*.txt") .to_str() .context("Could not convert string")?, )?; let ico_glob = glob( app_dir_path_clone .join("*.ico") .to_str() .context("Could not convert string")?, )?; for entry in dll_glob.chain(txt_glob).chain(ico_glob) { match entry { Ok(path) => { let local_file_name = path .file_name() .with_context(|| { format!("Could not get file name from path '{}'", path.display()) })? .to_str() .context("Could not do string conversion")?; fs_extra::file::copy( &path, &project .get_release_path(build_settings) .join(zip_output_file_name) .join(local_file_name), &copy_options, )?; } Err(e) => { return Err(anyhow!( "Path matched for '{}' but file was unreadable: {}", e.path().display(), e.error() )) } } } let paths = &[love_exe_path.as_path(), local_love_file_path.as_path()]; let mut buffer = Vec::new(); for path in paths {
} let zip_output_file_name = get_zip_output_filename(project, Platform::Windows, bitness); let output_path = project .get_release_path(build_settings) .join(zip_output_file_name); let src_dir = output_path.clone(); let src_dir = src_dir.to_str().context("Could not do string conversion")?; let mut dst_file_path = output_path; dst_file_path.set_extension("zip"); let dst_file = dst_file_path .to_str() .context("Could not do string conversion")?; collect_zip_directory( src_dir, dst_file, zip::CompressionMethod::Deflated, &HashSet::new(), ) .with_context(|| { format!( "Error while zipping files from `{}` to `{}`", src_dir, dst_file ) })??; let path = PathBuf::new().join(src_dir); println!("Removing {}", path.display()); remove_dir_all(&path)?; let build_metadata = std::fs::metadata(dst_file) .with_context(|| format!("Failed to read file metadata for '{}'", dst_file))?; Ok(BuildStatistics { name: format!("Windows {}", bitness.to_string()), file_name: dst_file_path .file_name() .unwrap() .to_str() .unwrap() .to_string(), time: start.elapsed(), size: build_metadata.len(), }) }
if path.is_file() { let mut file = File::open(path)?; file.read_to_end(&mut buffer)?; output_file.write_all(&buffer)?; buffer.clear(); }
if_condition
[ { "content": "pub fn download_love(version: LoveVersion, platform: Platform, bitness: Bitness) -> Result<()> {\n\n let file_info = get_love_download_location(version, platform, bitness).with_context(|| {\n\n format!(\n\n \"Could not get download location for LÖVE {} on {} {}\",\n\n version, platform, bitness\n\n )\n\n })?;\n\n\n\n let mut output_file_path = app_dir(\n\n AppDataType::UserData,\n\n &APP_INFO,\n\n version.to_string().as_str(),\n\n )\n\n .with_context(|| {\n\n format!(\n\n \"Could not get app user data directory path for version `{}`\",\n\n version.to_string()\n\n )\n\n })?;\n\n output_file_path.push(&file_info.filename);\n", "file_path": "src/download.rs", "rank": 0, "score": 160032.76080685726 }, { "content": "/// `boon love remove` subcommand\n\nfn love_remove(version: LoveVersion) -> Result<()> {\n\n let version = version.to_string();\n\n let installed_versions =\n\n get_installed_love_versions().context(\"Could not get installed LÖVE versions\")?;\n\n\n\n if installed_versions.contains(&version) {\n\n let output_file_path = app_dir(AppDataType::UserData, &APP_INFO, \"/\")\n\n .context(\"Could not get app user data path\")?;\n\n let path = PathBuf::new().join(output_file_path).join(&version);\n\n remove_dir_all(&path).with_context(|| {\n\n format!(\n\n \"Could not remove installed version of LÖVE {} at path `{}`\",\n\n version,\n\n path.display()\n\n )\n\n })?;\n\n println!(\"Removed LÖVE version {}.\", version);\n\n } else {\n\n println!(\"LÖVE version '{}' is not installed.\", version);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 1, "score": 110856.29443900209 }, { "content": "/// `boon love download` subcommand\n\nfn love_download(version: LoveVersion) -> Result<()> {\n\n download::download_love(version, Platform::Windows, Bitness::X86).context(format!(\n\n \"Could not download LÖVE {} for Windows (32-bit)\",\n\n version.to_string()\n\n ))?;\n\n download::download_love(version, Platform::Windows, 
Bitness::X64).context(format!(\n\n \"Could not download LÖVE {} for Windows (64-bit)\",\n\n version.to_string()\n\n ))?;\n\n download::download_love(version, Platform::MacOs, Bitness::X64).context(format!(\n\n \"Could not download LÖVE {} for macOS\",\n\n version.to_string()\n\n ))?;\n\n\n\n println!(\n\n \"\\nLÖVE {} is now available for building.\",\n\n version.to_string()\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 110070.04824378631 }, { "content": "fn get_installed_love_versions() -> Result<Vec<String>> {\n\n let mut installed_versions: Vec<String> = Vec::new();\n\n let output_file_path =\n\n app_dir(AppDataType::UserData, &APP_INFO, \"/\").expect(\"Could not get app directory path\");\n\n let walker = WalkDir::new(output_file_path).max_depth(1).into_iter();\n\n for entry in walker {\n\n let entry = entry.expect(\"Could not get DirEntry\");\n\n if entry.depth() == 1 {\n\n let file_name = entry\n\n .file_name()\n\n .to_str()\n\n .with_context(|| format!(\"Could not parse file name `{:?}` to str\", entry))?;\n\n\n\n // Exclude directories that do not parse to a love\n\n // version, just in case some bogus directories\n\n // got in there somehow.\n\n if let Ok(version) = file_name.parse::<LoveVersion>() {\n\n installed_versions.push(version.to_string());\n\n }\n\n }\n\n }\n\n\n\n Ok(installed_versions)\n\n}\n", "file_path": "src/main.rs", "rank": 3, "score": 109357.04703137159 }, { "content": "fn get_love_download_location(\n\n version: LoveVersion,\n\n platform: Platform,\n\n bitness: Bitness,\n\n) -> Result<LoveDownloadLocation> {\n\n let release_location = \"https://github.com/love2d/love/releases/download\";\n\n let (version_string, release_file_name) = match (version, platform, bitness) {\n\n (LoveVersion::V11_3, Platform::Windows, Bitness::X64) => (\"11.3\", \"love-11.3-win64.zip\"),\n\n (LoveVersion::V11_3, Platform::Windows, Bitness::X86) => (\"11.3\", \"love-11.3-win32.zip\"),\n\n (LoveVersion::V11_3, 
Platform::MacOs, Bitness::X64) => (\"11.3\", \"love-11.3-macos.zip\"),\n\n\n\n (LoveVersion::V11_2, Platform::Windows, Bitness::X64) => (\"11.2\", \"love-11.2-win64.zip\"),\n\n (LoveVersion::V11_2, Platform::Windows, Bitness::X86) => (\"11.2\", \"love-11.2-win32.zip\"),\n\n (LoveVersion::V11_2, Platform::MacOs, Bitness::X64) => (\"11.2\", \"love-11.2-macos.zip\"),\n\n\n\n (LoveVersion::V11_1, Platform::Windows, Bitness::X64) => (\"11.1\", \"love-11.1-win64.zip\"),\n\n (LoveVersion::V11_1, Platform::Windows, Bitness::X86) => (\"11.1\", \"love-11.1-win32.zip\"),\n\n (LoveVersion::V11_1, Platform::MacOs, Bitness::X64) => (\"11.1\", \"love-11.1-macos.zip\"),\n\n\n\n (LoveVersion::V11_0, Platform::Windows, Bitness::X64) => (\"11.0\", \"love-11.0.0-win64.zip\"),\n", "file_path": "src/download.rs", "rank": 4, "score": 78500.8173672648 }, { "content": "/// Initializes the project settings and build settings.\n\n// @TODO: Get values from local project config\n\nfn get_settings() -> Result<(Config, BuildSettings)> {\n\n let mut settings = config::Config::new();\n\n let default_config = config::File::from_str(DEFAULT_CONFIG, config::FileFormat::Toml);\n\n settings.merge(default_config).context(format!(\n\n \"Could not set default configuration `{}`\",\n\n BOON_CONFIG_FILE_NAME\n\n ))?;\n\n\n\n let mut ignore_list: HashSet<String> = settings.get(\"build.ignore_list\").unwrap();\n\n if Path::new(BOON_CONFIG_FILE_NAME).exists() {\n\n // Add in `./Boon.toml`\n\n settings\n\n .merge(config::File::with_name(BOON_CONFIG_FILE_NAME))\n\n .context(format!(\n\n \"Error while reading config file `{}`.\",\n\n BOON_CONFIG_FILE_NAME\n\n ))?;\n\n\n\n let project_ignore_list: HashSet<String> = settings.get(\"build.ignore_list\").unwrap();\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 72503.24270067664 }, { "content": "/// `boon clean` command\n\nfn clean(build_settings: &BuildSettings) -> Result<()> {\n\n // @TODO: Get top-level directory from git?\n\n let directory = \".\";\n\n let 
mut release_dir_path = Path::new(directory)\n\n .canonicalize()\n\n .context(\"Could not get canonical directory path\")?;\n\n release_dir_path.push(build_settings.output_directory.as_str());\n\n\n\n if release_dir_path.exists() {\n\n println!(\"Cleaning {}\", release_dir_path.display());\n\n remove_dir_all(&release_dir_path).with_context(|| {\n\n format!(\n\n \"Could not clean release directory `{}`\",\n\n release_dir_path.display()\n\n )\n\n })?;\n\n println!(\n\n \"Release directory `{}` cleaned.\",\n\n release_dir_path.display()\n\n );\n\n } else {\n\n println!(\n\n \"Could not find expected release directory at `{}`\",\n\n release_dir_path.display()\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 63913.16662580827 }, { "content": "/// `boon build` command\n\nfn build(\n\n settings: &Config,\n\n build_settings: &BuildSettings,\n\n target: Target,\n\n version: LoveVersion,\n\n directory: String,\n\n) -> Result<()> {\n\n let mut targets = &build_settings.targets;\n\n let cmd_target = vec![target];\n\n if target != Target::love {\n\n targets = &cmd_target;\n\n }\n\n\n\n if targets.contains(&Target::all) {\n\n println!(\"Building all targets from directory `{}`\", directory);\n\n } else {\n\n println!(\n\n \"Building targets `{:?}` from directory `{}`\",\n\n targets, directory\n\n );\n", "file_path": "src/main.rs", "rank": 7, "score": 62287.47388342868 }, { "content": "fn build_love(\n\n build_settings: &BuildSettings,\n\n project: &Project,\n\n stats_list: &mut Vec<BuildStatistics>,\n\n) -> Result<()> {\n\n stats_list\n\n .push(build::create_love(project, build_settings).context(\"Failed to build .love file\")?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 61444.40788645207 }, { "content": "/// `boon init` command\n\nfn init() -> Result<()> {\n\n if Path::new(BOON_CONFIG_FILE_NAME).exists() {\n\n println!(\"Project already initialized.\");\n\n } else {\n\n 
File::create(BOON_CONFIG_FILE_NAME).context(format!(\n\n \"Failed to create config file `{}`.\",\n\n BOON_CONFIG_FILE_NAME\n\n ))?;\n\n std::fs::write(BOON_CONFIG_FILE_NAME, DEFAULT_CONFIG).context(format!(\n\n \"Failed to write default configuration to `{}`.\",\n\n BOON_CONFIG_FILE_NAME\n\n ))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 54908.17653259644 }, { "content": "fn main() -> Result<()> {\n\n // load in config from Settings file\n\n let (settings, build_settings) =\n\n get_settings().context(\"Could not load project settings or build settings\")?;\n\n\n\n match BoonOpt::from_args() {\n\n BoonOpt::Init => init().context(\"Failed to initialize boon configuration file\")?,\n\n\n\n BoonOpt::Build {\n\n target,\n\n version,\n\n directory,\n\n } => build(&settings, &build_settings, target, version, directory)\n\n .context(\"Failed to build project\")?,\n\n BoonOpt::Love(subcmd) => {\n\n match subcmd {\n\n LoveSubcommand::Download { version } => {\n\n love_download(version).context(\"Failed to download and install LÖVE\")?;\n\n }\n\n LoveSubcommand::Remove { version } => {\n", "file_path": "src/main.rs", "rank": 10, "score": 54905.8240744379 }, { "content": "fn build_macos(\n\n build_settings: &BuildSettings,\n\n version: LoveVersion,\n\n project: &Project,\n\n stats_list: &mut Vec<BuildStatistics>,\n\n) -> Result<()> {\n\n stats_list.push(\n\n build::macos::create_app(project, build_settings, version, Bitness::X64)\n\n .context(\"Failed to build for macOS\")?,\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 11, "score": 43903.4579526537 }, { "content": "fn build_windows(\n\n build_settings: &BuildSettings,\n\n version: LoveVersion,\n\n project: &Project,\n\n stats_list: &mut Vec<BuildStatistics>,\n\n) -> Result<()> {\n\n stats_list.push(\n\n build::windows::create_exe(project, build_settings, version, Bitness::X86)\n\n .context(\"Failed to build for Windows 64-bit\")?,\n\n );\n\n stats_list.push(\n\n 
build::windows::create_exe(project, build_settings, version, Bitness::X64)\n\n .context(\"Failed to build for Windows 32-bit\")?,\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 12, "score": 43903.4579526537 }, { "content": "fn display_build_report(build_stats: Vec<BuildStatistics>) {\n\n let mut build_report_table = Table::new();\n\n build_report_table.set_format(*prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);\n\n build_report_table.set_titles(row![\"Build\", \"File\", \"Time\", \"Size\"]);\n\n\n\n for stats in build_stats {\n\n let time = if stats.time.as_millis() < 1000 {\n\n format!(\"{:6} ms\", stats.time.as_millis())\n\n } else {\n\n format!(\"{:6.2} s\", stats.time.as_secs_f64())\n\n };\n\n let size = stats\n\n .size\n\n .file_size(file_size_opts::CONVENTIONAL)\n\n .expect(\"Could not format build file size\");\n\n build_report_table.add_row(row![\n\n stats.name,\n\n stats.file_name,\n\n r->time, // Right aligned\n\n r->size // Right aligned\n\n ]);\n\n }\n\n\n\n println!();\n\n build_report_table.printstd();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 41260.47443704162 }, { "content": "function love.conf(t)\n\n t.window = false\n\n\n\n t.modules.audio = false\n\n t.modules.data = false\n\n t.modules.event = true\n\n t.modules.font = false\n\n t.modules.graphics = false\n\n t.modules.image = false\n\n t.modules.joystick = false\n\n t.modules.keyboard = false\n\n t.modules.math = false\n\n t.modules.mouse = false\n\n t.modules.physics = false\n\n t.modules.sound = false\n\n t.modules.system = true\n\n t.modules.thread = false\n\n t.modules.timer = false\n\n t.modules.touch = false\n\n t.modules.video = false\n\n t.modules.window = false\n\nend\n", "file_path": "tests/game_build/conf.lua", "rank": 14, "score": 31000.77363705549 }, { "content": "function love.load()\n\n test()\n\n love.event.quit()\n\nend", "file_path": "tests/game_build/main.lua", "rank": 15, "score": 31000.77363705549 }, { "content": "# 
boon\n\n\n\nboon is a build tool for LÖVE. It makes it easy to package your game for multiple platforms, similar to [love-release](https://github.com/MisterDA/love-release). It is a single executable with no other dependecies. It can be used across multiple projects and makes automated building a breeze.\n\n\n\n![Rust](https://github.com/camchenry/boon/workflows/Rust/badge.svg)\n\n\n\nLicensed under the MIT License.\n\n\n\n# Features\n\n* Package your game for multiple platforms. Supported platforms:\n\n * Native (.love)\n\n * Windows (.exe)\n\n * macOS (.app)\n\n * Linux (coming soon)\n\n* Package your game for multiple versions of LÖVE. Supported versions:\n\n * 11.x\n\n * 0.10.2\n\n* No external dependencies\n\n\n\n# Getting started\n\n\n\n## Installation\n\n\n\n### Download prebuilt binaries (recommended)\n\n\n\nboon has prebuilt binaries on the GitHub Releases page. Download the zip file, then extract the executable onto your PATH.\n\n\n\nIf you're a **Windows** user, download the `boon-windows-amd64` file.\n\n\n\nIf you're a **macOS** user, download the `boon-macos-amd64` file.\n\n\n\nIf you're a **Linux** user, download the `boon-linux-amd64` file.\n\n\n\n## Usage\n\n\n\nIn general, if you need help figuring out how to use a command you can pass the `--help` option to see possible arguments, options, and subcommands. To get started and see the top-level commands and options, run `boon --help`.\n\n\n\n### Initialization\n\nTo start using boon with your project, it is recommended to first initialize it. This will create a `Boon.toml` file that will let you configure the settings for your project.\n\n\n\n```bash\n\n$ boon init\n\n```\n\n\n\nIf you don't initialize boon, you can still build your project normally, but the default configuration will be used to build it instead. 
You can initialize it later, or create a `Boon.toml` file yourself.\n\n\n\n### Downloading LÖVE\n\n\n\nIn order to build your project, you first need to download the versionof LÖVE that you are using for it.\n\n\n\n```bash\n\n# Will download LÖVE 11.3 for building\n\n$ boon love download 11.3\n\n```\n\n\n", "file_path": "README.md", "rank": 16, "score": 24449.848138513593 }, { "content": "# Changelog\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.3.0] - 2021-08-13\n\n\n\n### Added\n\n\n\n- `boon love` learned how to explicitly list versions through the subcommand `list`: `boon love list`.\n\n\n\n### Fixed\n\n\n\n- `boon download` now uses GitHub for binary downloads instead of Bitbucket, fixing any dead links resulting from the transition away from Bitbucket.\n\n\n\n## [0.2.0] - 2020-03-15\n\n\n\n### Added\n\n\n\n- New build report to show output file name, elapsed time, and total output size.\n\n\n\n### Changed\n\n\n\n- Error handling to better highlight what caused an error to occur. 
An error will now display a list of causes, in order.\n\n- `boon build` learned how to build all supported targets at the same time using the `--target all` option.\n\n\n\n### Fixed\n\n\n\n- `boon love download` no longer makes an extra unnecessary HTTP request when downloading LÖVE which should improve performance.\n\n- `boon --version` now displays the correct release version.\n\n- Unnecessary references (pointers) to small integers values have been removed, slightly improving performance.\n\n- Library dependencies have been updated, improving performance and fixing many issues.\n\n- Duplicate entries in the ignore list when merging default and project configuration, removing unneeded work on build.\n\n- Copy semantics for platform/bitness enums, which may have resolved some issues with cross-platform compatibility.\n\n\n\n## [0.1.1] - 2020-02-11\n\n\n\n### Fixed\n\n\n\n- Incorrect macOS download locations for LÖVE 11.3.\n\n\n\n## [0.1.0] - 2019-04-17\n\n\n\n### Added\n\n\n\n- The initial release for boon.\n\n- Native LÖVE builds.\n\n- Windows (32/64-bit) builds.\n\n- macOS builds.\n\n- LÖVE version manager.\n", "file_path": "CHANGELOG.md", "rank": 17, "score": 24448.05705545712 }, { "content": "### Building your project\n\n\n\nFinally, to build your project just run `boon build` followed by where you want to run it. Usually, you just want to run it on the current directory, `.`.\n\n\n\n```bash\n\n$ boon build .\n\n```\n\n\n\nWithout a target specified, this will build a `.love` file and put it in the `release` directory. This is shorthand for `boon build <dir> --target love`\n\n\n\nIt is possible to build all targets simultaneously by passing `all` as the target, for example, `boon build . --target all`.\n\n\n\n#### Building for Windows\n\n\n\nTo build a Windows application:\n\n\n\n```bash\n\n$ boon build . --target windows\n\n```\n\n\n\n#### Building for macOS\n\n\n\nTo build a macOS application:\n\n\n\n```bash\n\n$ boon build . 
--target macos\n\n```\n\n\n\n### Building for a different version of LÖVE\n\n\n\nIf you would like to build for a LÖVE version other than the default, you can specify it using the `--version` flag.\n\n\n\n```bash\n\n$ boon build . --version 0.10.2\n\n```\n\n\n\n## Compiling from source\n\n\n\nboon is written in Rust, so you will need to install [Rust](https://www.rust-lang.org/) in order to compile it.\n\n\n\nTo build boon:\n\n```bash\n\ngit clone [email protected]:camchenry/boon.git\n\ncd boon\n\ncargo build --release\n\n./target/release/boon --version\n\nboon 0.2.0\n\n```\n", "file_path": "README.md", "rank": 18, "score": 24442.84675693705 }, { "content": "\n\n // @TODO: Add integrity checking with hash\n\n if output_file_path.exists() {\n\n println!(\"File already exists: {}\", output_file_path.display());\n\n } else {\n\n println!(\"Downloading '{}'\", file_info.url);\n\n\n\n let mut resp = reqwest::blocking::get(&file_info.url)\n\n .with_context(|| format!(\"Could not fetch URL `{}`\", &file_info.url))?;\n\n\n\n let file = File::create(&output_file_path)\n\n .with_context(|| format!(\"Could not create file `{}`\", output_file_path.display()))?;\n\n\n\n let mut writer = std::io::BufWriter::new(&file);\n\n resp.copy_to(&mut writer).with_context(|| {\n\n format!(\n\n \"Could not copy response from `{}` to file `{}`\",\n\n resp.url(),\n\n output_file_path.display()\n\n )\n", "file_path": "src/download.rs", "rank": 19, "score": 20017.99332902984 }, { "content": " .by_index(i)\n\n .unwrap_or_else(|_| panic!(\"Could not get archive file by index '{}'\", i));\n\n let mut outpath = output_file_path.clone();\n\n outpath.pop();\n\n outpath.push(file.enclosed_name().expect(\"Failed to get well-formed zip file entry path.\"));\n\n\n\n if file.name().ends_with('/') {\n\n std::fs::create_dir_all(&outpath).expect(\"Could not create output directory path\");\n\n } else {\n\n if let Some(p) = outpath.parent() {\n\n if !p.exists() {\n\n std::fs::create_dir_all(&p)\n\n 
.expect(\"Could not create output directory path\");\n\n }\n\n }\n\n let mut outfile =\n\n std::fs::File::create(&outpath).expect(\"Could not create output file\");\n\n std::io::copy(&mut file, &mut outfile).expect(\"Could not copy data to output file\");\n\n }\n\n\n", "file_path": "src/download.rs", "rank": 20, "score": 20017.28125137348 }, { "content": "use crate::types::{LoveDownloadLocation, LoveVersion};\n\n\n\nuse crate::APP_INFO;\n\nuse app_dirs::{AppDataType, app_dir};\n\n\n\nuse crate::{Bitness, Platform};\n\n\n\nuse anyhow::{bail, Context, Result};\n\nuse std::fs::File;\n\nuse std::io::Write;\n\n\n", "file_path": "src/download.rs", "rank": 21, "score": 20015.030256768838 }, { "content": " })?;\n\n writer\n\n .flush()\n\n .with_context(|| format!(\"Could not write file `{}`\", output_file_path.display()))?;\n\n }\n\n\n\n println!(\"Extracting '{}'\", output_file_path.display());\n\n {\n\n let file = File::open(&output_file_path)\n\n .with_context(|| format!(\"Could not open file `{}`\", output_file_path.display()))?;\n\n\n\n let mut archive = zip::ZipArchive::new(&file).with_context(|| {\n\n format!(\n\n \"Could not create zip archive `{}`\",\n\n output_file_path.display()\n\n )\n\n })?;\n\n\n\n for i in 0..archive.len() {\n\n let mut file = archive\n", "file_path": "src/download.rs", "rank": 22, "score": 20014.25298283888 }, { "content": " (LoveVersion::V11_0, Platform::Windows, Bitness::X86) => (\"11.0\", \"love-11.0.0-win32.zip\"),\n\n (LoveVersion::V11_0, Platform::MacOs, Bitness::X64) => (\"11.0\", \"love-11.0.0-macos.zip\"),\n\n\n\n (LoveVersion::V0_10_2, Platform::Windows, Bitness::X64) => (\"0.10.2\", \"love-0.10.2-win64.zip\"),\n\n (LoveVersion::V0_10_2, Platform::Windows, Bitness::X86) => (\"0.10.2\", \"love-0.10.2-win32.zip\"),\n\n (LoveVersion::V0_10_2, Platform::MacOs, Bitness::X64) => (\"0.10.2\", \"love-0.10.2-macosx-x64.zip\"),\n\n _ => {\n\n bail!(\n\n \"Unsupported platform {}-{} for version {}\",\n\n platform,\n\n bitness,\n\n 
version\n\n );\n\n }\n\n };\n\n\n\n let url = format!(\"{}/{}/{}\", release_location, version_string, release_file_name);\n\n Ok(LoveDownloadLocation {\n\n filename: release_file_name.to_string(),\n\n url,\n\n })\n\n}\n", "file_path": "src/download.rs", "rank": 23, "score": 20013.9998175288 }, { "content": " // Get and Set permissions\n\n #[cfg(unix)]\n\n {\n\n use std::os::unix::fs::PermissionsExt;\n\n\n\n if let Some(mode) = file.unix_mode() {\n\n std::fs::set_permissions(&outpath, std::fs::Permissions::from_mode(mode))\n\n .expect(\"Could not set permissions on file\");\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/download.rs", "rank": 24, "score": 20002.07514548598 }, { "content": "enum BoonOpt {\n\n #[structopt(about = \"Build game for a target platform\")]\n\n Build {\n\n #[structopt(\n\n long,\n\n short,\n\n help=\"Specify which target platform to build for\",\n\n possible_values=&Target::variants(),\n\n default_value=\"love\"\n\n )]\n\n target: Target,\n\n #[structopt(\n\n long,\n\n short,\n\n help = \"Specify which target version of LÖVE to build for\",\n\n possible_values=&LoveVersion::variants(),\n\n default_value=\"11.3\",\n\n )]\n\n version: LoveVersion,\n\n directory: String,\n\n },\n\n #[structopt(about = \"Remove built packages\")]\n\n Clean,\n\n #[structopt(about = \"Initialize configuration for project\")]\n\n Init,\n\n #[structopt(about = \"Manage multiple LÖVE versions\")]\n\n Love(LoveSubcommand),\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 25, "score": 18162.362987538487 }, { "content": "#[derive(StructOpt, Debug)]\n\nenum LoveSubcommand {\n\n #[structopt(about = \"Download a version of LÖVE\")]\n\n Download {\n\n #[structopt(possible_values=&LoveVersion::variants())]\n\n version: LoveVersion,\n\n },\n\n #[structopt(about = \"Remove a version of LÖVE\")]\n\n Remove {\n\n #[structopt(possible_values=&LoveVersion::variants())]\n\n version: LoveVersion,\n\n },\n\n #[structopt(about = \"List installed LÖVE 
versions\")]\n\n List,\n\n}\n\n\n\nconst APP_INFO: AppInfo = AppInfo {\n\n name: \"boon\",\n\n author: \"boon\",\n\n};\n\n\n\nconst BOON_CONFIG_FILE_NAME: &str = \"Boon.toml\";\n\nconst DEFAULT_CONFIG: &str = include_str!(concat!(\"../\", \"Boon.toml\"));\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 17540.94993379837 }, { "content": "-- This file ensures that required files will work\n\n\n\nlocal test = function()\n\n local os = love.system.getOS()\n\n local fh = assert(io.open('OK', 'wb'))\n\n fh:write(os)\n\n fh:flush()\n\n fh:close()\n\nend\n\n\n\nreturn test\n", "file_path": "tests/game_build/test.lua", "rank": 27, "score": 17276.32549852962 }, { "content": "local test = require 'test'\n\n\n", "file_path": "tests/game_build/main.lua", "rank": 28, "score": 17270.23745834284 }, { "content": "\n\n#[derive(Debug, Clone)]\n\n/// Stats about the build duration, size, etc.\n\npub struct BuildStatistics {\n\n /// Name of the build, e.g. Windows, macOS, etc.\n\n pub name: String,\n\n /// File name of the build output\n\n pub file_name: String,\n\n /// Time it took to build\n\n pub time: std::time::Duration,\n\n /// The size of the final build in bytes\n\n pub size: u64,\n\n}\n\n\n\nimpl FromStr for LoveVersion {\n\n type Err = String;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n LOVE_VERSIONS\n\n .iter()\n\n .enumerate()\n", "file_path": "src/types.rs", "rank": 29, "score": 17.163497406105634 }, { "content": " love_remove(version).context(\"Failed to remove LÖVE\")?;\n\n }\n\n LoveSubcommand::List => {\n\n // List installed versions\n\n let installed_versions = get_installed_love_versions()\n\n .context(\"Could not get installed LÖVE versions\")?;\n\n\n\n println!(\"Installed versions:\");\n\n for version in installed_versions {\n\n println!(\"* {}\", version);\n\n }\n\n }\n\n }\n\n }\n\n BoonOpt::Clean => clean(&build_settings).context(\"Failed to clean release directory\")?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 
30, "score": 16.09024893927902 }, { "content": " .get_str(\"project.email\")\n\n .context(\"Could not get project email\")?,\n\n url: settings\n\n .get_str(\"project.url\")\n\n .context(\"Could not get project URL\")?,\n\n version: settings\n\n .get_str(\"project.version\")\n\n .context(\"Could not get project version\")?,\n\n };\n\n\n\n build::init(&project, build_settings).with_context(|| {\n\n format!(\n\n \"Failed to initialize the build process using build settings: {}\",\n\n build_settings\n\n )\n\n })?;\n\n\n\n let mut stats_list = Vec::new();\n\n\n\n build_love(build_settings, &project, &mut stats_list)?;\n", "file_path": "src/main.rs", "rank": 31, "score": 15.430942836140984 }, { "content": " X86, // 32 bit\n\n X64, // 64 bit\n\n}\n\n\n\nconst LOVE_VERSIONS: [&str; 5] = [\"11.3\", \"11.2\", \"11.1\", \"11.0\", \"0.10.2\"];\n\n/// Represents a specific version of LÖVE2D\n\n#[derive(Copy, Clone, Debug, Primitive)]\n\npub enum LoveVersion {\n\n V11_3 = 0,\n\n V11_2 = 1,\n\n V11_1 = 2,\n\n V11_0 = 3,\n\n V0_10_2 = 4,\n\n}\n\n\n\n/// File info about remote download\n\npub struct LoveDownloadLocation {\n\n pub filename: String,\n\n pub url: String,\n\n}\n", "file_path": "src/types.rs", "rank": 32, "score": 14.733091870864827 }, { "content": " pub directory: String,\n\n pub uti: String, // Uniform Type Identifier, e.g. 
\"org.love2d.love\"\n\n\n\n pub authors: String,\n\n pub description: String,\n\n pub email: String,\n\n pub url: String,\n\n pub version: String,\n\n}\n\n\n\n/// Represents an operating system or other platform/environment.\n\n#[derive(Debug, Copy, Clone)]\n\npub enum Platform {\n\n Windows,\n\n MacOs,\n\n}\n\n\n\n/// Represents a CPU architecture\n\n#[derive(Debug, Copy, Clone)]\n\npub enum Bitness {\n", "file_path": "src/types.rs", "rank": 33, "score": 13.889519395084367 }, { "content": "use anyhow::{bail, Context, Result};\n\nuse app_dirs::{AppDataType, AppInfo, app_dir};\n\nuse config::Config;\n\nuse humansize::{file_size_opts, FileSize};\n\nuse prettytable::{cell, row, Table};\n\nuse remove_dir_all::remove_dir_all;\n\nuse std::collections::HashSet;\n\nuse std::fs::File;\n\nuse std::path::{Path, PathBuf};\n\nuse structopt::StructOpt;\n\nuse walkdir::WalkDir;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(\n\n name = \"boon\",\n\n author = \"Cameron McHenry\",\n\n about = \"boon: LÖVE2D build and deploy tool\"\n\n)]\n", "file_path": "src/main.rs", "rank": 34, "score": 12.848942863118179 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n use crate::types::Bitness::{X64, X86};\n\n\n\n let str = match self {\n\n X86 => \"x86\",\n\n X64 => \"x64\",\n\n };\n\n write!(f, \"{}\", str)\n\n }\n\n}\n\n\n\nimpl Display for Platform {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n use crate::types::Platform::{MacOs, Windows};\n\n\n\n let str = match self {\n\n Windows => \"Windows\",\n\n MacOs => \"macOS\",\n\n };\n\n write!(f, \"{}\", str)\n", "file_path": "src/types.rs", "rank": 35, "score": 12.677718468580663 }, { "content": "#![warn(\n\n clippy::all,\n\n clippy::pedantic,\n\n clippy::nursery,\n\n clippy::cargo\n\n)]\n\n#![allow(\n\n clippy::non_ascii_literal,\n\n clippy::missing_docs_in_private_items,\n\n clippy::implicit_return,\n\n clippy::print_stdout,\n\n clippy::module_name_repetitions,\n\n 
clippy::expect_used\n\n)]\n\nmod types;\n\nuse crate::types::{Bitness, BuildSettings, BuildStatistics, LoveVersion, Platform, Project, Target};\n\n\n\nmod build;\n\nmod download;\n\n\n", "file_path": "src/main.rs", "rank": 36, "score": 12.437695366262925 }, { "content": " .find(|(_, v)| s == **v)\n\n .map(|(i, _)| Self::from_usize(i))\n\n .flatten()\n\n .ok_or(format!(\"{} is not a valid love version.\", s))\n\n }\n\n}\n\n\n\nimpl LoveVersion {\n\n pub const fn variants() -> [&'static str; 5] {\n\n LOVE_VERSIONS\n\n }\n\n}\n\n\n\nimpl Display for LoveVersion {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", LOVE_VERSIONS[*self as usize])\n\n }\n\n}\n\n\n\nimpl Display for Bitness {\n", "file_path": "src/types.rs", "rank": 37, "score": 12.022889610316513 }, { "content": " }\n\n}\n\n\n\nimpl Display for BuildSettings {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"{{\\n\\\n\n \\toutput_directory: {}\\n\\\n\n \\texclude_default_ignore_list: {}\\n\\\n\n \\tignore_list: {:?}\\n\\\n\n }}\",\n\n self.output_directory, self.exclude_default_ignore_list, self.ignore_list\n\n )\n\n }\n\n}\n\n\n\narg_enum! 
{\n\n #[derive(Debug, Copy, Clone, PartialEq)]\n\n #[allow(non_camel_case_types)]\n\n pub enum Target {\n\n love,\n\n windows,\n\n macos,\n\n all,\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 38, "score": 11.991694494249495 }, { "content": "#![allow(clippy::use_debug)]\n\nuse clap::arg_enum;\n\nuse enum_primitive_derive::Primitive;\n\nuse num_traits::FromPrimitive;\n\nuse std::collections::HashSet;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct BuildSettings {\n\n pub output_directory: String,\n\n pub ignore_list: HashSet<String>,\n\n pub exclude_default_ignore_list: bool,\n\n pub targets: Vec<Target>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Project {\n\n pub title: String, // Ex: \"My Super Awesome Game\"\n\n pub package_name: String, // Ex: \"super_game\"\n", "file_path": "src/types.rs", "rank": 39, "score": 11.935732193944677 }, { "content": " }\n\n\n\n let project = Project {\n\n title: settings\n\n .get_str(\"project.title\")\n\n .context(\"Could not get project title\")?,\n\n package_name: settings\n\n .get_str(\"project.package_name\")\n\n .context(\"Could not get project package name\")?,\n\n directory,\n\n uti: settings\n\n .get_str(\"project.uti\")\n\n .context(\"Could not get project UTI\")?,\n\n authors: settings\n\n .get_str(\"project.authors\")\n\n .context(\"Could not get project authors\")?,\n\n description: settings\n\n .get_str(\"project.description\")\n\n .context(\"Could not get project description\")?,\n\n email: settings\n", "file_path": "src/main.rs", "rank": 40, "score": 11.19835148344781 }, { "content": " if settings.get(\"build.exclude_default_ignore_list\").unwrap() {\n\n ignore_list = project_ignore_list;\n\n } else {\n\n ignore_list.extend(project_ignore_list);\n\n }\n\n }\n\n\n\n let hash_targets: HashSet<String> = settings.get(\"build.targets\").unwrap();\n\n let mut targets: Vec<Target> = Vec::new();\n\n for target in &hash_targets {\n\n targets.push(\n\n 
match target.as_str() {\n\n \"love\" => Target::love,\n\n \"windows\" => Target::windows,\n\n \"macos\" => Target::macos,\n\n \"all\" => Target::all,\n\n _ => bail!(\"{} is not a valid build target.\", target),\n\n }\n\n );\n\n }\n", "file_path": "src/main.rs", "rank": 41, "score": 10.066352142902351 }, { "content": "\n\n let build_settings = BuildSettings {\n\n ignore_list,\n\n exclude_default_ignore_list: settings.get(\"build.exclude_default_ignore_list\")?,\n\n output_directory: settings.get(\"build.output_directory\")?,\n\n targets,\n\n };\n\n\n\n Ok((settings, build_settings))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 8.806092011323386 }, { "content": "\n\n if targets.contains(&Target::windows) || targets.contains(&Target::all) {\n\n build_windows(build_settings, version, &project, &mut stats_list)?;\n\n }\n\n\n\n if targets.contains(&Target::macos) || targets.contains(&Target::all) {\n\n build_macos(build_settings, version, &project, &mut stats_list)?;\n\n }\n\n\n\n // Display build report\n\n display_build_report(stats_list);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 43, "score": 8.03062503422209 } ]
Rust
compiler/rustc_middle/src/ty/consts/int.rs
cchiw/rust
469ee7cc68aa4d64d6c3bcff4e4108d0c8b97240
use rustc_apfloat::ieee::{Double, Single}; use rustc_apfloat::Float; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_target::abi::Size; use std::convert::{TryFrom, TryInto}; use std::fmt; use crate::ty::TyCtxt; #[derive(Copy, Clone)] pub struct ConstInt { int: ScalarInt, signed: bool, is_ptr_sized_integral: bool, } impl ConstInt { pub fn new(int: ScalarInt, signed: bool, is_ptr_sized_integral: bool) -> Self { Self { int, signed, is_ptr_sized_integral } } } impl std::fmt::Debug for ConstInt { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let Self { int, signed, is_ptr_sized_integral } = *self; let size = int.size().bytes(); let raw = int.data; if signed { let bit_size = size * 8; let min = 1u128 << (bit_size - 1); let max = min - 1; if raw == min { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "isize::MIN"), (1, _) => write!(fmt, "i8::MIN"), (2, _) => write!(fmt, "i16::MIN"), (4, _) => write!(fmt, "i32::MIN"), (8, _) => write!(fmt, "i64::MIN"), (16, _) => write!(fmt, "i128::MIN"), _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } } else if raw == max { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "isize::MAX"), (1, _) => write!(fmt, "i8::MAX"), (2, _) => write!(fmt, "i16::MAX"), (4, _) => write!(fmt, "i32::MAX"), (8, _) => write!(fmt, "i64::MAX"), (16, _) => write!(fmt, "i128::MAX"), _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } } else { match size { 1 => write!(fmt, "{}", raw as i8)?, 2 => write!(fmt, "{}", raw as i16)?, 4 => write!(fmt, "{}", raw as i32)?, 8 => write!(fmt, "{}", raw as i64)?, 16 => write!(fmt, "{}", raw as i128)?, _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } if fmt.alternate() { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "_isize")?, (1, _) => write!(fmt, "_i8")?, (2, _) => write!(fmt, "_i16")?, (4, _) => write!(fmt, "_i32")?, (8, _) => 
write!(fmt, "_i64")?, (16, _) => write!(fmt, "_i128")?, _ => bug!(), } } Ok(()) } } else { let max = Size::from_bytes(size).truncate(u128::MAX); if raw == max { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "usize::MAX"), (1, _) => write!(fmt, "u8::MAX"), (2, _) => write!(fmt, "u16::MAX"), (4, _) => write!(fmt, "u32::MAX"), (8, _) => write!(fmt, "u64::MAX"), (16, _) => write!(fmt, "u128::MAX"), _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } } else { match size { 1 => write!(fmt, "{}", raw as u8)?, 2 => write!(fmt, "{}", raw as u16)?, 4 => write!(fmt, "{}", raw as u32)?, 8 => write!(fmt, "{}", raw as u64)?, 16 => write!(fmt, "{}", raw as u128)?, _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } if fmt.alternate() { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "_usize")?, (1, _) => write!(fmt, "_u8")?, (2, _) => write!(fmt, "_u16")?, (4, _) => write!(fmt, "_u32")?, (8, _) => write!(fmt, "_u64")?, (16, _) => write!(fmt, "_u128")?, _ => bug!(), } } Ok(()) } } } } #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[repr(packed)] pub struct ScalarInt { data: u128, size: u8, } impl<CTX> crate::ty::HashStable<CTX> for ScalarInt { fn hash_stable(&self, hcx: &mut CTX, hasher: &mut crate::ty::StableHasher) { { self.data }.hash_stable(hcx, hasher); self.size.hash_stable(hcx, hasher); } } impl<S: Encoder> Encodable<S> for ScalarInt { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u128(self.data)?; s.emit_u8(self.size) } } impl<D: Decoder> Decodable<D> for ScalarInt { fn decode(d: &mut D) -> ScalarInt { ScalarInt { data: d.read_u128(), size: d.read_u8() } } } impl ScalarInt { pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: 1 }; pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: 1 }; pub const ZST: ScalarInt = ScalarInt { data: 0_u128, size: 0 }; #[inline] pub fn size(self) -> Size { Size::from_bytes(self.size) } #[inline(always)] fn 
check_data(self) { debug_assert_eq!( self.size().truncate(self.data), { self.data }, "Scalar value {:#x} exceeds size of {} bytes", { self.data }, self.size ); } #[inline] pub fn null(size: Size) -> Self { Self { data: 0, size: size.bytes() as u8 } } #[inline] pub fn is_null(self) -> bool { self.data == 0 } #[inline] pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> { let data = i.into(); if size.truncate(data) == data { Some(Self { data, size: size.bytes() as u8 }) } else { None } } #[inline] pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> { let i = i.into(); let truncated = size.truncate(i as u128); if size.sign_extend(truncated) as i128 == i { Some(Self { data: truncated, size: size.bytes() as u8 }) } else { None } } #[inline] pub fn assert_bits(self, target_size: Size) -> u128 { self.to_bits(target_size).unwrap_or_else(|size| { bug!("expected int of size {}, but got size {}", target_size.bytes(), size.bytes()) }) } #[inline] pub fn to_bits(self, target_size: Size) -> Result<u128, Size> { assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST"); if target_size.bytes() == u64::from(self.size) { self.check_data(); Ok(self.data) } else { Err(self.size()) } } #[inline] pub fn try_to_machine_usize<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Result<u64, Size> { Ok(self.to_bits(tcx.data_layout.pointer_size)? as u64) } } macro_rules! from { ($($ty:ty),*) => { $( impl From<$ty> for ScalarInt { #[inline] fn from(u: $ty) -> Self { Self { data: u128::from(u), size: std::mem::size_of::<$ty>() as u8, } } } )* } } macro_rules! 
try_from { ($($ty:ty),*) => { $( impl TryFrom<ScalarInt> for $ty { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(std::mem::size_of::<$ty>())) .map(|u| u.try_into().unwrap()) } } )* } } from!(u8, u16, u32, u64, u128, bool); try_from!(u8, u16, u32, u64, u128); impl TryFrom<ScalarInt> for bool { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(1)).and_then(|u| match u { 0 => Ok(false), 1 => Ok(true), _ => Err(Size::from_bytes(1)), }) } } impl From<char> for ScalarInt { #[inline] fn from(c: char) -> Self { Self { data: c as u128, size: std::mem::size_of::<char>() as u8 } } } impl TryFrom<ScalarInt> for char { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(std::mem::size_of::<char>())) .map(|u| char::from_u32(u.try_into().unwrap()).unwrap()) } } impl From<Single> for ScalarInt { #[inline] fn from(f: Single) -> Self { Self { data: f.to_bits(), size: 4 } } } impl TryFrom<ScalarInt> for Single { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(4)).map(Self::from_bits) } } impl From<Double> for ScalarInt { #[inline] fn from(f: Double) -> Self { Self { data: f.to_bits(), size: 8 } } } impl TryFrom<ScalarInt> for Double { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(8)).map(Self::from_bits) } } impl fmt::Debug for ScalarInt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.size == 0 { self.check_data(); write!(f, "<ZST>") } else { write!(f, "0x{:x}", self) } } } impl fmt::LowerHex for ScalarInt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.check_data(); write!(f, "{:01$x}", { self.data }, self.size as usize * 2) } } impl fmt::UpperHex for ScalarInt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.check_data(); 
write!(f, "{:01$X}", { self.data }, self.size as usize * 2) } }
use rustc_apfloat::ieee::{Double, Single}; use rustc_apfloat::Float; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_target::abi::Size; use std::convert::{TryFrom, TryInto}; use std::fmt; use crate::ty::TyCtxt; #[derive(Copy, Clone)] pub struct ConstInt { int: ScalarInt, signed: bool, is_ptr_sized_integral: bool, } impl ConstInt { pub fn new(int: ScalarInt, signed: bool, is_ptr_sized_integral: bool) -> Self { Self { int, signed, is_ptr_sized_integral } } } impl std::fmt::Debug for ConstInt { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let Self { int, signed, is_ptr_sized_integral } = *self; let size = int.size().bytes(); let raw = int.data; if signed { let bit_size = size * 8; let min = 1u128 << (bit_size - 1); let max = min - 1; if raw == min { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "isize::MIN"), (1, _) => write!(fmt, "i8::MIN"), (2, _) => write!(fmt, "i16::MIN"), (4, _) => write!(fmt, "i32::MIN"), (8, _) => write!(fmt, "i64::MIN"), (16, _) => write!(fmt, "i128::MIN"), _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } } else if raw == max { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "isize::MAX"), (1, _) => write!(fmt, "i8::MAX"), (2, _) => write!(fmt, "i16::MAX"), (4, _) => write!(fmt, "i32::MAX"), (8, _) => write!(fmt, "i64::MAX"), (16, _) => write!(fmt, "i128::MAX"), _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } } else { match size { 1 => write!(fmt, "{}", raw as i8)?, 2 => write!(fmt, "{}", raw as i16)?, 4 => write!(fmt, "{}", raw as i32)?, 8 => write!(fmt, "{}", raw as i64)?, 16 => write!(fmt, "{}", raw as i128)?, _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } if fmt.alternate() { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "_isize")?, (1, _) => write!(fmt, "_i8")?, (2, _) => write!(fmt, "_i16")?, (4, _) => write!(fmt, "_i32")?, (8, _) => 
write!(fmt, "_i64")?, (16, _) => write!(fmt, "_i128")?, _ => bug!(), } } Ok(()) } } else { let max = Size::from_bytes(size).truncate(u128::MAX); if raw == max { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "usize::MAX"), (1, _) => write!(fmt, "u8::MAX"), (2, _) => write!(fmt, "u16::MAX"), (4, _) => write!(fmt, "u32::MAX"), (8, _) => write!(fmt, "u64::MAX"), (16, _) => write!(fmt, "u128::MAX"), _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } } else { match size { 1 => write!(fmt, "{}", raw as u8)?, 2 => write!(fmt, "{}", raw as u16)?, 4 => write!(fmt, "{}", raw as u32)?, 8 => write!(fmt, "{}", raw as u64)?, 16 => write!(fmt, "{}", raw as u128)?, _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed), } if fmt.alternate() { match (size, is_ptr_sized_integral) { (_, true) => write!(fmt, "_usize")?, (1, _) => write!(fmt, "_u8")?, (2, _) => write!(fmt, "_u16")?, (4, _) => write!(fmt, "_u32")?, (8, _) => write!(fmt, "_u64")?, (16, _) => write!(fmt, "_u128")?, _ => bug!(), } } Ok(()) } } } } #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[repr(packed)] pub struct ScalarInt { data: u128, size: u8, } impl<CTX> crate::ty::HashStable<CTX> for ScalarInt { fn hash_stable(&self, hcx: &mut CTX, hasher: &mut crate::ty::StableHasher) { { self.data }.hash_stable(hcx, hasher); self.size.hash_stable(hcx, hasher); } } impl<S: Encoder> Encodable<S> for ScalarInt { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u128(self.data)?; s.emit_u8(self.size) } } impl<D: Decoder> Decodable<D> for ScalarInt { fn decode(d: &mut D) -> ScalarInt { ScalarInt { data: d.read_u128(), size: d.read_u8() } } } impl ScalarInt { pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: 1 }; pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: 1 }; pub const ZST: ScalarInt = ScalarInt { data: 0_u128, size: 0 }; #[inline] pub fn size(self) -> Size { Size::from_bytes(self.size) } #[inline(always)] fn 
check_data(self) { debug_assert_eq!( self.size().truncate(self.data), { self.data }, "Scalar value {:#x} exceeds size of {} bytes", { self.data }, self.size ); } #[inline] pub fn null(size: Size) -> Self { Self { data: 0, size: size.bytes() as u8 } } #[inline] pub fn is_null(self) -> bool { self.data == 0 } #[inline]
#[inline] pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> { let i = i.into(); let truncated = size.truncate(i as u128); if size.sign_extend(truncated) as i128 == i { Some(Self { data: truncated, size: size.bytes() as u8 }) } else { None } } #[inline] pub fn assert_bits(self, target_size: Size) -> u128 { self.to_bits(target_size).unwrap_or_else(|size| { bug!("expected int of size {}, but got size {}", target_size.bytes(), size.bytes()) }) } #[inline] pub fn to_bits(self, target_size: Size) -> Result<u128, Size> { assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST"); if target_size.bytes() == u64::from(self.size) { self.check_data(); Ok(self.data) } else { Err(self.size()) } } #[inline] pub fn try_to_machine_usize<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Result<u64, Size> { Ok(self.to_bits(tcx.data_layout.pointer_size)? as u64) } } macro_rules! from { ($($ty:ty),*) => { $( impl From<$ty> for ScalarInt { #[inline] fn from(u: $ty) -> Self { Self { data: u128::from(u), size: std::mem::size_of::<$ty>() as u8, } } } )* } } macro_rules! 
try_from { ($($ty:ty),*) => { $( impl TryFrom<ScalarInt> for $ty { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(std::mem::size_of::<$ty>())) .map(|u| u.try_into().unwrap()) } } )* } } from!(u8, u16, u32, u64, u128, bool); try_from!(u8, u16, u32, u64, u128); impl TryFrom<ScalarInt> for bool { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(1)).and_then(|u| match u { 0 => Ok(false), 1 => Ok(true), _ => Err(Size::from_bytes(1)), }) } } impl From<char> for ScalarInt { #[inline] fn from(c: char) -> Self { Self { data: c as u128, size: std::mem::size_of::<char>() as u8 } } } impl TryFrom<ScalarInt> for char { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(std::mem::size_of::<char>())) .map(|u| char::from_u32(u.try_into().unwrap()).unwrap()) } } impl From<Single> for ScalarInt { #[inline] fn from(f: Single) -> Self { Self { data: f.to_bits(), size: 4 } } } impl TryFrom<ScalarInt> for Single { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(4)).map(Self::from_bits) } } impl From<Double> for ScalarInt { #[inline] fn from(f: Double) -> Self { Self { data: f.to_bits(), size: 8 } } } impl TryFrom<ScalarInt> for Double { type Error = Size; #[inline] fn try_from(int: ScalarInt) -> Result<Self, Size> { int.to_bits(Size::from_bytes(8)).map(Self::from_bits) } } impl fmt::Debug for ScalarInt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.size == 0 { self.check_data(); write!(f, "<ZST>") } else { write!(f, "0x{:x}", self) } } } impl fmt::LowerHex for ScalarInt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.check_data(); write!(f, "{:01$x}", { self.data }, self.size as usize * 2) } } impl fmt::UpperHex for ScalarInt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.check_data(); 
write!(f, "{:01$X}", { self.data }, self.size as usize * 2) } }
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> { let data = i.into(); if size.truncate(data) == data { Some(Self { data, size: size.bytes() as u8 }) } else { None } }
function_block-full_function
[]
Rust
libsplinter/src/collections/mod.rs
davececchi/splinter
92bc0fdec6e66aa53bc37db13b5521343235b016
use std::collections::hash_map::{Iter, Keys, Values}; use std::collections::HashMap; use std::hash::Hash; #[derive(Clone, Debug, PartialEq, Default)] pub struct BiHashMap<K: Hash + Eq, V: Hash + Eq> { kv_hash_map: HashMap<K, V>, vk_hash_map: HashMap<V, K>, } impl<K: Hash + Eq, V: Hash + Eq> BiHashMap<K, V> where K: std::clone::Clone, V: std::clone::Clone, { pub fn new() -> Self { BiHashMap { kv_hash_map: HashMap::new(), vk_hash_map: HashMap::new(), } } pub fn with_capacity(capacity: usize) -> Self { BiHashMap { kv_hash_map: HashMap::with_capacity(capacity), vk_hash_map: HashMap::with_capacity(capacity), } } pub fn capacity(&self) -> usize { self.kv_hash_map.capacity() } pub fn reserve(&mut self, additional: usize) { self.kv_hash_map.reserve(additional); self.vk_hash_map.reserve(additional); } pub fn shrink_to_fit(&mut self) { self.kv_hash_map.shrink_to_fit(); self.vk_hash_map.shrink_to_fit(); } pub fn keys(&self) -> Keys<K, V> { self.kv_hash_map.keys() } pub fn values(&self) -> Values<K, V> { self.kv_hash_map.values() } pub fn iter_by_keys(&self) -> Iter<K, V> { self.kv_hash_map.iter() } pub fn iter_by_values(&self) -> Iter<V, K> { self.vk_hash_map.iter() } pub fn len(&self) -> usize { self.kv_hash_map.len() } pub fn is_empty(&self) -> bool { self.kv_hash_map.is_empty() } pub fn clear(&mut self) { self.kv_hash_map.clear(); self.vk_hash_map.clear(); } pub fn get_by_key(&self, key: &K) -> Option<&V> { self.kv_hash_map.get(key) } pub fn get_by_value(&self, value: &V) -> Option<&K> { self.vk_hash_map.get(value) } pub fn contains_key(&self, key: &K) -> bool { self.kv_hash_map.contains_key(key) } pub fn contains_value(&self, value: &V) -> bool { self.vk_hash_map.contains_key(value) } pub fn insert(&mut self, key: K, value: V) -> (Option<K>, Option<V>) { let old_value = self.kv_hash_map.insert(key.clone(), value.clone()); let old_key = self.vk_hash_map.insert(value, key); (old_key, old_value) } pub fn remove_by_key(&mut self, key: &K) -> Option<(K, V)> { let value = 
self.kv_hash_map.remove(key); if let Some(value) = value { let key = self.vk_hash_map.remove(&value); if let Some(key) = key { return Some((key, value)); } } None } pub fn remove_by_value(&mut self, value: &V) -> Option<(K, V)> { let key = self.vk_hash_map.remove(value); if let Some(key) = key { let value = self.kv_hash_map.remove(&key); if let Some(value) = value { return Some((key, value)); } } None } } #[cfg(test)] pub mod tests { use super::*; #[test] fn test_capacity() { let map: BiHashMap<String, usize> = BiHashMap::new(); let capacity = map.capacity(); assert_eq!(capacity, 0); let map_with_capacity: BiHashMap<String, usize> = BiHashMap::with_capacity(5); let capacity = map_with_capacity.capacity(); assert!(capacity >= 5); } #[test] fn test_reserve() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); let capacity = map.capacity(); assert_eq!(capacity, 0); map.reserve(5); let capacity = map.capacity(); assert!(capacity >= 5); } #[test] fn test_shrink_to_fit() { let mut map: BiHashMap<String, usize> = BiHashMap::with_capacity(100); let capacity = map.capacity(); assert!(capacity >= 100); map.shrink_to_fit(); let capacity = map.capacity(); assert_eq!(capacity, 0); } #[test] fn test_insert() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); assert_eq!((None, None), map.insert("ONE".to_string(), 1)); assert_eq!( (Some("ONE".to_string()), Some(1)), map.insert("ONE".to_string(), 1) ); assert_eq!( (Some("ONE".to_string()), None), map.insert("TWO".to_string(), 1) ); assert_eq!((None, Some(1)), map.insert("ONE".to_string(), 3)); } #[test] fn test_keys_and_values() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); let mut keys: Vec<String> = map.keys().map(|key| key.to_string()).collect(); keys.sort(); assert_eq!( keys, ["ONE".to_string(), "THREE".to_string(), "TWO".to_string()] ); let mut values: Vec<usize> = map.values().map(|value| 
value.clone()).collect(); values.sort(); assert_eq!(values, [1, 2, 3]) } #[test] fn test_iter_keys_and_values() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); let keys = vec!["ONE".to_string(), "THREE".to_string(), "TWO".to_string()]; let values = vec![1, 2, 3]; for (key, value) in map.iter_by_keys() { assert!(keys.contains(key)); assert!(values.contains(value)); } for (value, key) in map.iter_by_values() { assert!(keys.contains(key)); assert!(values.contains(value)); } } #[test] fn test_clear_and_is_empty() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); assert_eq!(map.len(), 0); assert!(map.is_empty()); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); assert_eq!(map.len(), 3); assert!(!map.is_empty()); map.clear(); assert_eq!(map.len(), 0); assert!(map.is_empty()); } #[test] fn test_get() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); assert_eq!(map.get_by_key(&"ONE".to_string()), Some(&1)); assert_eq!(map.get_by_key(&"TWO".to_string()), Some(&2)); assert_eq!(map.get_by_key(&"THREE".to_string()), Some(&3)); assert_eq!(map.get_by_key(&"FOUR".to_string()), None); assert_eq!(map.get_by_value(&1), Some(&"ONE".to_string())); assert_eq!(map.get_by_value(&2), Some(&"TWO".to_string())); assert_eq!(map.get_by_value(&3), Some(&"THREE".to_string())); assert_eq!(map.get_by_value(&4), None); } #[test] fn test_contains_key_and_value() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); assert!(map.contains_key(&"ONE".to_string())); assert!(map.contains_value(&1)); assert!(!map.contains_key(&"TWO".to_string())); assert!(!map.contains_value(&2)); } #[test] fn test_removes() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); 
map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); let removed = map.remove_by_key(&"ONE".to_string()); assert_eq!(removed, Some(("ONE".to_string(), 1))); let removed = map.remove_by_key(&"ONE".to_string()); assert_eq!(removed, None); let removed = map.remove_by_value(&2); assert_eq!(removed, Some(("TWO".to_string(), 2))); let removed = map.remove_by_value(&2); assert_eq!(removed, None); } }
use std::collections::hash_map::{Iter, Keys, Values}; use std::collections::HashMap; use std::hash::Hash; #[derive(Clone, Debug, PartialEq, Default)] pub struct BiHashMap<K: Hash + Eq, V: Hash + Eq> { kv_hash_map: HashMap<K, V>, vk_hash_map: HashMap<V, K>, } impl<K: Hash + Eq, V: Hash + Eq> BiHashMap<K, V> where K: std::clone::Clone, V: std::clone::Clone, { pub fn new() -> Self { BiHashMap { kv_hash_map: HashMap::new(), vk_hash_map: HashMap::new(), } } pub fn with_capacity(capacity: usize) -> Self { BiHashMap { kv_hash_map: HashMap::with_capacity(capacity), vk_hash_map: HashMap::with_capacity(capacity), } } pub fn capacity(&self) -> usize { self.kv_hash_map.capacity() } pub fn reserve(&mut self, additional: usize) { self.kv_hash_map.reserve(additional); self.vk_hash_map.reserve(additional); } pub fn shrink_to_fit(&mut self) { self.kv_hash_map.shrink_to_fit(); self.vk_hash_map.shrink_to_fit(); } pub fn keys(&self) -> Keys<K, V> { self.kv_hash_map.keys() } pub fn values(&self) -> Values<K, V> { self.kv_hash_map.values() } pub fn iter_by_keys(&self) -> Iter<K, V> { self.kv_hash_map.iter() } pub fn iter_by_values(&self) -> Iter<V, K> { self.vk_hash_map.iter() } pub fn len(&self) -> usize { self.kv_hash_map.len() } pub fn is_empty(&self) -> bool { self.kv_hash_map.is_empty() } pub fn clear(&mut self) { self.kv_hash_map.clear(); self.vk_hash_map.clear(); } pub fn get_by_key(&self, key: &K) -> Option<&V> { self.kv_hash_map.get(key) } pub fn get_by_value(&self, value: &V) -> Option<&K> { self.vk_hash_map.get(value) } pub fn contains_key(&self, key: &K) -> bool { self.kv_hash_map.contains_key(key) } pub fn contains_value(&self, value: &V) -> bool { self.vk_hash_map.contains_key(value) } pub fn insert(&mut self, key: K, value: V) -> (Option<K>, Option<V>) { let old_value = self.kv_hash_map.insert(key.clone(), value.clone()); let old_key = self.vk_hash_map.insert(value, key); (old_key, old_value) } pub fn remove_by_key(&mut self, key: &K) -> Option<(K, V)> { let value = 
self.kv_hash_map.remove(key); if let Some(value) = value { let key = self.vk_hash_map.remove(&value); if let Some(key) = key { return Some((key, value)); } } None } pub fn remove_by_value(&mut self, value: &V) -> Option<(K, V)> { let key = self.vk_hash_map.remove(value); if let Some(key) = key { let value = self.kv_hash_map.remove(&key); if let Some(value) = value { return Some((key, value)); } } None } } #[cfg(test)] pub mod tests { use super::*; #[test] fn test_capacity() { let map: BiHashMap<String, usize> = BiHashMap::new(); let capacity = map.capacity(); assert_eq!(capacity, 0); let map_with_capacity: BiHashMap<String, usize> = BiHashMap::with_capacity(5); let capacity = map_with_capacity.capacity(); assert!(capacity >= 5); } #[test] fn test_reserve() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); let capacity = map.capacity(); assert_eq!(capacity, 0); map.reserve(5); let capacity = map.capacity(); assert!(capacity >= 5); } #[test] fn test_shrink_to_fit() { let mut map: BiHashMap<String, usize> = BiHashMap::with_capacity(100); let capacity = map.capacity(); assert!(capacity >= 100); map.shrink_to_fit(); let capacity = map.capacity(); assert_eq!(capacity, 0); } #[test] fn test_insert() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); assert_eq!((None, None), map.insert("ONE".to_string(), 1)); assert_eq!( (Some("ONE".to_string()), Some(1)), map.insert("ONE".to_string(), 1) ); assert_eq!( (Some("ONE".to_string()), None), map.insert("TWO".to_string(), 1) ); assert_eq!((None, Some(1)), map.insert("ONE".to_string(), 3)); } #[test] fn test_keys_and_values() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); let mut keys: Vec<String> = map.keys().map(|key| key.to_string()).collect(); keys.sort(); assert_eq!( keys, ["ONE".to_string(), "THREE".to_string(), "TWO".to_string()] ); let mut values: Vec<usize> = map.values().map(|value| 
value.clone()).collect(); values.sort(); assert_eq!(values, [1, 2, 3]) } #[test] fn test_iter_keys_and_values() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); let keys = vec!["ONE".to_string(), "THREE".to_string(), "TWO".to_string()]; let values = vec![1, 2, 3]; for (key, value) in map.iter_by_keys() { assert!(keys.contains(key)); assert!(values.contains(value)); } for (value, key) in map.iter_by_values() { assert!(keys.contains(key)); assert!(values.contains(value)); } } #[test] fn test_clear_and_is_empty() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); assert_eq!(map.len(), 0); assert!(map.is_empty()); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); assert_eq!(map.len(), 3); assert!(!map.is_empty()); map.clear(); assert_eq!(map.len(), 0); assert!(map.is_empty()); } #[test] fn test_get() {
#[test] fn test_contains_key_and_value() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); assert!(map.contains_key(&"ONE".to_string())); assert!(map.contains_value(&1)); assert!(!map.contains_key(&"TWO".to_string())); assert!(!map.contains_value(&2)); } #[test] fn test_removes() { let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); let removed = map.remove_by_key(&"ONE".to_string()); assert_eq!(removed, Some(("ONE".to_string(), 1))); let removed = map.remove_by_key(&"ONE".to_string()); assert_eq!(removed, None); let removed = map.remove_by_value(&2); assert_eq!(removed, Some(("TWO".to_string(), 2))); let removed = map.remove_by_value(&2); assert_eq!(removed, None); } }
let mut map: BiHashMap<String, usize> = BiHashMap::new(); map.insert("ONE".to_string(), 1); map.insert("TWO".to_string(), 2); map.insert("THREE".to_string(), 3); assert_eq!(map.get_by_key(&"ONE".to_string()), Some(&1)); assert_eq!(map.get_by_key(&"TWO".to_string()), Some(&2)); assert_eq!(map.get_by_key(&"THREE".to_string()), Some(&3)); assert_eq!(map.get_by_key(&"FOUR".to_string()), None); assert_eq!(map.get_by_value(&1), Some(&"ONE".to_string())); assert_eq!(map.get_by_value(&2), Some(&"TWO".to_string())); assert_eq!(map.get_by_value(&3), Some(&"THREE".to_string())); assert_eq!(map.get_by_value(&4), None); }
function_block-function_prefix_line
[ { "content": "/// The HandlerWrapper provides a typeless wrapper for typed Handler instances.\n\nstruct HandlerWrapper<MT: Hash + Eq + Debug + Clone> {\n\n inner: InnerHandler<MT>,\n\n}\n\n\n\nimpl<MT: Hash + Eq + Debug + Clone> HandlerWrapper<MT> {\n\n fn handle(\n\n &self,\n\n message_bytes: &[u8],\n\n message_context: &MessageContext<MT>,\n\n network_sender: &dyn Sender<SendRequest>,\n\n ) -> Result<(), DispatchError> {\n\n (*self.inner)(message_bytes, message_context, network_sender)\n\n }\n\n}\n\n\n\n/// A message to be dispatched.\n\n///\n\n/// This struct contains information about a message that will be passed to a `Dispatcher` instance\n\n/// via a `Sender<DispatchMessage>`.\n\n#[derive(Clone)]\n", "file_path": "libsplinter/src/network/dispatch.rs", "rank": 0, "score": 259252.5596681324 }, { "content": "pub fn fetch_key_info(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n public_key: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n let public_key = public_key.into_inner();\n\n client\n\n .get(format!(\"{}/keys/{}\", splinterd_url.get_ref(), public_key))\n\n .send()\n\n .map_err(Error::from)\n\n .and_then(move |mut resp| {\n\n let body = resp.body().wait()?;\n\n match resp.status() {\n\n StatusCode::OK => Ok(HttpResponse::Ok()\n\n .content_type(\"application/json\")\n\n .body(Body::Bytes(body))),\n\n StatusCode::NOT_FOUND => Ok(HttpResponse::NotFound().json(\n\n ErrorResponse::not_found(&format!(\n\n \"Could not find user information of key {}\",\n\n public_key\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/key.rs", "rank": 1, "score": 198014.29219671807 }, { "content": "pub fn run_incoming_loop(\n\n incoming_mesh: Mesh,\n\n incoming_running: Arc<AtomicBool>,\n\n mut inbound_router: InboundRouter<CircuitMessageType>,\n\n) -> Result<(), OrchestratorError> {\n\n while incoming_running.load(Ordering::SeqCst) {\n\n let timeout = Duration::from_secs(TIMEOUT_SEC);\n\n let message_bytes = 
match incoming_mesh.recv_timeout(timeout) {\n\n Ok(envelope) => envelope.take_payload(),\n\n Err(MeshRecvTimeoutError::Timeout) => continue,\n\n Err(MeshRecvTimeoutError::Disconnected) => {\n\n error!(\"Mesh Disconnected\");\n\n break;\n\n }\n\n };\n\n\n\n let msg: NetworkMessage = protobuf::parse_from_bytes(&message_bytes)\n\n .map_err(|err| OrchestratorError::Internal(Box::new(err)))?;\n\n\n\n // if a service is waiting on a reply the inbound router will\n", "file_path": "libsplinter/src/orchestrator/mod.rs", "rank": 2, "score": 196034.36608574988 }, { "content": "pub fn get_response_paging_info(\n\n limit: Option<usize>,\n\n offset: Option<usize>,\n\n link: &str,\n\n query_count: usize,\n\n) -> Paging {\n\n let limit = limit.unwrap_or(DEFAULT_LIMIT);\n\n let offset = offset.unwrap_or(DEFAULT_OFFSET);\n\n\n\n let base_link = format!(\"{}limit={}&\", link, limit);\n\n\n\n let current_link = format!(\"{}offset={}\", base_link, offset);\n\n\n\n let first_link = format!(\"{}offset=0\", base_link);\n\n\n\n let previous_offset = if offset > limit { offset - limit } else { 0 };\n\n\n\n let previous_link = format!(\"{}offset={}\", base_link, previous_offset);\n\n\n\n let last_offset = if query_count > 0 {\n", "file_path": "splinterd/src/routes/mod.rs", "rank": 3, "score": 192826.78531450068 }, { "content": "pub fn run(\n\n splinterd_url: String,\n\n node_id: String,\n\n db_conn: ConnectionPool,\n\n private_key: String,\n\n igniter: Igniter,\n\n) -> Result<(), AppAuthHandlerError> {\n\n let mut ws = WebSocketClient::new(\n\n &format!(\"{}/ws/admin/register/gameroom\", splinterd_url),\n\n move |ctx, event| {\n\n if let Err(err) = process_admin_event(\n\n event,\n\n &db_conn,\n\n &node_id,\n\n &private_key,\n\n &splinterd_url,\n\n ctx.igniter(),\n\n ) {\n\n error!(\"Failed to process admin event: {}\", err);\n\n }\n", "file_path": "examples/gameroom/daemon/src/authorization_handler/mod.rs", "rank": 4, "score": 192826.78531450068 }, { "content": "pub fn run(\n\n bind_url: 
&str,\n\n splinterd_url: &str,\n\n node: Node,\n\n database_connection: ConnectionPool,\n\n public_key: String,\n\n) -> Result<\n\n (\n\n RestApiShutdownHandle,\n\n thread::JoinHandle<Result<(), RestApiServerError>>,\n\n ),\n\n RestApiServerError,\n\n> {\n\n let bind_url = bind_url.to_owned();\n\n let splinterd_url = splinterd_url.to_owned();\n\n let gameroomd_data = GameroomdData { public_key };\n\n let (tx, rx) = mpsc::channel();\n\n let join_handle = thread::Builder::new()\n\n .name(\"GameroomdRestApi\".into())\n\n .spawn(move || {\n", "file_path": "examples/gameroom/daemon/src/rest_api/mod.rs", "rank": 5, "score": 192826.78531450068 }, { "content": "pub fn create_new_notification(\n\n notification_type: &str,\n\n requester: &str,\n\n requester_node_id: &str,\n\n target: &str,\n\n) -> NewGameroomNotification {\n\n NewGameroomNotification {\n\n notification_type: notification_type.to_string(),\n\n requester: requester.to_string(),\n\n requester_node_id: requester_node_id.to_string(),\n\n target: target.to_string(),\n\n created_time: SystemTime::now(),\n\n read: false,\n\n }\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/notification.rs", "rank": 6, "score": 191246.86431702517 }, { "content": "pub fn start_service_loop(\n\n service_config: ServiceConfig,\n\n channel: (\n\n crossbeam_channel::Sender<SendRequest>,\n\n crossbeam_channel::Receiver<SendRequest>,\n\n ),\n\n consensus_msg_sender: Sender<ConsensusMessage>,\n\n proposal_update_sender: Sender<ProposalUpdate>,\n\n pending_proposal: Arc<Mutex<Option<(Proposal, Batch)>>>,\n\n network: Network,\n\n running: Arc<AtomicBool>,\n\n) -> Result<(), ServiceError> {\n\n info!(\"Starting Private Counter Service\");\n\n let sender_network = network.clone();\n\n let (send, recv) = channel;\n\n\n\n let network_sender_run_flag = running.clone();\n\n let _ = Builder::new()\n\n .name(\"NetworkMessageSender\".into())\n\n .spawn(move || {\n", "file_path": "examples/private_xo/src/service/mod.rs", "rank": 7, 
"score": 189771.39233383117 }, { "content": "pub fn get_response_paging_info(\n\n limit: usize,\n\n offset: usize,\n\n link: &str,\n\n query_count: usize,\n\n) -> Paging {\n\n let limit = limit as i64;\n\n let offset = offset as i64;\n\n let query_count = query_count as i64;\n\n\n\n let base_link = format!(\"{}limit={}&\", link, limit);\n\n\n\n let current_link = format!(\"{}offset={}\", base_link, offset);\n\n\n\n let first_link = format!(\"{}offset=0\", base_link);\n\n\n\n let previous_offset = if offset > limit { offset - limit } else { 0 };\n\n let previous_link = format!(\"{}offset={}\", base_link, previous_offset);\n\n\n\n let last_offset = if query_count > 0 {\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/mod.rs", "rank": 8, "score": 181417.22524541212 }, { "content": "pub fn into_protobuf<M: Message>(\n\n payload: web::Payload,\n\n) -> impl Future<Item = M, Error = ActixError> {\n\n payload\n\n .from_err::<ActixError>()\n\n .fold(web::BytesMut::new(), move |mut body, chunk| {\n\n body.extend_from_slice(&chunk);\n\n Ok::<_, ActixError>(body)\n\n })\n\n .and_then(|body| match protobuf::parse_from_bytes::<M>(&body) {\n\n Ok(proto) => Ok(proto),\n\n Err(err) => Err(ErrorBadRequest(json!({ \"message\": format!(\"{}\", err) }))),\n\n })\n\n .into_future()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use actix_http::Response;\n\n use futures::IntoFuture;\n\n\n\n #[test]\n\n fn test_create_handle() {\n\n let _handler = Resource::new(Method::Get, \"/test\", |_: HttpRequest, _: web::Payload| {\n\n Box::new(Response::Ok().finish().into_future())\n\n });\n\n }\n\n}\n", "file_path": "libsplinter/src/rest_api/mod.rs", "rank": 9, "score": 181114.60265333956 }, { "content": "/// The handler function for the `/batches` endpoint\n\npub fn batches(req: &mut Request) -> IronResult<Response> {\n\n let pending_batches = req\n\n .extensions\n\n .get::<State<Arc<Mutex<VecDeque<Batch>>>>>()\n\n .expect(\"Expected pending batches, but none was set on 
the request\");\n\n\n\n let batch_list: BatchList = parse_from_reader(&mut req.body).map_err(BatchSubmitError::from)?;\n\n\n\n log::debug!(\"Submitted {:?}\", &batch_list);\n\n\n\n let batch_ids = batch_list\n\n .get_batches()\n\n .iter()\n\n .map(|batch| batch.header_signature.clone())\n\n .collect::<Vec<_>>()\n\n .join(\",\");\n\n\n\n let batch =\n\n batch_list.batches.get(0).cloned().ok_or_else(|| {\n\n BatchSubmitError::InvalidBatchListFormat(\"No batches provided\".into())\n", "file_path": "examples/private_xo/src/routes/batches.rs", "rank": 10, "score": 178696.0782231058 }, { "content": "struct PeerMap {\n\n peers: BiHashMap<String, usize>,\n\n redirects: HashMap<String, String>,\n\n endpoints: BiHashMap<String, String>,\n\n}\n\n\n\n/// A map of Peer IDs to mesh IDs, which also maintains a redirect table for updated peer ids.\n\nimpl PeerMap {\n\n fn new() -> Self {\n\n PeerMap {\n\n peers: BiHashMap::new(),\n\n redirects: HashMap::new(),\n\n endpoints: BiHashMap::new(),\n\n }\n\n }\n\n\n\n /// Returns the current list of peer ids.\n\n ///\n\n /// This list does not include any of the redirected peer ids.\n\n fn peer_ids(&self) -> Vec<String> {\n", "file_path": "libsplinter/src/network/mod.rs", "rank": 11, "score": 177314.54008784489 }, { "content": "/// The handler function for the `/batch_statuses` endpoint\n\npub fn batch_statuses(req: &mut Request) -> IronResult<Response> {\n\n let id: String = query_param(req, \"id\")\n\n .unwrap()\n\n .ok_or_else(|| BatchStatusesError::MissingParameter(\"id\".into()))?;\n\n let wait: Option<u32> = query_param(req, \"wait\").map_err(|err| {\n\n BatchStatusesError::InvalidParameter(format!(\"wait must be an integer: {}\", err))\n\n })?;\n\n let ids = id.split(',').collect::<Vec<_>>();\n\n let wait_time = wait.unwrap_or(0);\n\n\n\n log::debug!(\"Checking status for batches {:?}\", &ids);\n\n let mut params = HashMap::new();\n\n params.insert(\"id\".into(), id.clone());\n\n params.insert(\"wait\".into(), 
wait_time.to_string());\n\n\n\n let link = url_for(&req, \"batch_statuses\", params).to_string();\n\n\n\n Ok(Response::with((\n\n status::Ok,\n\n Json(BatchStatusesResponse {\n", "file_path": "examples/private_xo/src/routes/batches.rs", "rank": 12, "score": 176232.32367399256 }, { "content": "/// The handler function for the `/state` endpoint.\n\npub fn list_state_with_params(req: &mut Request) -> IronResult<Response> {\n\n let request: ListStateRequest = get_list_params(req)?;\n\n let xo_state = req\n\n .extensions\n\n .get::<State<XoState>>()\n\n .expect(\"Expected xo state, but none was set on the request\");\n\n\n\n let state_root = request\n\n .head\n\n .as_ref()\n\n .cloned()\n\n .unwrap_or_else(|| xo_state.current_state_root());\n\n\n\n log::debug!(\n\n \"Listing state with prefix {:?} from head {}\",\n\n request.address.as_ref(),\n\n &state_root\n\n );\n\n\n\n let results: Result<Vec<StateEntry>, _> = xo_state\n", "file_path": "examples/private_xo/src/routes/state.rs", "rank": 13, "score": 173870.89953185245 }, { "content": "/// The handler function for the `/state/:address` endpoint.\n\npub fn get_state_by_address(req: &mut Request) -> IronResult<Response> {\n\n let xo_state = req\n\n .extensions\n\n .get::<State<XoState>>()\n\n .expect(\"Expected xo state, but none was set on the request\");\n\n\n\n let address = req\n\n .extensions\n\n .get::<router::Router>()\n\n .expect(\"Expected router but none was set on the request\")\n\n .find(\"address\")\n\n .ok_or_else(|| StateError::BadRequest(\"Missing state address\".into()))?;\n\n\n\n if address.len() != 70 {\n\n return Err(IronError::from(StateError::BadRequest(format!(\n\n \"\\\"{}\\\" is not a valid address\",\n\n address\n\n ))));\n\n }\n\n\n", "file_path": "examples/private_xo/src/routes/state.rs", "rank": 14, "score": 173870.89953185245 }, { "content": "/// Manages role-based permissions associated with public keys.\n\n///\n\n/// The KeyPermissionManager provides an interface for providing details on 
whether or not a public\n\n/// key has permissions to act in specific roles.\n\n///\n\n/// Note: the underlying implementation determines how those values are set and modified - these\n\n/// operations are not exposed via this interface.\n\npub trait KeyPermissionManager: Send {\n\n /// Checks to see if a public key is permitted for the given role.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns a `KeyPermissionError` if the underling implementation encountered an error while\n\n /// checking the permissions.\n\n fn is_permitted(&self, public_key: &[u8], role: &str) -> KeyPermissionResult<bool>;\n\n}\n", "file_path": "libsplinter/src/keys/mod.rs", "rank": 15, "score": 170726.56468799873 }, { "content": "/// A registry of public key information.\n\n///\n\n/// The key registry provides an interface for storing and retrieving key information. Key\n\n/// information helps to tie a public key to a particular splinter node, as well as associating\n\n/// application metadata with the public key.\n\npub trait KeyRegistry: Send + Sync {\n\n /// Save a public key and its information.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns a `KeyRegistryError` if the underling implementation could not save the key\n\n /// information.\n\n fn save_key(&mut self, key_info: KeyInfo) -> KeyRegistryResult<()>;\n\n\n\n /// Save a collection of public keys and their information.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns a `KeyRegistryError` if the underling implementation could not save the key\n\n /// information.\n\n fn save_keys(&mut self, key_infos: Vec<KeyInfo>) -> KeyRegistryResult<()>;\n\n\n\n /// Delete a public key and its information.\n\n ///\n\n /// Returns the existing key information, if it exists.\n", "file_path": "libsplinter/src/keys/mod.rs", "rank": 16, "score": 168044.34564655847 }, { "content": "pub fn admin_service_id(node_id: &str) -> String {\n\n format!(\"admin::{}\", node_id)\n\n}\n\n\n", "file_path": "libsplinter/src/admin/mod.rs", "rank": 17, "score": 
167979.60035186994 }, { "content": "pub fn to_hex(bytes: &[u8]) -> String {\n\n let mut buf = String::new();\n\n for b in bytes {\n\n write!(&mut buf, \"{:02x}\", b).expect(\"Unable to write to string\");\n\n }\n\n\n\n buf\n\n}\n\n\n\n#[cfg(all(feature = \"test-authorization-handler\", test))]\n\nmod test {\n\n use super::*;\n\n use splinter::events::Reactor;\n\n\n\n use diesel::{dsl::insert_into, prelude::*, RunQueryDsl};\n\n use gameroom_database::models::{\n\n GameroomMember, GameroomNotification, GameroomService, NewGameroomNotification,\n\n ProposalVoteRecord,\n\n };\n\n\n", "file_path": "examples/gameroom/daemon/src/authorization_handler/mod.rs", "rank": 18, "score": 167979.60035186994 }, { "content": "/// Given a location string, returns the appropriate storage\n\n///\n\n/// Accepts `\"memory\"` or `\"disk+/path/to/file\"` as location values\n\npub fn get_storage<'a, T: Sized + Serialize + DeserializeOwned + 'a, F: Fn() -> T>(\n\n location: &str,\n\n default: F,\n\n) -> Result<Box<dyn Storage<S = T> + 'a>, String> {\n\n if location.ends_with(\".yaml\") {\n\n Ok(Box::new(YamlStorage::new(location, default).unwrap()) as Box<dyn Storage<S = T>>)\n\n } else if location == \"memory\" {\n\n Ok(Box::new(MemStorage::new(default).unwrap()) as Box<dyn Storage<S = T>>)\n\n } else {\n\n Err(format!(\"Unknown state location type: {}\", location))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::YamlStorage;\n\n use super::*;\n\n use tempdir::TempDir;\n\n\n\n #[test]\n", "file_path": "libsplinter/src/storage/mod.rs", "rank": 19, "score": 166453.32465146176 }, { "content": "pub fn create_circuit_direct_msg<M: protobuf::Message>(\n\n circuit: String,\n\n sender: String,\n\n recipient: String,\n\n payload: &M,\n\n correlation_id: String,\n\n) -> Result<Vec<u8>, ServiceError> {\n\n let mut direct_msg = CircuitDirectMessage::new();\n\n direct_msg.set_circuit(circuit);\n\n direct_msg.set_sender(sender);\n\n direct_msg.set_recipient(recipient);\n\n 
direct_msg.set_payload(payload.write_to_bytes()?);\n\n direct_msg.set_correlation_id(correlation_id);\n\n\n\n wrap_in_circuit_envelopes(CircuitMessageType::CIRCUIT_DIRECT_MESSAGE, direct_msg)\n\n}\n\n\n", "file_path": "examples/private_xo/src/service/mod.rs", "rank": 20, "score": 162777.64287517563 }, { "content": "pub fn query_param<T: std::str::FromStr>(\n\n req: &mut Request,\n\n key: &str,\n\n) -> Result<Option<T>, T::Err> {\n\n let mut params = query_params(req, key)?;\n\n\n\n if let Some(mut values) = params.take() {\n\n Ok(values.pop())\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "examples/private_xo/src/routes/mod.rs", "rank": 21, "score": 161614.87767023483 }, { "content": "pub fn query_params<T: std::str::FromStr>(\n\n req: &mut Request,\n\n key: &str,\n\n) -> Result<Option<Vec<T>>, T::Err> {\n\n match req.get_ref::<urlencoded::UrlEncodedQuery>() {\n\n Ok(ref query) => match query.get(key) {\n\n Some(values) => Ok(Some(\n\n values\n\n .iter()\n\n .map(|s| s.parse())\n\n .collect::<Result<Vec<_>, _>>()?,\n\n )),\n\n None => Ok(None),\n\n },\n\n Err(_) => Ok(None),\n\n }\n\n}\n", "file_path": "examples/private_xo/src/routes/mod.rs", "rank": 22, "score": 161614.87767023483 }, { "content": "pub fn write<T: Write>(writer: &mut T, buffer: &[u8]) -> Result<(), SendError> {\n\n let mut packed = &pack(buffer)?[..];\n\n while !packed.is_empty() {\n\n match writer.write(packed) {\n\n Ok(0) => {\n\n return Err(SendError::IoError(std::io::Error::new(\n\n std::io::ErrorKind::WriteZero,\n\n \"failed to write whole buffer\",\n\n )))\n\n }\n\n Ok(n) => packed = &packed[n..],\n\n Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => {}\n\n Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n\n thread::sleep(Duration::from_millis(100));\n\n }\n\n Err(e) => return Err(SendError::IoError(e)),\n\n }\n\n }\n\n writer.flush()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "libsplinter/src/transport/rw.rs", "rank": 23, "score": 159926.13910533575 }, { 
"content": "pub fn read<T: Read>(reader: &mut T) -> Result<Vec<u8>, RecvError> {\n\n let len = loop {\n\n match reader.read_u32::<BigEndian>() {\n\n Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n\n thread::sleep(Duration::from_millis(100));\n\n continue;\n\n }\n\n Err(e) => return Err(RecvError::IoError(e)),\n\n Ok(n) => break n,\n\n };\n\n };\n\n\n\n let mut buffer = vec![0; len as usize];\n\n let mut remaining = &mut buffer[..];\n\n\n\n while !remaining.is_empty() {\n\n match reader.read(remaining) {\n\n Ok(0) => break,\n\n Ok(n) => {\n\n let tmp = remaining;\n", "file_path": "libsplinter/src/transport/rw.rs", "rank": 24, "score": 159926.13910533575 }, { "content": "// wrie the a file to a temp directory\n\npub fn write_file(\n\n mut temp_dir: PathBuf,\n\n file_name: &str,\n\n bytes: &[u8],\n\n) -> Result<String, CertError> {\n\n temp_dir.push(file_name);\n\n let path = {\n\n if let Some(path) = temp_dir.to_str() {\n\n path.to_string()\n\n } else {\n\n return Err(CertError::PathError(\n\n \"Path is not valid unicode\".to_string(),\n\n ));\n\n }\n\n };\n\n let mut file = File::create(path.to_string())?;\n\n file.write_all(bytes)?;\n\n\n\n Ok(path)\n\n}\n", "file_path": "splinterd/src/certs.rs", "rank": 25, "score": 157412.89282440313 }, { "content": "// Make a certificate and private key for the Certificate Authority\n\npub fn make_ca_cert() -> Result<(PKey<Private>, X509), CertError> {\n\n // generate private key\n\n let rsa = Rsa::generate(2048)?;\n\n let privkey = PKey::from_rsa(rsa)?;\n\n\n\n // build x509 name\n\n let mut x509_name = X509NameBuilder::new()?;\n\n x509_name.append_entry_by_text(\"CN\", \"generated_ca\")?;\n\n let x509_name = x509_name.build();\n\n\n\n // build x509 cert\n\n let mut cert_builder = X509::builder()?;\n\n cert_builder.set_version(2)?;\n\n cert_builder.set_subject_name(&x509_name)?;\n\n cert_builder.set_issuer_name(&x509_name)?;\n\n cert_builder.set_pubkey(&privkey)?;\n\n\n\n let not_before = 
Asn1Time::days_from_now(0)?;\n\n cert_builder.set_not_before(&not_before)?;\n\n let not_after = Asn1Time::days_from_now(365)?;\n", "file_path": "splinterd/src/certs.rs", "rank": 26, "score": 157137.31954413114 }, { "content": "pub fn sha256<T>(message: &T) -> Result<String, Sha256Error>\n\nwhere\n\n T: Message,\n\n{\n\n let bytes = message\n\n .write_to_bytes()\n\n .map_err(|err| Sha256Error(Box::new(err)))?;\n\n hash(MessageDigest::sha256(), &bytes)\n\n .map(|digest| to_hex(&*digest))\n\n .map_err(|err| Sha256Error(Box::new(err)))\n\n}\n\n\n\nimpl RestResourceProvider for AdminService {\n\n fn resources(&self) -> Vec<Resource> {\n\n vec![\n\n make_application_handler_registration_route(self.admin_service_shared.clone()),\n\n make_submit_route(self.admin_service_shared.clone()),\n\n ]\n\n }\n\n}\n\n\n", "file_path": "libsplinter/src/admin/mod.rs", "rank": 27, "score": 155691.56226216804 }, { "content": "pub fn list_nodes(\n\n req: HttpRequest,\n\n registry: web::Data<Box<dyn NodeRegistry>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let query: web::Query<HashMap<String, String>> =\n\n if let Ok(q) = web::Query::from_query(req.query_string()) {\n\n q\n\n } else {\n\n return Box::new(\n\n HttpResponse::BadRequest()\n\n .json(json!({\n\n \"message\": \"Invalid query\"\n\n }))\n\n .into_future(),\n\n );\n\n };\n\n\n\n let offset = match query.get(\"offset\") {\n\n Some(value) => match value.parse::<usize>() {\n\n Ok(val) => val,\n", "file_path": "splinterd/src/routes/node.rs", "rank": 28, "score": 155027.8893243086 }, { "content": "pub fn fetch_node(\n\n request: HttpRequest,\n\n registry: web::Data<Box<dyn NodeRegistry>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let identity = request\n\n .match_info()\n\n .get(\"identity\")\n\n .unwrap_or(\"\")\n\n .to_string();\n\n Box::new(\n\n web::block(move || registry.fetch_node(&identity)).then(|res| match res {\n\n Ok(node) => Ok(HttpResponse::Ok().json(node)),\n\n Err(err) => 
match err {\n\n BlockingError::Error(err) => match err {\n\n NodeRegistryError::NotFoundError(err) => Ok(HttpResponse::NotFound().json(err)),\n\n _ => Ok(HttpResponse::InternalServerError().json(format!(\"{}\", err))),\n\n },\n\n _ => Ok(HttpResponse::InternalServerError().json(format!(\"{}\", err))),\n\n },\n\n }),\n\n )\n\n}\n\n\n", "file_path": "splinterd/src/routes/node.rs", "rank": 29, "score": 155027.8893243086 }, { "content": "/// Helper function for creating a NetworkMessge with a Circuit message type\n\n///\n\n/// # Arguments\n\n///\n\n/// * `payload` - The payload in bytes that should be set in the Circuit message get_payload\n\n/// * `circuit_message_type` - The message type that should be set in teh Circuit message\n\npub fn create_message(\n\n payload: Vec<u8>,\n\n circuit_message_type: CircuitMessageType,\n\n) -> Result<Vec<u8>, protobuf::error::ProtobufError> {\n\n let mut circuit_msg = CircuitMessage::new();\n\n circuit_msg.set_message_type(circuit_message_type);\n\n circuit_msg.set_payload(payload);\n\n let circuit_bytes = circuit_msg.write_to_bytes()?;\n\n\n\n let mut network_msg = NetworkMessage::new();\n\n network_msg.set_message_type(NetworkMessageType::CIRCUIT);\n\n network_msg.set_payload(circuit_bytes);\n\n network_msg.write_to_bytes()\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n\n\n use std::thread;\n", "file_path": "libsplinter/src/service/sender.rs", "rank": 30, "score": 155027.8893243086 }, { "content": "pub fn get_status(\n\n node_id: String,\n\n endpoint: String,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let status = Status {\n\n node_id,\n\n endpoint,\n\n version: get_version(),\n\n };\n\n\n\n Box::new(HttpResponse::Ok().json(status).into_future())\n\n}\n\n\n", "file_path": "splinterd/src/routes/status.rs", "rank": 31, "score": 155027.8893243086 }, { "content": "pub fn get_openapi(\n\n _: HttpRequest,\n\n _: web::Payload,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n 
Box::new(\n\n HttpResponse::Ok()\n\n .body(include_str!(\"../../api/static/openapi.yml\"))\n\n .into_future(),\n\n )\n\n}\n\n\n", "file_path": "splinterd/src/routes/status.rs", "rank": 32, "score": 155027.8893243086 }, { "content": "// Make a certificate and private key signed by the given CA cert and private key\n\n// Cert could act like both server or client\n\npub fn make_ca_signed_cert(\n\n ca_cert: &X509Ref,\n\n ca_privkey: &PKeyRef<Private>,\n\n common_name: &str,\n\n) -> Result<(PKey<Private>, X509), CertError> {\n\n // generate private key\n\n let rsa = Rsa::generate(2048)?;\n\n let privkey = PKey::from_rsa(rsa)?;\n\n\n\n // build x509_name\n\n let mut x509_name = X509NameBuilder::new()?;\n\n x509_name.append_entry_by_text(\"CN\", &common_name)?;\n\n let x509_name = x509_name.build();\n\n\n\n // build x509 cert\n\n let mut cert_builder = X509::builder()?;\n\n cert_builder.set_version(2)?;\n\n let serial_number = {\n\n let mut serial = BigNum::new()?;\n\n serial.rand(159, MsbOption::MAYBE_ZERO, false)?;\n", "file_path": "splinterd/src/certs.rs", "rank": 33, "score": 152767.7034637995 }, { "content": "pub fn do_add(url: &str, value: &str) -> Result<(), CliError> {\n\n let mut connection = TcpStream::connect(url)?;\n\n if value.parse::<u32>().is_err() {\n\n return Err(CliError::UserError(format!(\n\n \"Value {} cannot be parsed to u32\",\n\n value\n\n )));\n\n }\n\n let request = format!(\"GET /add/{} HTTP/1.1\", value);\n\n let _ = connection.write(request.as_bytes())?;\n\n connection.flush()?;\n\n\n\n let mut buffer = [0; 512];\n\n\n\n let _ = connection.read(&mut buffer)?;\n\n let response = String::from_utf8_lossy(&buffer[..]);\n\n\n\n if !response.starts_with(\"HTTP/1.1 204 NO CONTENT\") {\n\n println!(\"{}\", response);\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/private_counter/cli/src/actions.rs", "rank": 34, "score": 152036.08933095727 }, { "content": "pub fn fetch_notification(\n\n conn: &PgConnection,\n\n notification_id: i64,\n\n) -> 
QueryResult<Option<GameroomNotification>> {\n\n gameroom_notification::table\n\n .filter(gameroom_notification::id.eq(notification_id))\n\n .first::<GameroomNotification>(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/notification.rs", "rank": 35, "score": 150605.61018719056 }, { "content": "/// Create a Dispatcher for Authorization messages\n\n///\n\n/// Creates and configures a Dispatcher to handle messages from an AuthorizationMessage envelope.\n\n/// The dispatcher is provided the given network sender for response messages, and the network\n\n/// itself to handle updating identities (or removing connections with authorization failures).\n\n///\n\n/// The identity provided is sent to connections for Trust authorizations.\n\npub fn create_authorization_dispatcher(\n\n auth_manager: AuthorizationManager,\n\n network_sender: Box<dyn Sender<SendRequest>>,\n\n) -> Dispatcher<AuthorizationMessageType> {\n\n let mut auth_dispatcher = Dispatcher::new(network_sender);\n\n\n\n auth_dispatcher.set_handler(\n\n AuthorizationMessageType::CONNECT_REQUEST,\n\n Box::new(ConnectRequestHandler::new(auth_manager.clone())),\n\n );\n\n\n\n auth_dispatcher.set_handler(\n\n AuthorizationMessageType::CONNECT_RESPONSE,\n\n Box::new(ConnectResponseHandler::new(auth_manager.clone())),\n\n );\n\n\n\n auth_dispatcher.set_handler(\n\n AuthorizationMessageType::TRUST_REQUEST,\n\n Box::new(TrustRequestHandler::new(auth_manager.clone())),\n\n );\n", "file_path": "libsplinter/src/network/auth/handlers.rs", "rank": 36, "score": 150605.61018719056 }, { "content": "pub fn list_proposals_with_paging(\n\n conn: &PgConnection,\n\n limit: i64,\n\n offset: i64,\n\n) -> QueryResult<Vec<GameroomProposal>> {\n\n gameroom_proposal::table\n\n .select(gameroom_proposal::all_columns)\n\n .limit(limit)\n\n .offset(offset)\n\n .load::<GameroomProposal>(conn)\n\n}\n\n\n", "file_path": 
"examples/gameroom/database/src/helpers/gameroom.rs", "rank": 37, "score": 148551.83615496583 }, { "content": "pub fn register(\n\n new_user: web::Json<UserCreate>,\n\n pool: web::Data<ConnectionPool>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || create_user(pool, new_user.into_inner())).then(|res| match res {\n\n Ok(user) => Ok(HttpResponse::Ok().json(SuccessResponse::new(user))),\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => {\n\n Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string())))\n\n }\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }),\n\n )\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/authenticate.rs", "rank": 38, "score": 148551.83615496583 }, { "content": "pub fn login(\n\n auth_data: web::Json<AuthData>,\n\n pool: web::Data<ConnectionPool>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || authenticate_user(pool, auth_data.into_inner())).then(|res| match res {\n\n Ok(user) => Ok(HttpResponse::Ok().json(SuccessResponse::new(user))),\n\n Err(err) => match err {\n\n error::BlockingError::Error(_) => Ok(HttpResponse::Unauthorized()\n\n .json(ErrorResponse::unauthorized(\"Invalid email or password\"))),\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }),\n\n )\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UserCreate {\n\n pub email: String,\n\n pub hashed_password: String,\n\n pub encrypted_private_key: String,\n\n pub public_key: String,\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/authenticate.rs", "rank": 39, "score": 
148551.83615496583 }, { "content": "pub fn fetch_notifications_by_time(\n\n conn: &PgConnection,\n\n current_check_time: SystemTime,\n\n previous_check_time: SystemTime,\n\n) -> QueryResult<Vec<GameroomNotification>> {\n\n gameroom_notification::table\n\n .filter(\n\n gameroom_notification::created_time\n\n .ge(previous_check_time)\n\n .and(gameroom_notification::created_time.le(current_check_time)),\n\n )\n\n .load::<GameroomNotification>(conn)\n\n}\n", "file_path": "examples/gameroom/database/src/helpers/notification.rs", "rank": 40, "score": 148551.83615496583 }, { "content": "pub fn list_gamerooms_with_paging(\n\n conn: &PgConnection,\n\n limit: i64,\n\n offset: i64,\n\n) -> QueryResult<Vec<Gameroom>> {\n\n gameroom::table\n\n .select(gameroom::all_columns)\n\n .limit(limit)\n\n .offset(offset)\n\n .load::<Gameroom>(conn)\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 41, "score": 148551.83615496583 }, { "content": "pub fn update_gameroom_notification(\n\n conn: &PgConnection,\n\n notification_id: i64,\n\n) -> QueryResult<Option<GameroomNotification>> {\n\n diesel::update(gameroom_notification::table.find(notification_id))\n\n .set(gameroom_notification::read.eq(true))\n\n .get_result(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/notification.rs", "rank": 42, "score": 148551.83615496583 }, { "content": "pub fn insert_gameroom_proposal(\n\n conn: &PgConnection,\n\n proposal: NewGameroomProposal,\n\n) -> QueryResult<()> {\n\n insert_into(gameroom_proposal::table)\n\n .values(&vec![proposal])\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 43, "score": 148551.83615496583 }, { "content": "pub fn insert_gameroom_services(\n\n conn: &PgConnection,\n\n gameroom_services: &[NewGameroomService],\n\n) -> QueryResult<()> {\n\n 
insert_into(gameroom_service::table)\n\n .values(gameroom_services)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 44, "score": 148551.83615496583 }, { "content": "/// Create and submit the Sabre transactions to setup the XO smart contract.\n\npub fn setup_xo(\n\n private_key: &str,\n\n scabbard_admin_keys: Vec<String>,\n\n splinterd_url: &str,\n\n circuit_id: &str,\n\n service_id: &str,\n\n) -> Result<Box<dyn Future<Item = (), Error = ()> + Send + 'static>, AppAuthHandlerError> {\n\n let context = create_context(\"secp256k1\")?;\n\n let factory = CryptoFactory::new(&*context);\n\n let private_key = Secp256k1PrivateKey::from_hex(private_key)?;\n\n let signer = factory.new_signer(&private_key);\n\n\n\n // The node with the first key in the list of scabbard admins is responsible for setting up xo\n\n let public_key = signer.get_public_key()?.as_hex();\n\n let is_submitter = match scabbard_admin_keys.get(0) {\n\n Some(submitting_key) => &public_key == submitting_key,\n\n None => false,\n\n };\n\n if !is_submitter {\n\n return Ok(Box::new(future::ok(())));\n", "file_path": "examples/gameroom/daemon/src/authorization_handler/sabre.rs", "rank": 45, "score": 148551.83615496583 }, { "content": "pub fn insert_gameroom_members(\n\n conn: &PgConnection,\n\n gameroom_members: &[NewGameroomMember],\n\n) -> QueryResult<()> {\n\n insert_into(gameroom_member::table)\n\n .values(gameroom_members)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 46, "score": 148551.83615496583 }, { "content": "pub fn insert_gameroom_notification(\n\n conn: &PgConnection,\n\n notifications: &[NewGameroomNotification],\n\n) -> QueryResult<()> {\n\n insert_into(gameroom_notification::table)\n\n .values(notifications)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/notification.rs", "rank": 47, 
"score": 148551.83615496583 }, { "content": "pub fn update_gameroom_status(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n updated_time: &SystemTime,\n\n status: &str,\n\n) -> QueryResult<()> {\n\n diesel::update(gameroom::table.find(circuit_id))\n\n .set((\n\n gameroom::updated_time.eq(updated_time),\n\n gameroom::status.eq(status),\n\n ))\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 48, "score": 148551.83615496583 }, { "content": "fn connect<T: Transport>(transport: &mut T, mesh: &Mesh, peers: &[String], n: usize) -> Vec<usize> {\n\n if peers.len() == 0 {\n\n return Vec::with_capacity(0);\n\n }\n\n\n\n let mut ids = Vec::with_capacity(n);\n\n for i in 0..n {\n\n loop {\n\n let peer = &peers[i % peers.len()];\n\n println!(\"Connecting to {}\", peer);\n\n match transport.connect(peer).map(|conn| mesh.add(conn)) {\n\n Ok(Ok(id)) => {\n\n ids.push(id);\n\n break;\n\n }\n\n Ok(Err(err)) => {\n\n eprintln!(\"Error adding connection to mesh: {:?}\", err);\n\n break;\n\n }\n\n Err(_err) => {\n\n thread::sleep(Duration::from_millis(100));\n\n continue;\n\n }\n\n }\n\n }\n\n }\n\n ids\n\n}\n\n\n", "file_path": "libsplinter/examples/mesh-echo-peer.rs", "rank": 49, "score": 148064.09041470996 }, { "content": "pub fn update_gameroom_service_status(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n updated_time: &SystemTime,\n\n old_status: &str,\n\n new_status: &str,\n\n) -> QueryResult<()> {\n\n diesel::update(\n\n gameroom_service::table.filter(\n\n gameroom_service::circuit_id\n\n .eq(circuit_id)\n\n .and(gameroom_service::status.eq(old_status)),\n\n ),\n\n )\n\n .set((\n\n gameroom_service::updated_time.eq(updated_time),\n\n gameroom_service::status.eq(new_status),\n\n ))\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 50, "score": 146593.24796301304 }, { "content": "pub fn list_nodes(\n\n client: 
web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n query: web::Query<HashMap<String, String>>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n let mut request_url = format!(\"{}/nodes\", splinterd_url.get_ref());\n\n\n\n let offset = query\n\n .get(\"offset\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_OFFSET.to_string());\n\n let limit = query\n\n .get(\"limit\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_LIMIT.to_string());\n\n\n\n request_url = format!(\"{}?offset={}&limit={}\", request_url, offset, limit);\n\n\n\n if let Some(filter) = query.get(\"filter\") {\n\n request_url = format!(\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/node.rs", "rank": 51, "score": 146593.24796301304 }, { "content": "pub fn fetch_proposal(\n\n pool: web::Data<ConnectionPool>,\n\n proposal_id: web::Path<i64>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || get_proposal_from_db(pool, *proposal_id)).then(|res| match res {\n\n Ok(proposal) => Ok(HttpResponse::Ok().json(SuccessResponse::new(proposal))),\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => {\n\n match err {\n\n RestApiResponseError::NotFound(err) => Ok(HttpResponse::NotFound()\n\n .json(ErrorResponse::not_found(&err.to_string()))),\n\n _ => Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string()))),\n\n }\n\n }\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }),\n\n )\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/proposal.rs", "rank": 52, "score": 146593.24796301304 }, { "content": "pub fn update_gameroom_proposal_status(\n\n conn: &PgConnection,\n\n proposal_id: i64,\n\n updated_time: &SystemTime,\n\n status: &str,\n\n) -> QueryResult<()> {\n\n 
diesel::update(gameroom_proposal::table.find(proposal_id))\n\n .set((\n\n gameroom_proposal::updated_time.eq(updated_time),\n\n gameroom_proposal::status.eq(status),\n\n ))\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 53, "score": 146593.24796301304 }, { "content": "pub fn list_proposals(\n\n pool: web::Data<ConnectionPool>,\n\n query: web::Query<HashMap<String, usize>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let offset: usize = query\n\n .get(\"offset\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_OFFSET);\n\n\n\n let limit: usize = query\n\n .get(\"limit\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_LIMIT);\n\n\n\n Box::new(\n\n web::block(move || list_proposals_from_db(pool, limit, offset)).then(\n\n move |res| match res {\n\n Ok((proposals, query_count)) => {\n\n let paging_info = get_response_paging_info(\n\n limit,\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/proposal.rs", "rank": 54, "score": 146593.24796301304 }, { "content": "pub fn update_gameroom_member_status(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n updated_time: &SystemTime,\n\n old_status: &str,\n\n new_status: &str,\n\n) -> QueryResult<()> {\n\n diesel::update(\n\n gameroom_member::table.filter(\n\n gameroom_member::circuit_id\n\n .eq(circuit_id)\n\n .and(gameroom_member::status.eq(old_status)),\n\n ),\n\n )\n\n .set((\n\n gameroom_member::updated_time.eq(updated_time),\n\n gameroom_member::status.eq(new_status),\n\n ))\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 55, "score": 146593.24796301304 }, { "content": "pub fn fetch_node(\n\n identity: web::Path<String>,\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n client\n\n .get(&format!(\"{}/nodes/{}\", splinterd_url.get_ref(), 
identity))\n\n .send()\n\n .map_err(Error::from)\n\n .and_then(|mut resp| {\n\n let body = resp.body().wait()?;\n\n match resp.status() {\n\n StatusCode::OK => {\n\n let node: Node = serde_json::from_slice(&body)?;\n\n Ok(HttpResponse::Ok().json(SuccessResponse::new(node)))\n\n }\n\n StatusCode::NOT_FOUND => {\n\n let message: String = serde_json::from_slice(&body)?;\n\n Ok(HttpResponse::NotFound().json(ErrorResponse::not_found(&message)))\n\n }\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/node.rs", "rank": 56, "score": 146593.24796301304 }, { "content": "pub fn fetch_gameroom(\n\n pool: web::Data<ConnectionPool>,\n\n circuit_id: web::Path<String>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || fetch_gameroom_from_db(pool, &circuit_id)).then(|res| match res {\n\n Ok(gameroom) => Ok(HttpResponse::Ok().json(gameroom)),\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => {\n\n match err {\n\n RestApiResponseError::NotFound(err) => Ok(HttpResponse::NotFound()\n\n .json(ErrorResponse::not_found(&err.to_string()))),\n\n _ => Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string()))),\n\n }\n\n }\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }),\n\n )\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/gameroom.rs", "rank": 57, "score": 146593.24796301304 }, { "content": "pub fn fetch_xo_game(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n name: &str,\n\n) -> QueryResult<Option<XoGame>> {\n\n xo_games::table\n\n .filter(\n\n xo_games::game_name\n\n .eq(name)\n\n .and(xo_games::circuit_id.eq(circuit_id)),\n\n )\n\n .first::<XoGame>(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/xo_games.rs", 
"rank": 58, "score": 146593.24796301304 }, { "content": "pub fn list_gamerooms(\n\n pool: web::Data<ConnectionPool>,\n\n query: web::Query<HashMap<String, String>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let mut base_link = \"api/gamerooms?\".to_string();\n\n let offset: usize = query\n\n .get(\"offset\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_OFFSET.to_string())\n\n .parse()\n\n .unwrap_or_else(|_| DEFAULT_OFFSET);\n\n\n\n let limit: usize = query\n\n .get(\"limit\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_LIMIT.to_string())\n\n .parse()\n\n .unwrap_or_else(|_| DEFAULT_LIMIT);\n\n\n\n let status_optional = query.get(\"status\").map(ToOwned::to_owned);\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/gameroom.rs", "rank": 59, "score": 146593.24796301304 }, { "content": "pub fn fetch_gameroom_proposal_with_status(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n status: &str,\n\n) -> QueryResult<Option<GameroomProposal>> {\n\n gameroom_proposal::table\n\n .select(gameroom_proposal::all_columns)\n\n .filter(\n\n gameroom_proposal::circuit_id\n\n .eq(circuit_id)\n\n .and(gameroom_proposal::status.eq(status)),\n\n )\n\n .first(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 60, "score": 146593.24796301304 }, { "content": "pub fn read_notification(\n\n pool: web::Data<ConnectionPool>,\n\n notification_id: web::Path<i64>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || update_gameroom_notification(pool, *notification_id)).then(|res| {\n\n match res {\n\n Ok(notification) => Ok(HttpResponse::Ok().json(SuccessResponse::new(notification))),\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => match err {\n\n RestApiResponseError::NotFound(err) => Ok(HttpResponse::NotFound()\n\n 
.json(ErrorResponse::not_found(&err.to_string()))),\n\n _ => Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string()))),\n\n },\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError()\n\n .json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }\n\n }),\n\n )\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/notification.rs", "rank": 61, "score": 146593.24796301304 }, { "content": "pub fn proposal_vote(\n\n vote: web::Json<CircuitProposalVote>,\n\n proposal_id: web::Path<i64>,\n\n pool: web::Data<ConnectionPool>,\n\n node_info: web::Data<Node>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let node_identity = node_info.identity.to_string();\n\n Box::new(\n\n web::block(move || check_proposal_exists(*proposal_id, pool)).then(|res| match res {\n\n Ok(()) => match make_payload(vote.into_inner(), node_identity) {\n\n Ok(bytes) => Ok(HttpResponse::Ok()\n\n .json(SuccessResponse::new(json!({ \"payload_bytes\": bytes })))),\n\n Err(err) => {\n\n debug!(\"Failed to prepare circuit management payload {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => {\n\n match err {\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/proposal.rs", "rank": 62, "score": 146593.24796301304 }, { "content": "pub fn fetch_notificaiton(\n\n pool: web::Data<ConnectionPool>,\n\n notification_id: web::Path<i64>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || get_notification_from_db(pool, *notification_id)).then(\n\n |res| match res {\n\n Ok(notification) => Ok(HttpResponse::Ok().json(SuccessResponse::new(notification))),\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => match err {\n\n RestApiResponseError::NotFound(err) => 
Ok(HttpResponse::NotFound()\n\n .json(ErrorResponse::not_found(&err.to_string()))),\n\n _ => Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string()))),\n\n },\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError()\n\n .json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n },\n\n ),\n\n )\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/notification.rs", "rank": 63, "score": 146593.24796301304 }, { "content": "pub fn insert_proposal_vote_record(\n\n conn: &PgConnection,\n\n vote_records: &[NewProposalVoteRecord],\n\n) -> QueryResult<()> {\n\n insert_into(proposal_vote_record::table)\n\n .values(vote_records)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 64, "score": 146593.24796301304 }, { "content": "pub fn list_xo_games(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n limit: i64,\n\n offset: i64,\n\n) -> QueryResult<Vec<XoGame>> {\n\n xo_games::table\n\n .filter(xo_games::circuit_id.eq(circuit_id))\n\n .limit(limit)\n\n .offset(offset)\n\n .load::<XoGame>(conn)\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/xo_games.rs", "rank": 65, "score": 146593.24796301304 }, { "content": "pub fn list_gameroom_members_with_status(\n\n conn: &PgConnection,\n\n status: &str,\n\n) -> QueryResult<Vec<GameroomMember>> {\n\n gameroom_member::table\n\n .filter(gameroom_member::status.eq(status))\n\n .load::<GameroomMember>(conn)\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 66, "score": 146593.24796301304 }, { "content": "pub fn list_gamerooms_with_paging_and_status(\n\n conn: &PgConnection,\n\n status: &str,\n\n limit: i64,\n\n offset: i64,\n\n) -> QueryResult<Vec<Gameroom>> {\n\n gameroom::table\n\n .select(gameroom::all_columns)\n\n .filter(gameroom::status.eq(status))\n\n .limit(limit)\n\n .offset(offset)\n\n 
.load::<Gameroom>(conn)\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 67, "score": 146593.24796301304 }, { "content": "pub fn propose_gameroom(\n\n pool: web::Data<ConnectionPool>,\n\n create_gameroom: web::Json<CreateGameroomForm>,\n\n node_info: web::Data<Node>,\n\n gameroomd_data: web::Data<GameroomdData>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n let mut members = create_gameroom\n\n .member\n\n .iter()\n\n .map(|node| SplinterNode {\n\n node_id: node.identity.to_string(),\n\n endpoint: node.metadata.endpoint.to_string(),\n\n })\n\n .collect::<Vec<SplinterNode>>();\n\n\n\n members.push(SplinterNode {\n\n node_id: node_info.identity.to_string(),\n\n endpoint: node_info\n\n .metadata\n\n .get(\"endpoint\")\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/gameroom.rs", "rank": 68, "score": 146593.24796301304 }, { "content": "pub fn list_unread_notifications_with_paging(\n\n conn: &PgConnection,\n\n limit: i64,\n\n offset: i64,\n\n) -> QueryResult<Vec<GameroomNotification>> {\n\n gameroom_notification::table\n\n .select(gameroom_notification::all_columns)\n\n .filter(gameroom_notification::read.eq(false))\n\n .limit(limit)\n\n .offset(offset)\n\n .load::<GameroomNotification>(conn)\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/notification.rs", "rank": 69, "score": 146593.24796301304 }, { "content": "fn into_map(list_params: ListStateRequest) -> HashMap<String, String> {\n\n let mut list_params = list_params;\n\n let mut params = HashMap::new();\n\n\n\n if let Some(address) = list_params.address.take() {\n\n params.insert(\"address\".into(), address);\n\n }\n\n if let Some(head) = list_params.head.take() {\n\n params.insert(\"head\".into(), head);\n\n }\n\n if let Some(start) = list_params.start.take() {\n\n params.insert(\"start\".into(), start.to_string());\n\n }\n\n if let Some(limit) = list_params.limit.take() {\n\n params.insert(\"limit\".into(), 
limit.to_string());\n\n }\n\n\n\n params\n\n}\n\n\n", "file_path": "examples/private_xo/src/routes/state.rs", "rank": 70, "score": 146417.4118556647 }, { "content": "/// A trait used for implementing different schemes for\n\n/// storing events.\n\npub trait EventHistory<T: Clone + Debug>: Clone + Debug {\n\n /// Add an event to the event history\n\n fn store(&mut self, event: T) -> Result<(), EventHistoryError>;\n\n\n\n /// Retrieves a list of events\n\n fn events(&self) -> Result<Vec<T>, EventHistoryError>;\n\n}\n\n\n\n/// An implementation of EventHistory for storing\n\n/// events in memory. Only the n most recent events\n\n/// are stored.\n\n#[derive(Clone, Debug)]\n\npub struct LocalEventHistory<T: Clone + Debug> {\n\n history: VecDeque<T>,\n\n limit: usize,\n\n}\n\n\n\nimpl<T: Clone + Debug> LocalEventHistory<T> {\n\n pub fn with_limit(limit: usize) -> Self {\n\n Self {\n", "file_path": "libsplinter/src/rest_api/events.rs", "rank": 71, "score": 145515.08681644793 }, { "content": "pub fn submit_signed_payload(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n signed_payload: web::Bytes,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n client\n\n .post(format!(\"{}/admin/submit\", *splinterd_url))\n\n .send_body(Body::Bytes(signed_payload))\n\n .map_err(Error::from)\n\n .and_then(|mut resp| {\n\n let status = resp.status();\n\n let body = resp.body().wait()?;\n\n\n\n match status {\n\n StatusCode::ACCEPTED => Ok(HttpResponse::Accepted().json(\n\n SuccessResponse::new(\"The payload was submitted successfully\"),\n\n )),\n\n StatusCode::BAD_REQUEST => {\n\n let body_value: serde_json::Value = serde_json::from_slice(&body)?;\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/submit.rs", "rank": 72, "score": 144723.3781467767 }, { "content": "pub fn list_unread_notifications(\n\n pool: web::Data<ConnectionPool>,\n\n query: web::Query<HashMap<String, usize>>,\n\n) -> Box<dyn Future<Item = HttpResponse, 
Error = Error>> {\n\n let offset: usize = query\n\n .get(\"offset\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_OFFSET);\n\n\n\n let limit: usize = query\n\n .get(\"limit\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_LIMIT);\n\n\n\n Box::new(\n\n web::block(move || list_unread_notifications_from_db(pool, limit, offset)).then(\n\n move |res| match res {\n\n Ok((notifications, query_count)) => {\n\n let paging_info = get_response_paging_info(\n\n limit,\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/notification.rs", "rank": 73, "score": 144723.3781467767 }, { "content": "pub fn connect_socket(\n\n req: HttpRequest,\n\n pool: web::Data<ConnectionPool>,\n\n stream: web::Payload,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(ws::start(GameroomWebSocket::new(pool), &req, stream).into_future())\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/gameroom_websocket.rs", "rank": 74, "score": 144723.3781467767 }, { "content": "pub fn fetch_xo(\n\n pool: web::Data<ConnectionPool>,\n\n circuit_id: web::Path<String>,\n\n game_name: web::Path<String>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n web::block(move || fetch_xo_game_from_db(pool, &circuit_id, &game_name)).then(|res| {\n\n match res {\n\n Ok(xo_game) => Ok(HttpResponse::Ok().json(SuccessResponse::new(xo_game))),\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => match err {\n\n RestApiResponseError::NotFound(err) => Ok(HttpResponse::NotFound()\n\n .json(ErrorResponse::not_found(&err.to_string()))),\n\n _ => Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string()))),\n\n },\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError()\n\n .json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }\n\n }),\n\n )\n\n}\n\n\n", "file_path": 
"examples/gameroom/daemon/src/rest_api/routes/xo_games.rs", "rank": 75, "score": 144723.3781467767 }, { "content": "pub fn submit_scabbard_payload(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n circuit_id: web::Path<String>,\n\n node_info: web::Data<Node>,\n\n signed_payload: web::Bytes,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let service_id = format!(\"gameroom_{}\", node_info.identity);\n\n Box::new(\n\n client\n\n .post(format!(\n\n \"{}/scabbard/{}/{}/batches\",\n\n *splinterd_url, &circuit_id, &service_id\n\n ))\n\n .send_body(Body::Bytes(signed_payload))\n\n .map_err(Error::from)\n\n .and_then(|mut resp| {\n\n let status = resp.status();\n\n let body = resp.body().wait()?;\n\n\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/submit.rs", "rank": 76, "score": 144723.3781467767 }, { "content": "pub fn list_xo(\n\n pool: web::Data<ConnectionPool>,\n\n circuit_id: web::Path<String>,\n\n query: web::Query<HashMap<String, usize>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let offset: usize = query\n\n .get(\"offset\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_OFFSET);\n\n\n\n let limit: usize = query\n\n .get(\"limit\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_LIMIT);\n\n let base_link = format!(\"api/xo/{}/games?\", &circuit_id);\n\n\n\n Box::new(\n\n web::block(move || list_xo_games_from_db(pool, &circuit_id.clone(), limit, offset)).then(\n\n move |res| match res {\n\n Ok((games, query_count)) => {\n", "file_path": "examples/gameroom/daemon/src/rest_api/routes/xo_games.rs", "rank": 77, "score": 144723.3781467767 }, { "content": "pub fn insert_xo_game(conn: &PgConnection, game: NewXoGame) -> QueryResult<()> {\n\n insert_into(xo_games::table)\n\n .values(game)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/xo_games.rs", "rank": 78, "score": 143877.85263328365 }, { "content": "pub fn 
fetch_gameroom_members_by_circuit_id_and_status(\n\n conn: &PgConnection,\n\n circuit_id: &str,\n\n status: &str,\n\n) -> QueryResult<Vec<GameroomMember>> {\n\n gameroom_member::table\n\n .filter(\n\n gameroom_member::circuit_id\n\n .eq(circuit_id)\n\n .and(gameroom_member::status.eq(status)),\n\n )\n\n .load::<GameroomMember>(conn)\n\n}\n\n\n", "file_path": "examples/gameroom/database/src/helpers/gameroom.rs", "rank": 79, "score": 142936.33218031787 }, { "content": "#[derive(Debug, Serialize, Deserialize, Default, Clone)]\n\nstruct PersistedKeyRegistry {\n\n #[serde(flatten)]\n\n keys: BTreeMap<String, PersistedKeyInfo>,\n\n}\n\n\n\nimpl PersistedKeyRegistry {\n\n pub fn add_key(&mut self, key_info: KeyInfo) -> Result<(), KeyRegistryError> {\n\n let hex_key = to_hex(key_info.public_key());\n\n\n\n let persisted_key_info = PersistedKeyInfo {\n\n public_key: hex_key.clone(),\n\n associated_node_id: key_info.associated_node_id().into(),\n\n metadata: key_info\n\n .metadata()\n\n .iter()\n\n .map(|(key, value)| (key.clone(), value.clone()))\n\n .collect::<BTreeMap<String, String>>(),\n\n };\n\n self.keys.insert(hex_key, persisted_key_info);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "libsplinter/src/keys/storage.rs", "rank": 80, "score": 140974.96929681735 }, { "content": "#[derive(Debug, Serialize, Clone, PartialEq)]\n\nstruct KeyInfoResponse {\n\n #[serde(serialize_with = \"as_hex\")]\n\n public_key: Vec<u8>,\n\n node_id: String,\n\n\n\n metadata: BTreeMap<String, String>,\n\n}\n\n\n\nimpl KeyInfoResponse {\n\n fn new(key_info: &KeyInfo) -> Self {\n\n Self {\n\n public_key: key_info.public_key().to_vec(),\n\n node_id: key_info.associated_node_id().into(),\n\n metadata: key_info\n\n .metadata()\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n\n .collect(),\n\n }\n\n }\n", "file_path": "splinterd/src/routes/keys.rs", "rank": 81, "score": 140974.88825582352 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone)]\n\nstruct PersistedKeyInfo {\n\n 
public_key: String,\n\n associated_node_id: String,\n\n\n\n #[serde(default = \"BTreeMap::new\")]\n\n #[serde(skip_serializing_if = \"BTreeMap::is_empty\")]\n\n metadata: BTreeMap<String, String>,\n\n}\n\n\n\nimpl TryInto<KeyInfo> for PersistedKeyInfo {\n\n type Error = KeyRegistryError;\n\n\n\n fn try_into(self) -> Result<KeyInfo, Self::Error> {\n\n let mut builder = KeyInfo::builder(\n\n parse_hex(&self.public_key).map_err(|err| KeyRegistryError {\n\n context: format!(\"Unable to parse public key: {}\", self.public_key),\n\n source: Some(Box::new(err)),\n\n })?,\n\n self.associated_node_id,\n\n );\n", "file_path": "libsplinter/src/keys/storage.rs", "rank": 82, "score": 140969.48542486696 }, { "content": "struct KeyRegistryIter {\n\n persisted_key_registry: Arc<RwLock<PersistedKeyRegistry>>,\n\n current_key_info_index: usize,\n\n}\n\n\n\nimpl Iterator for KeyRegistryIter {\n\n type Item = KeyInfo;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let key_registry = match self.persisted_key_registry.read() {\n\n Ok(readable) => readable,\n\n Err(_) => {\n\n error!(\"Lock was poisoned while iterating over keys; returning None\");\n\n return None;\n\n }\n\n };\n\n\n\n while self.current_key_info_index < key_registry.keys.len() {\n\n let res = key_registry\n\n .keys\n", "file_path": "libsplinter/src/keys/storage.rs", "rank": 83, "score": 140964.00947297332 }, { "content": "#[derive(Debug, Serialize, Clone, PartialEq)]\n\nstruct ListKeyInfoResponse {\n\n data: Vec<KeyInfoResponse>,\n\n paging: Paging,\n\n}\n\n\n", "file_path": "splinterd/src/routes/keys.rs", "rank": 84, "score": 138923.2360498653 }, { "content": "pub fn from_payload<T: DeserializeOwned>(\n\n payload: web::Payload,\n\n) -> impl Future<Item = T, Error = ActixError> {\n\n payload\n\n .from_err::<ActixError>()\n\n .fold(web::BytesMut::new(), move |mut body, chunk| {\n\n body.extend_from_slice(&chunk);\n\n Ok::<_, ActixError>(body)\n\n })\n\n .and_then(|body| 
Ok(serde_json::from_slice::<T>(&body)?))\n\n .or_else(|err| Err(ErrorBadRequest(json!({ \"message\": format!(\"{}\", err) }))))\n\n .into_future()\n\n}\n\n\n\nimpl CreateCircuit {\n\n pub fn from_proto(mut proto: admin::Circuit) -> Result<Self, MarshallingError> {\n\n let authorization_type = match proto.get_authorization_type() {\n\n admin::Circuit_AuthorizationType::TRUST_AUTHORIZATION => AuthorizationType::Trust,\n\n admin::Circuit_AuthorizationType::UNSET_AUTHORIZATION_TYPE => {\n\n return Err(MarshallingError::UnsetField(\n", "file_path": "libsplinter/src/admin/messages.rs", "rank": 85, "score": 138893.4275260294 }, { "content": "pub fn to_hex(bytes: &[u8]) -> String {\n\n let mut buf = String::new();\n\n for b in bytes {\n\n write!(&mut buf, \"{:02x}\", b).expect(\"Unable to write to string\");\n\n }\n\n\n\n buf\n\n}\n\n\n", "file_path": "libsplinter/src/hex.rs", "rank": 86, "score": 136665.4744133738 }, { "content": "pub fn get_xo_contract_address() -> String {\n\n compute_contract_address(XO_NAME, XO_VERSION)\n\n}\n\n\n", "file_path": "examples/gameroom/daemon/src/authorization_handler/sabre.rs", "rank": 87, "score": 136490.7664620873 }, { "content": "/// RAII structure used to allow write access to state object\n\n///\n\n/// This guard will ensure that any changes to an object are persisted to\n\n/// a backing store when this is Dropped.\n\npub trait StorageWriteGuard<'a, T: Sized>: DerefMut<Target = T> {}\n\n\n", "file_path": "libsplinter/src/storage/mod.rs", "rank": 88, "score": 134840.69807336546 }, { "content": "fn is_admin_service_id(service_id: &str) -> bool {\n\n service_id.starts_with(ADMIN_SERVICE_ID_PREFIX)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::sync::{Arc, Mutex};\n\n\n\n use crate::channel::{SendError, Sender};\n\n use crate::circuit::directory::CircuitDirectory;\n\n use crate::circuit::{AuthorizationType, Circuit, DurabilityType, PersistenceType, RouteType};\n\n use crate::network::dispatch::Dispatcher;\n\n 
use crate::protos::circuit::CircuitMessage;\n\n use crate::protos::network::NetworkMessage;\n\n\n\n /// Send a message from a non-admin service. Expect that the message is ignored and an error\n\n /// is returned to sender.\n\n #[test]\n", "file_path": "libsplinter/src/circuit/handlers/admin_message.rs", "rank": 89, "score": 132295.63085397656 }, { "content": "#[derive(Deserialize, Serialize)]\n\nstruct KeySpec {\n\n node_id: String,\n\n #[serde(default = \"BTreeMap::new\")]\n\n #[serde(skip_serializing_if = \"BTreeMap::is_empty\")]\n\n metadata: BTreeMap<String, String>,\n\n}\n\n\n", "file_path": "cli/src/action/admin.rs", "rank": 90, "score": 131636.76663769764 }, { "content": "fn hash(bytes: &[u8]) -> Vec<u8> {\n\n Sha256::digest(bytes).as_slice().to_vec()\n\n}\n\n\n", "file_path": "examples/private_counter/service/src/main.rs", "rank": 91, "score": 131498.2195090023 }, { "content": "/// Stores a service and other structures that are used to manage it\n\nstruct ManagedService {\n\n pub service: Box<dyn Service>,\n\n pub registry: StandardServiceNetworkRegistry,\n\n}\n\n\n\n/// The `ServiceOrchestrator` manages initialization and shutdown of services.\n\npub struct ServiceOrchestrator {\n\n /// A (ServiceDefinition, ManagedService) map\n\n services: Arc<Mutex<HashMap<ServiceDefinition, ManagedService>>>,\n\n /// Factories used to create new services.\n\n service_factories: Vec<Box<dyn ServiceFactory>>,\n\n supported_service_types: Vec<String>,\n\n /// `network_sender` and `inbound_router` are used to create services' senders.\n\n network_sender: Sender<Vec<u8>>,\n\n inbound_router: InboundRouter<CircuitMessageType>,\n\n /// `running` and `join_handles` are used to shutdown the orchestrator's background threads\n\n running: Arc<AtomicBool>,\n\n join_handles: JoinHandles<Result<(), OrchestratorError>>,\n\n}\n\n\n", "file_path": "libsplinter/src/orchestrator/mod.rs", "rank": 92, "score": 130148.73919322535 }, { "content": "struct EventDealerWebSocket<T: Serialize + 
Debug + 'static> {\n\n recv: Option<UnboundedReceiver<MessageWrapper<T>>>,\n\n}\n\n\n\nimpl<T: Serialize + Debug + 'static> EventDealerWebSocket<T> {\n\n fn new(recv: UnboundedReceiver<MessageWrapper<T>>) -> Self {\n\n Self { recv: Some(recv) }\n\n }\n\n}\n\n\n\nimpl<T: Serialize + Debug + 'static> StreamHandler<MessageWrapper<T>, ()>\n\n for EventDealerWebSocket<T>\n\n{\n\n fn handle(&mut self, msg: MessageWrapper<T>, ctx: &mut Self::Context) {\n\n match msg {\n\n MessageWrapper::Message(msg) => {\n\n debug!(\"Received a message: {:?}\", msg);\n\n match serde_json::to_string(&msg) {\n\n Ok(text) => ctx.text(text),\n\n Err(err) => {\n", "file_path": "libsplinter/src/rest_api/events.rs", "rank": 93, "score": 129685.1320574473 }, { "content": "#[derive(Deserialize, Serialize)]\n\nstruct KeyRegistrySpec {\n\n #[serde(flatten)]\n\n keys: BTreeMap<String, KeySpec>,\n\n}\n\n\n", "file_path": "cli/src/action/admin.rs", "rank": 94, "score": 129232.60999409136 }, { "content": "#[derive(Default)]\n\nstruct ManagedAuthorizations {\n\n states: HashMap<String, AuthorizationState>,\n\n callbacks: Vec<Box<dyn AuthorizationCallback>>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum PeerAuthorizationState {\n\n Authorized,\n\n Unauthorized,\n\n}\n\n\n", "file_path": "libsplinter/src/network/auth/mod.rs", "rank": 95, "score": 127783.01378655966 }, { "content": "fn index(_: &mut Request) -> IronResult<Response> {\n\n Ok(Response::with((status::Ok, \"Private XO Server\")))\n\n}\n\n\n", "file_path": "examples/private_xo/src/main.rs", "rank": 96, "score": 127692.53711689867 }, { "content": "/// Creates a public/private key pair.\n\n///\n\n/// Returns the public key in hex, if successful.\n\nfn create_key_pair(\n\n key_dir: &Path,\n\n private_key_path: PathBuf,\n\n public_key_path: PathBuf,\n\n force_create: bool,\n\n quiet: bool,\n\n change_permissions: bool,\n\n) -> Result<Vec<u8>, CliError> {\n\n if !force_create {\n\n if private_key_path.exists() {\n\n return 
Err(CliError::EnvironmentError(format!(\n\n \"file exists: {:?}\",\n\n private_key_path\n\n )));\n\n }\n\n if public_key_path.exists() {\n\n return Err(CliError::EnvironmentError(format!(\n\n \"file exists: {:?}\",\n\n public_key_path\n\n )));\n", "file_path": "cli/src/action/admin.rs", "rank": 97, "score": 126622.9634208232 }, { "content": "struct IncomingIter<'a> {\n\n listener: &'a mut dyn Listener,\n\n}\n\n\n\nimpl<'a> IncomingIter<'a> {\n\n pub fn new(listener: &'a mut dyn Listener) -> Self {\n\n IncomingIter { listener }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for IncomingIter<'a> {\n\n type Item = Result<Box<dyn Connection>, AcceptError>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n Some(self.listener.accept())\n\n }\n\n}\n\n\n\n// -- Errors --\n\n\n", "file_path": "libsplinter/src/transport/mod.rs", "rank": 98, "score": 125945.28458673347 }, { "content": "struct XoShared {\n\n current_state_root: Option<String>,\n\n executor: Executor,\n\n pending_changes: Option<Vec<StateChange>>,\n\n}\n\n\n\nimpl XoShared {\n\n fn new(context_manager: ContextManager) -> Result<Self, XoStateError> {\n\n let mut executor = Executor::new(vec![Box::new(\n\n StaticExecutionAdapter::new_adapter(\n\n vec![Box::new(SawtoothToTransactHandlerAdapter::new(\n\n XoTransactionHandler::new(),\n\n ))],\n\n context_manager,\n\n )\n\n .map_err(|err| {\n\n XoStateError(format!(\n\n \"Unable to create static execution adapter: {}\",\n\n err\n\n ))\n", "file_path": "examples/private_xo/src/transaction/mod.rs", "rank": 99, "score": 125529.13796105287 } ]
Rust
src/wasm/terrain_generator/src/rivers.rs
Havegum/Terrain-Generator
8e562f173f0474d1bf7d53ca04ede75768fa96cd
use super::erosion::get_flux; type River = Vec<(usize, f64)>; pub fn get_river( heights: &Vec<f64>, adjacent: &Vec<Vec<usize>>, flux: &Vec<f64>, sea_level: f64, voronoi_cells: &Vec<Vec<usize>>, cell_heights: &Vec<f64>, mut visited: &mut [bool], i: usize, mut river: Vec<(usize, f64)>, ) -> (River, Vec<River>) { visited[i] = true; let height = heights[i]; if height < sea_level { let cells = &voronoi_cells[i]; let num_adjacent = cells .iter() .filter(|cell| cell_heights[**cell] > sea_level) .count(); if num_adjacent < 2 { return (river, Vec::new()); } } river.push((i, flux[i])); let mut tributaries: Vec<River> = Vec::new(); let mut main_branch_found = false; let mut neighbors = adjacent[i].clone(); neighbors.sort_unstable_by(|&a, &b| flux[a].partial_cmp(&flux[b]).unwrap().reverse()); for neighbor in neighbors { if visited[neighbor] { continue; } if adjacent[neighbor].iter().any(|n| heights[*n] < height) { continue; } if !main_branch_found { main_branch_found = true; let (new_river, mut new_tributaries) = get_river( &heights, &adjacent, &flux, sea_level, &voronoi_cells, &cell_heights, &mut visited, neighbor, river, ); river = new_river; tributaries.append(&mut new_tributaries); } else { let (new_river, mut new_tributaries) = get_river( &heights, &adjacent, &flux, sea_level, &voronoi_cells, &cell_heights, &mut visited, neighbor, vec![(i, flux[i])], ); tributaries.push(new_river); tributaries.append(&mut new_tributaries); } } (river, tributaries) } pub fn get_rivers( heights: &Vec<f64>, adjacent: &Vec<Vec<usize>>, sea_level: f64, voronoi_cells: &Vec<Vec<usize>>, cell_heights: &Vec<f64>, ) -> Vec<River> { let flux = get_flux(heights, adjacent); let mut points_by_height = (0..heights.len()).collect::<Vec<usize>>(); points_by_height.sort_unstable_by(|a, b| heights[*a].partial_cmp(&heights[*b]).unwrap()); let mut visited = vec![false; heights.len()]; let mut rivers: Vec<River> = Vec::new(); for &i in points_by_height.iter() { if visited[i] { continue; } let (new_river, mut 
new_tributaries) = get_river( &heights, &adjacent, &flux, sea_level, &voronoi_cells, &cell_heights, &mut visited, i, Vec::new(), ); rivers.push(new_river); rivers.append(&mut new_tributaries); } rivers .into_iter() .filter(|r| r.len() > 1) .collect::<Vec<River>>() }
use super::erosion::get_flux; type River = Vec<(usize, f64)>; pub fn get_river( heights: &Vec<f64>, adjacent: &Vec<Vec<usize>>, flux: &Vec<f64>, sea_level: f64, voronoi_cells: &Vec<Vec<usize>>, cell_heights: &Vec<f64>, mut visited: &mut [bool], i: usize, mut river: Vec<(usize, f64)>, ) -> (River, Vec<River>) { visited[i] = true; let height = heights[i]; if height < sea_level { let cells = &voronoi_cells[i]; let num_adjacent = cells .iter() .filter(|cell| cell_heights[**cell] > sea_level) .count(); if num_adjacent < 2 { return (river, Vec::new()); } } river.push((i, flux[i])); let mut tributaries: Vec<River> = Vec::new(); let mut main_branch_found = false; let mut neighbors = adjacent[i].clone(); neighbors.sort_unstable_by(|&a, &b| flux[a].partial_cmp(&flux[b]).unwrap().reverse()); fo
pub fn get_rivers( heights: &Vec<f64>, adjacent: &Vec<Vec<usize>>, sea_level: f64, voronoi_cells: &Vec<Vec<usize>>, cell_heights: &Vec<f64>, ) -> Vec<River> { let flux = get_flux(heights, adjacent); let mut points_by_height = (0..heights.len()).collect::<Vec<usize>>(); points_by_height.sort_unstable_by(|a, b| heights[*a].partial_cmp(&heights[*b]).unwrap()); let mut visited = vec![false; heights.len()]; let mut rivers: Vec<River> = Vec::new(); for &i in points_by_height.iter() { if visited[i] { continue; } let (new_river, mut new_tributaries) = get_river( &heights, &adjacent, &flux, sea_level, &voronoi_cells, &cell_heights, &mut visited, i, Vec::new(), ); rivers.push(new_river); rivers.append(&mut new_tributaries); } rivers .into_iter() .filter(|r| r.len() > 1) .collect::<Vec<River>>() }
r neighbor in neighbors { if visited[neighbor] { continue; } if adjacent[neighbor].iter().any(|n| heights[*n] < height) { continue; } if !main_branch_found { main_branch_found = true; let (new_river, mut new_tributaries) = get_river( &heights, &adjacent, &flux, sea_level, &voronoi_cells, &cell_heights, &mut visited, neighbor, river, ); river = new_river; tributaries.append(&mut new_tributaries); } else { let (new_river, mut new_tributaries) = get_river( &heights, &adjacent, &flux, sea_level, &voronoi_cells, &cell_heights, &mut visited, neighbor, vec![(i, flux[i])], ); tributaries.push(new_river); tributaries.append(&mut new_tributaries); } } (river, tributaries) }
function_block-function_prefixed
[ { "content": "pub fn smooth(mut heights: Vec<f64>, adjacent: &Vec<Vec<usize>>) -> Vec<f64> {\n\n let alpha = 1.;\n\n let alpha = 0.66;\n\n\n\n for (i, height) in heights\n\n .clone()\n\n .into_iter()\n\n .enumerate()\n\n .collect::<Vec<(usize, f64)>>()\n\n {\n\n let sum = adjacent[i].iter().map(|n| heights[*n]).sum::<f64>() + height;\n\n\n\n let mean = sum / (adjacent[i].len() + 1) as f64;\n\n\n\n heights[i] = height * (1. - alpha) + mean * alpha;\n\n\n\n for n in adjacent[i].iter() {\n\n heights[*n] = heights[*n] * (1. - alpha) + mean * alpha;\n\n }\n\n }\n\n\n\n heights\n\n}\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 0, "score": 185859.51942014755 }, { "content": "pub fn get_flux(heights: &Vec<f64>, adjacent: &Vec<Vec<usize>>) -> Vec<f64> {\n\n let mut flux = vec![0.0; heights.len()];\n\n\n\n let mut sorted = (0..heights.len()).collect::<Vec<usize>>();\n\n sorted.sort_unstable_by(|a, b| heights[*a].partial_cmp(&heights[*b]).unwrap().reverse());\n\n\n\n // find downhill for each point.\n\n for &point in sorted.iter() {\n\n let lowest_neighbour: usize = *adjacent[point]\n\n .iter()\n\n .min_by(|a, b| heights[**a].partial_cmp(&heights[**b]).unwrap())\n\n .unwrap();\n\n\n\n if adjacent[point].len() > 2 && heights[lowest_neighbour] < heights[point] {\n\n flux[lowest_neighbour] += flux[point] + 1.0;\n\n }\n\n }\n\n flux\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 1, "score": 179377.38355423007 }, { "content": "pub fn erode(heights: Vec<f64>, adjacent: &Vec<Vec<usize>>, sea_level: f64) -> Vec<f64> {\n\n // First, smooth out the landscape a bit, and fill sinks\n\n let heights = smooth(heights, adjacent);\n\n let heights = fill_sinks(heights, adjacent, sea_level);\n\n\n\n let flux = get_flux(&heights, adjacent);\n\n let adjacent = adjacent\n\n .iter()\n\n .map(|arr| arr.iter().map(|n| heights[*n]).collect::<Vec<f64>>())\n\n .collect::<Vec<Vec<f64>>>();\n\n\n\n let erosion_rate = 0.015;\n\n let erosion = |(i, 
height): (usize, f64)| {\n\n let point_flux = (flux[i] + 1.).ln();\n\n\n\n let erosion = point_flux * erosion_rate * height;\n\n\n\n if height >= sea_level {\n\n // Find lowest neighbor.\n\n let low = adjacent[i]\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 2, "score": 166101.90623899826 }, { "content": "pub fn fill_sinks(heights: Vec<f64>, adjacent: &Vec<Vec<usize>>, sea_level: f64) -> Vec<f64> {\n\n // Mewo implementation details: https://mewo2.com/notes/terrain/\n\n // Original paper: https://horizon.documentation.ird.fr/exl-doc/pleins_textes/pleins_textes_7/sous_copyright/010031925.pdf\n\n let epsilon = 1e-5;\n\n\n\n let mut new_heights: Vec<f64> = heights\n\n .clone()\n\n .iter()\n\n .map(|&height| {\n\n if height > sea_level {\n\n f64::INFINITY\n\n } else {\n\n height\n\n }\n\n })\n\n .collect();\n\n\n\n let mut sorted: Vec<(usize, f64)> = heights.clone().into_iter().enumerate().collect();\n\n sorted.sort_unstable_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap());\n\n\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 3, "score": 163835.88466253222 }, { "content": "fn get_coast_cells(heights: &Vec<f64>, neighbors: &Vec<Vec<usize>>, sea_level: f64) -> Vec<usize> {\n\n let mut coasts = Vec::new();\n\n\n\n for i in 0..heights.len() {\n\n if heights[i] >= sea_level {\n\n let neighbors_sea = neighbors[i].iter().any(|&n| heights[n] < sea_level);\n\n if neighbors_sea {\n\n coasts.push(i);\n\n }\n\n }\n\n }\n\n\n\n coasts\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/coasts.rs", "rank": 4, "score": 159042.78747607558 }, { "content": "pub fn plateau(points: &Vec<f64>, mut heights: Vec<f64>) -> Vec<f64> {\n\n let plateau_start = 0.45; // Magic\n\n let plateau_cap = (1. 
- plateau_start) / 4.; // Magic\n\n\n\n let mut peak_index = 0;\n\n for (j, &height) in heights.iter().enumerate() {\n\n if height > heights[peak_index] {\n\n peak_index = j;\n\n }\n\n }\n\n let peak_x = points[peak_index * 2 + 0];\n\n let peak_y = points[peak_index * 2 + 1];\n\n\n\n let interpolate = |height: f64| {\n\n plateau_start\n\n + (1. - (1. - (height - plateau_start) / (1. - plateau_start)).powi(2)) * plateau_cap\n\n };\n\n\n\n for i in 0..heights.len() {\n\n let height = heights[i];\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 5, "score": 156274.59488115498 }, { "content": "pub fn disc_sample(radius: f64, sea_level: f64, gen: &mut TerrainGenerator) -> Vec<f64> {\n\n let size = radius / (2.0_f64).sqrt();\n\n let cols = (1.0 / size) as usize;\n\n let rows = (1.0 / size) as usize;\n\n\n\n let grid: Vec<Vec<[f64; 2]>> = vec![vec![]; rows * cols];\n\n let active: Vec<[f64; 2]> = Vec::new();\n\n let points: Vec<f64> = Vec::new();\n\n\n\n let destruct = add_borders(grid, active, points, size, cols, rows);\n\n let mut grid = destruct.0;\n\n let mut active = destruct.1;\n\n let mut points = destruct.2;\n\n\n\n let x = gen.noise.rng();\n\n let y = gen.noise.rng();\n\n let sample = [x, y];\n\n let col = ((x / size) as usize).min(cols - 1);\n\n let row = ((y / size) as usize).min(rows - 1);\n\n grid[col + row * cols].push(sample);\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 7, "score": 138868.28189596548 }, { "content": "#[wasm_bindgen]\n\npub fn greet() {\n\n alert(\"Hello, history-generator!\");\n\n}\n", "file_path": "src/wasm/history_generator/src/lib.rs", "rank": 10, "score": 82640.83922324568 }, { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // 
https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "src/wasm/history_generator/src/utils.rs", "rank": 11, "score": 80082.13967857482 }, { "content": "pub fn get_coast_lines(\n\n heights: &Vec<f64>,\n\n neighbors: &Vec<Vec<usize>>,\n\n voronoi_points: &Vec<Vec<usize>>,\n\n voronoi_cells: &Vec<Vec<usize>>,\n\n sea_level: f64,\n\n) -> Vec<(usize, usize)> {\n\n let coast_cells = get_coast_cells(heights, neighbors, sea_level);\n\n let mut coast_lines = Vec::new();\n\n\n\n for k in 0..coast_cells.len() {\n\n let points = &voronoi_points[coast_cells[k]];\n\n let mut prev = points[points.len() - 1];\n\n let mut prev_is_border = voronoi_cells[prev].iter().any(|&c| heights[c] < sea_level);\n\n\n\n for i in 0..points.len() {\n\n let point = points[i];\n\n let is_border = voronoi_cells[point].iter().any(|&c| heights[c] < sea_level);\n\n\n\n if is_border & prev_is_border {\n", "file_path": "src/wasm/terrain_generator/src/coasts.rs", "rank": 12, "score": 80082.13967857482 }, { "content": "#[allow(dead_code)]\n\npub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "src/wasm/terrain_generator/src/utils.rs", "rank": 13, "score": 80082.13967857482 }, { "content": "#[wasm_bindgen_test]\n\nfn pass() {\n\n assert_eq!(1 + 1, 2);\n\n}\n", "file_path": "src/wasm/history_generator/tests/web.rs", "rank": 14, "score": 43377.394975034986 }, { "content": "#[wasm_bindgen_test]\n\nfn pass() {\n\n assert_eq!(1 + 1, 2);\n\n}\n", "file_path": 
"src/wasm/terrain_generator/tests/web.rs", "rank": 15, "score": 43377.394975034986 }, { "content": "fn add_borders(\n\n mut grid: Vec<Vec<[f64; 2]>>,\n\n mut active: Vec<[f64; 2]>,\n\n mut points: Vec<f64>,\n\n size: f64,\n\n cols: usize,\n\n rows: usize,\n\n) -> (Vec<Vec<[f64; 2]>>, Vec<[f64; 2]>, Vec<f64>) {\n\n let size = size / 2.0;\n\n let offset = 5e-2;\n\n let cx = 1.0 / 2.0;\n\n let cy = 1.0 / 2.0;\n\n\n\n // Top\n\n for _x in 0..=(1.0 / size) as usize {\n\n let x = _x as f64 * size;\n\n let y = offset * -(x - cx).abs().cos();\n\n let pos = [x, y];\n\n let i = (x / 2.0 / size) as usize;\n\n grid[i].push(pos);\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 16, "score": 42690.69707259894 }, { "content": "fn check_sample(\n\n row: usize,\n\n col: usize,\n\n cols: usize,\n\n rows: usize,\n\n sample: &[f64; 2],\n\n grid: &Vec<Vec<[f64; 2]>>,\n\n min_offset: f64,\n\n) -> bool {\n\n let euclidean =\n\n |a: &[f64; 2], b: &[f64; 2]| ((a[0] - b[0]).powi(2) + (a[1] - b[1]).powi(2)).sqrt();\n\n\n\n 'i_loop: for i in ([-1, 0, 1] as [i8; 3]).iter() {\n\n 'j_loop: for j in ([-1, 0, 1] as [i8; 3]).iter() {\n\n let neighbor_col = match i {\n\n -1 => col.checked_sub(1),\n\n 1 => col.checked_add(1),\n\n _ => Some(col),\n\n };\n\n let neighbor_col = match neighbor_col {\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 17, "score": 42690.69707259894 }, { "content": "fn sample_poisson_points(\n\n k: usize,\n\n size: f64,\n\n min_offset: f64,\n\n point: &[f64; 2],\n\n grid: &mut Vec<Vec<[f64; 2]>>,\n\n gen: &mut TerrainGenerator,\n\n) -> Vec<[f64; 2]> {\n\n let mut new_points: Vec<[f64; 2]> = vec![];\n\n\n\n let cols = (1.0 / size) as usize;\n\n let rows = (1.0 / size) as usize;\n\n\n\n for _ in 0..k {\n\n // Get a sample at some random angle and distance from `point`\n\n let theta = gen.noise.rng() * PI * 2.0;\n\n let offset = size + gen.noise.rng() * min_offset;\n\n let x = point[0] + theta.cos() * offset;\n\n let y = point[1] + 
theta.sin() * offset;\n\n\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 18, "score": 42039.40357329601 }, { "content": "// From `../../terrain.js`\n\nstruct Adjacencies {\n\n adjacent: Vec<Vec<usize>>,\n\n voronoi_triangles: Vec<usize>,\n\n voronoi_points: Vec<Vec<usize>>,\n\n voronoi_cells: Vec<Vec<usize>>,\n\n}\n\n\n\nimpl Voronoi {\n\n // Adapted from:\n\n // https://github.com/d3/d3-delaunay/blob/master/src/voronoi.js\n\n // https://github.com/d3/d3-delaunay/blob/master/src/delaunay.js\n\n pub fn new(points: Vec<f64> /*, xmin: f64, ymin: f64, xmax: f64, ymax: f64*/) -> Voronoi {\n\n utils::set_panic_hook();\n\n let Triangulation {\n\n triangles,\n\n halfedges,\n\n hull,\n\n } = Voronoi::triangulate(&points);\n\n let inedges = Voronoi::get_inedges(&points, &halfedges, &triangles);\n\n let neighbors = Voronoi::get_neighbors(&points, &inedges, &hull, &halfedges, &triangles);\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 24, "score": 26764.131041358258 }, { "content": " let mut changed = true;\n\n while changed {\n\n changed = false;\n\n\n\n for &(i, height) in sorted.iter() {\n\n if new_heights[i] == height {\n\n continue;\n\n }\n\n\n\n let neighbors = &adjacent[i];\n\n for &neighbor in neighbors.iter() {\n\n let other = new_heights[neighbor] + epsilon;\n\n\n\n if height >= other {\n\n new_heights[i] = height;\n\n changed = true;\n\n break;\n\n }\n\n\n\n if new_heights[i] > other && other > height {\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 25, "score": 14.42862940861764 }, { "content": " };\n\n\n\n let noise = |(i, height)| height + self.noise_single(points[i * 2], points[i * 2 + 1]);\n\n\n\n heights.iter().enumerate().map(noise).collect()\n\n }\n\n\n\n fn get_cell_heights(\n\n n: usize,\n\n heights: &Vec<f64>,\n\n voronoi_points: &Vec<Vec<usize>>,\n\n ) -> Vec<f64> {\n\n let mut cell_heights = vec![0.; n];\n\n for i in 0..n {\n\n let points = &voronoi_points[i];\n\n cell_heights[i] = 
points.iter().map(|&n| heights[n]).sum::<f64>() / points.len() as f64;\n\n }\n\n cell_heights\n\n }\n\n\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 26, "score": 13.905187116713993 }, { "content": " }\n\n }\n\n }\n\n }\n\n\n\n // pub fn adjacent(self, cell: usize) -> &Vec<BoardCell> {\n\n //\n\n // }\n\n}\n\n\n\npub enum ActionType {\n\n Occupy(usize),\n\n Grow,\n\n Defend,\n\n // Found (city)\n\n // Others?\n\n}\n\n\n\npub struct SimulatedAction<'a> {\n\n civ: &'a Civilization,\n\n action: ActionType,\n\n successful: bool,\n\n}\n", "file_path": "src/wasm/history_generator/src/board.rs", "rank": 27, "score": 13.7995822183841 }, { "content": "use super::civ::Civilization;\n\n\n\npub struct BoardCell<'a> {\n\n // adjacent: Vec<BoardCell> // Maybe this is a function?\n\n index: usize,\n\n owner: Option<&'a Civilization>,\n\n // resources: f64,\n\n}\n\n\n\npub struct Board<'a> {\n\n cells: Vec<BoardCell<'a>>,\n\n adjacencies: Vec<Vec<usize>>,\n\n history: Vec<BoardMutation<'a>>,\n\n}\n\n\n\npub enum BoardMutation<'a> {\n\n Ownership {\n\n cell: usize,\n\n prev: Option<&'a Civilization>,\n\n next: Option<&'a Civilization>,\n", "file_path": "src/wasm/history_generator/src/board.rs", "rank": 28, "score": 12.370865134166735 }, { "content": "\n\n#[wasm_bindgen(readonly)]\n\n#[derive(Serialize, Debug, PartialEq)]\n\npub struct World {\n\n voronoi: Voronoi,\n\n heights: Vec<f64>,\n\n\n\n #[serde(rename = \"cellHeights\")]\n\n cell_heights: Vec<f64>,\n\n rivers: Vec<Vec<(usize, f64)>>,\n\n\n\n #[serde(rename = \"coastLines\")]\n\n coast_lines: Vec<(usize, usize)>,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl World {\n\n pub fn as_js_value(&self) -> JsValue {\n\n JsValue::from_serde(&self).unwrap()\n\n }\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 29, "score": 12.209791706632155 }, { "content": " },\n\n None,\n\n}\n\n\n\nimpl<'a> Board<'a> {\n\n pub fn new(cells: Vec<BoardCell>, adjacencies: Vec<Vec<usize>>) -> Board 
{\n\n Board {\n\n cells,\n\n adjacencies,\n\n history: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn r#do(&mut self, action: SimulatedAction<'a>) {\n\n if !action.successful {\n\n return;\n\n }\n\n\n\n if let ActionType::Occupy(cell) = action.action {\n\n self.cells[cell].owner = Some(action.civ);\n", "file_path": "src/wasm/history_generator/src/board.rs", "rank": 30, "score": 11.939564987050105 }, { "content": " let point_neighbors: HashSet<usize> = HashSet::from_iter(\n\n voronoi_cells[point]\n\n .iter()\n\n .filter(|&x| heights[*x] < sea_level)\n\n .cloned(),\n\n );\n\n let prev_neighbors: HashSet<usize> = HashSet::from_iter(\n\n voronoi_cells[prev]\n\n .iter()\n\n .filter(|&x| heights[*x] < sea_level)\n\n .cloned(),\n\n );\n\n\n\n if !point_neighbors.is_disjoint(&prev_neighbors) {\n\n coast_lines.push((point, prev));\n\n }\n\n }\n\n\n\n prev = point;\n\n prev_is_border = is_border;\n\n }\n\n }\n\n\n\n coast_lines\n\n}\n", "file_path": "src/wasm/terrain_generator/src/coasts.rs", "rank": 31, "score": 11.085094786320058 }, { "content": " pub delaunay: Delaunay,\n\n pub adjacent: Vec<Vec<usize>>,\n\n pub voronoi_triangles: Vec<usize>,\n\n pub voronoi_points: Vec<Vec<usize>>,\n\n pub voronoi_cells: Vec<Vec<usize>>,\n\n}\n\n\n\n// #[wasm_bindgen]\n\n#[derive(Serialize, Debug, PartialEq)]\n\npub struct Delaunay {\n\n pub points: Vec<f64>,\n\n pub hull: Vec<usize>,\n\n pub inedges: Vec<usize>,\n\n pub halfedges: Vec<usize>,\n\n pub triangles: Vec<usize>,\n\n pub neighbors: Vec<Vec<usize>>,\n\n}\n\n\n\n// From `../../terrain.js`\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 32, "score": 10.981345652344203 }, { "content": " &voronoi.voronoi_points,\n\n );\n\n\n\n let rivers = get_rivers(\n\n &heights,\n\n &voronoi.adjacent,\n\n sea_level,\n\n &voronoi.voronoi_cells,\n\n &cell_heights,\n\n );\n\n log!(\" ✓ rivers flowed\");\n\n\n\n let coast_lines = get_coast_lines(\n\n &cell_heights,\n\n &voronoi.delaunay.neighbors,\n\n &voronoi.voronoi_points,\n\n 
&voronoi.voronoi_cells,\n\n sea_level,\n\n );\n\n log!(\" ✓ coasts lines carved\");\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 33, "score": 10.736819438224877 }, { "content": " pub fn world(&mut self, radius: f64, sea_level: f64) -> World {\n\n log!(\"`world` called\");\n\n let points = poisson::disc_sample(radius, sea_level, self);\n\n log!(\" ✓ points poissoned\");\n\n let voronoi = Voronoi::new(points);\n\n log!(\" ✓ voronoi triangulated\");\n\n\n\n let heights = self.noise_array(&voronoi.circumcenters, None);\n\n log!(\" ✓ heights noised\");\n\n let mut heights = plateau(&voronoi.circumcenters, heights);\n\n log!(\" · ✓ and plateaued\");\n\n\n\n for _ in 0..10 {\n\n heights = erode(heights, &voronoi.adjacent, sea_level);\n\n }\n\n\n\n log!(\" · ✓ and eroded ×10\");\n\n let cell_heights = TerrainGenerator::get_cell_heights(\n\n voronoi.delaunay.points.len() / 2,\n\n &heights,\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 34, "score": 10.191513984449502 }, { "content": " j += 2;\n\n }\n\n // TODO: exterior hull?\n\n circumcenters\n\n }\n\n\n\n fn get_adjacencies(\n\n points: &Vec<f64>,\n\n circumcenters: &Vec<f64>,\n\n inedges: &Vec<usize>,\n\n halfedges: &Vec<usize>,\n\n triangles: &Vec<usize>,\n\n ) -> Result<Adjacencies, String> {\n\n let mut adjacent = vec![Vec::new(); circumcenters.len() / 2];\n\n let mut voronoi_triangles = Vec::new();\n\n let mut voronoi_points = vec![Vec::new(); points.len() / 2];\n\n let mut voronoi_cells = vec![Vec::new(); circumcenters.len() / 2];\n\n\n\n for i in 0..inedges.len() {\n\n let e0 = inedges[i];\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 35, "score": 10.034772567064904 }, { "content": " uniform,\n\n }\n\n }\n\n\n\n pub fn height(&self, x: f64, y: f64) -> f64 {\n\n self.height.get_noise(x as f32, y as f32) as f64\n\n }\n\n\n\n pub fn theta(&self, x: f64, y: f64) -> f64 {\n\n self.theta.get_noise(x as f32, y as f32) as f64\n\n 
}\n\n\n\n pub fn offset(&self, x: f64, y: f64) -> f64 {\n\n self.offset.get_noise(x as f32, y as f32) as f64\n\n }\n\n\n\n pub fn rng(&mut self) -> f64 {\n\n self.uniform.rand::<f64>()\n\n }\n\n\n", "file_path": "src/wasm/terrain_generator/src/noise.rs", "rank": 36, "score": 9.741378914064233 }, { "content": "use bracket_noise::prelude::*;\n\nuse bracket_random::prelude::*;\n\n\n\npub struct Noise {\n\n height: FastNoise,\n\n theta: FastNoise,\n\n offset: FastNoise,\n\n // noise_resources: FastNoise,\n\n uniform: RandomNumberGenerator,\n\n}\n\n\n\nimpl Noise {\n\n pub fn new(seed: u64) -> Noise {\n\n let mut height = FastNoise::seeded(seed);\n\n height.set_noise_type(NoiseType::SimplexFractal);\n\n height.set_fractal_type(FractalType::FBM);\n\n height.set_fractal_octaves(5);\n\n height.set_fractal_gain(0.5);\n\n height.set_fractal_lacunarity(3.0);\n\n height.set_frequency(0.8);\n", "file_path": "src/wasm/terrain_generator/src/noise.rs", "rank": 37, "score": 9.64859609021499 }, { "content": " voronoi_triangles,\n\n voronoi_points,\n\n voronoi_cells,\n\n })\n\n }\n\n\n\n fn get_neighbors(\n\n points: &Vec<f64>,\n\n inedges: &Vec<usize>,\n\n hull: &Vec<usize>,\n\n halfedges: &Vec<usize>,\n\n triangles: &Vec<usize>,\n\n ) -> Vec<Vec<usize>> {\n\n let mut neighbors = vec![Vec::new(); points.len() / 2];\n\n let mut hull_index = vec![EMPTY; points.len() / 2];\n\n for i in 0..hull.len() {\n\n hull_index[hull[i]] = i;\n\n }\n\n\n\n for i in 0..points.len() / 2 {\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 38, "score": 9.519642665487076 }, { "content": " .iter()\n\n .cloned()\n\n .fold(0. / 0., f64::min)\n\n .min(height);\n\n\n\n let eroded = height - erosion;\n\n let alpha = 0.125;\n\n\n\n // If erosion is lower than the lowest neighbor, discount erosion by alpha\n\n low.max(eroded) * (1. 
- alpha) + eroded * alpha\n\n } else {\n\n height - erosion * 0.25\n\n }\n\n };\n\n\n\n let heights = heights\n\n .into_iter()\n\n .enumerate()\n\n .map(erosion)\n\n .collect::<Vec<f64>>();\n\n\n\n heights\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 39, "score": 9.167165202741131 }, { "content": "use std::collections::HashMap;\n\n\n\npub struct Tree {\n\n pub children: HashMap<SimulatedAction, Tree>,\n\n pub actions: Vec<ActionType>,\n\n pub visits: u64,\n\n pub score: f64,\n\n pub board: Option<Board>,\n\n // pub entry_action: SimulatedAction,\n\n}\n\n\n\n\n\nimpl Tree {\n\n pub fn new () -> Tree {\n\n let children = HashMap::new(); // Initialize empty\n\n let actions = vec![]; // Get actions\n\n let visits = 1;\n\n let score = 0.0;\n\n\n\n Tree { children, actions, visits, score }\n\n }\n\n}\n", "file_path": "src/wasm/history_generator/src/tree.rs", "rank": 40, "score": 9.033339597731125 }, { "content": "\n\n World {\n\n voronoi,\n\n heights,\n\n cell_heights,\n\n rivers,\n\n coast_lines,\n\n }\n\n }\n\n}\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 41, "score": 8.179389138744774 }, { "content": "\n\n TerrainGenerator {\n\n noise: Noise::new(seed),\n\n }\n\n }\n\n\n\n pub fn noise_single(&self, x: f64, y: f64) -> f64 {\n\n (self.noise.fractal_noise(x, y) + 1.) 
/ 2.\n\n }\n\n\n\n #[wasm_bindgen(js_name = \"heightmap\")]\n\n pub fn heightmap_js(&self, points: Vec<f64>, heights: Option<Vec<f64>>) -> Vec<f64> {\n\n let heights = self.noise_array(&points, heights);\n\n plateau(&points, heights)\n\n }\n\n\n\n fn noise_array(&self, points: &Vec<f64>, heights: Option<Vec<f64>>) -> Vec<f64> {\n\n let heights = match heights {\n\n None => vec![0.; points.len() / 2],\n\n Some(heights) => heights,\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 42, "score": 7.992952610453376 }, { "content": " points: &Vec<f64>,\n\n halfedges: &Vec<usize>,\n\n triangles: &Vec<usize>,\n\n ) -> Vec<usize> {\n\n let mut inedges: Vec<usize> = vec![EMPTY; points.len() / 2];\n\n for e in 0..halfedges.len() {\n\n let p = triangles[if e % 3 == 2 { e - 2 } else { e + 1 }];\n\n if halfedges[e] == EMPTY || inedges[p] == EMPTY {\n\n inedges[p] = e;\n\n }\n\n }\n\n inedges\n\n }\n\n\n\n fn get_circumcenters(points: &Vec<f64>, triangles: &Vec<usize>) -> Vec<f64> {\n\n let n = triangles.len();\n\n let mut circumcenters = vec![0.0; n / 3 * 2];\n\n let mut i = 0;\n\n let mut j = 0;\n\n\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 43, "score": 7.975225831695944 }, { "content": "use super::civ::{Civilization, CivKnowledge, Priorities};\n\nuse super::board::ActionType;\n\nuse std::collections::HashMap;\n\n\n\n\n\npub struct Simulation {\n\n pub civilizations: Vec<Civilization>,\n\n}\n\n\n\nimpl Simulation {\n\n pub fn new() -> Self {\n\n Simulation {\n\n civilizations: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn find(&self, id: u32, priorities: &Priorities, perceptions: &HashMap<u32, CivKnowledge>) -> ActionType {\n\n ActionType::Grow\n\n }\n\n}\n", "file_path": "src/wasm/history_generator/src/mcts.rs", "rank": 44, "score": 7.386019037498695 }, { "content": " circumcenters,\n\n delaunay,\n\n adjacent,\n\n voronoi_triangles,\n\n voronoi_points,\n\n voronoi_cells,\n\n }\n\n }\n\n\n\n fn triangulate(points: &Vec<f64>) -> 
Triangulation {\n\n let struct_points: Vec<&[f64]> = points.chunks_exact(2).collect();\n\n let struct_points = struct_points\n\n .iter()\n\n .map(|p| Point { x: p[0], y: p[1] })\n\n .collect::<Vec<_>>();\n\n\n\n triangulate(&struct_points).unwrap()\n\n }\n\n\n\n fn get_inedges(\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 45, "score": 7.09674452521995 }, { "content": " continue 'j_loop;\n\n }\n\n }\n\n None => continue 'j_loop,\n\n };\n\n\n\n let neighbor_i = neighbor_col.wrapping_add(cols * neighbor_row);\n\n\n\n for neighbor in grid[neighbor_i].iter() {\n\n let dist = euclidean(&sample, neighbor);\n\n if dist < min_offset {\n\n return false;\n\n }\n\n }\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 46, "score": 7.071776220267772 }, { "content": " self.history.push(BoardMutation::Ownership {\n\n cell: cell,\n\n prev: self.cells[cell].owner,\n\n next: Some(action.civ),\n\n });\n\n }\n\n }\n\n\n\n pub fn undo(&mut self, action: &SimulatedAction) {\n\n if !action.successful {\n\n return;\n\n }\n\n\n\n if let ActionType::Occupy(occupied_cell) = action.action {\n\n let last = self.history.last().unwrap();\n\n\n\n if let BoardMutation::Ownership { cell, prev, next } = last {\n\n if next.unwrap() == action.civ && *cell == occupied_cell {\n\n self.cells[*cell].owner = *prev;\n\n self.history.pop();\n", "file_path": "src/wasm/history_generator/src/board.rs", "rank": 47, "score": 6.960345862973345 }, { "content": " if e0 == EMPTY {\n\n return Err(\"Coincident point\".to_string());\n\n } // coincident point\n\n let mut e = e0;\n\n let mut t;\n\n let mut previous_t = EMPTY;\n\n\n\n loop {\n\n t = e / 3;\n\n voronoi_cells[t].push(i);\n\n voronoi_points[i].push(t);\n\n\n\n // Index `t` is neighbour of the previous `t`\n\n if previous_t != EMPTY {\n\n if !adjacent[t].contains(&previous_t) {\n\n adjacent[t].push(previous_t);\n\n }\n\n if !adjacent[previous_t].contains(&t) {\n\n 
adjacent[previous_t].push(t);\n\n }\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 48, "score": 6.820314484877973 }, { "content": " active.push(pos);\n\n points.extend(pos.iter());\n\n }\n\n\n\n // Left\n\n for _y in 0..=(1.0 / size) as usize {\n\n let y = _y as f64 * size;\n\n let x = offset * -(y - cy).abs().cos();\n\n let pos = [x, y];\n\n let j = ((y / 2.0 / size) as usize).min(cols - 1);\n\n grid[j * cols].push(pos);\n\n active.push(pos);\n\n points.extend(pos.iter());\n\n }\n\n\n\n // Bottom\n\n for _x in 0..=(1.0 / size) as usize {\n\n let x = _x as f64 * size;\n\n let y = 1.0 + offset * (x - cx).abs().cos();\n\n let pos = [x, y];\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 49, "score": 6.728143020858031 }, { "content": "use rand_core::{RngCore, SeedableRng};\n\nuse rand_pcg::Pcg32;\n\nuse std::collections::HashMap;\n\n\n\nuse super::board::ActionType;\n\nuse super::mcts::SimulatedWorld;\n\n\n\npub struct Color(u8, u8, u8);\n\n\n\npub struct Priorities {\n\n territory: f32,\n\n income: f32,\n\n wealth: f32,\n\n population: f32,\n\n}\n\n\n\npub struct CivKnowledge {\n\n priorities: Priorities,\n\n uncertainty: f32, // 0–1 .. 
probably\n\n}\n", "file_path": "src/wasm/history_generator/src/civ.rs", "rank": 50, "score": 6.48110594329882 }, { "content": " pub fn fractal_noise(&self, x: f64, y: f64) -> f64 {\n\n let force = 0.25; // magic\n\n let wavyness = 5e-1; // magic\n\n\n\n let theta = self.theta(x * force, y * force);\n\n let length = self.offset(x * force, y * force);\n\n\n\n let x = x + theta.cos() * length * wavyness;\n\n let y = y + theta.sin() * length * wavyness;\n\n\n\n self.height(x, y)\n\n }\n\n}\n", "file_path": "src/wasm/terrain_generator/src/noise.rs", "rank": 51, "score": 6.252245268312857 }, { "content": "use std::collections::HashSet;\n\nuse std::iter::FromIterator;\n\n\n", "file_path": "src/wasm/terrain_generator/src/coasts.rs", "rank": 52, "score": 6.1409186905198325 }, { "content": "\n\n let mut theta = FastNoise::seeded(seed + 1);\n\n theta.set_noise_type(NoiseType::Simplex);\n\n theta.set_frequency(2.0);\n\n\n\n let mut offset = FastNoise::seeded(seed + 2);\n\n offset.set_noise_type(NoiseType::Simplex);\n\n offset.set_frequency(2.0);\n\n\n\n // let mut noise_resources = FastNoise::seeded(seed + 3);\n\n // noise_resources.set_noise_type(NoiseType::Simplex);\n\n // noise_resources.set_frequency(2.0);\n\n\n\n let uniform = RandomNumberGenerator::seeded(seed);\n\n\n\n Noise {\n\n height,\n\n theta,\n\n offset,\n\n // noise_resources,\n", "file_path": "src/wasm/terrain_generator/src/noise.rs", "rank": 53, "score": 6.1029037379904745 }, { "content": " let circumcenters = Voronoi::get_circumcenters(&points, &triangles);\n\n\n\n let Adjacencies {\n\n adjacent,\n\n voronoi_triangles,\n\n voronoi_points,\n\n voronoi_cells,\n\n } = Voronoi::get_adjacencies(&points, &circumcenters, &inedges, &halfedges, &triangles)\n\n .unwrap();\n\n\n\n let delaunay = Delaunay {\n\n points,\n\n hull,\n\n inedges,\n\n halfedges,\n\n triangles,\n\n neighbors,\n\n };\n\n\n\n Voronoi {\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 54, "score": 5.971290968551781 }, { 
"content": " new_heights[i] = other;\n\n changed = true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n new_heights\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 55, "score": 5.94845561769635 }, { "content": " let i = ((x / 2.0 / size) as usize).min(cols - 1);\n\n grid[i + (rows - 1) * cols].push(pos);\n\n active.push(pos);\n\n points.extend(pos.iter());\n\n }\n\n\n\n // Right\n\n for _y in 0..=(1.0 / size) as usize {\n\n let y = _y as f64 * size;\n\n let x = 1.0 + offset * (y - cy).abs().cos();\n\n let pos = [x, y];\n\n let j = ((y / 2.0 / size) as usize).min(cols - 1);\n\n grid[cols - 1 + j * cols].push(pos);\n\n active.push(pos);\n\n points.extend(pos.iter());\n\n }\n\n\n\n (grid, active, points)\n\n}\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 56, "score": 5.890625706383785 }, { "content": " active.push(sample);\n\n points.extend(sample.iter());\n\n\n\n let offset_magnitude = |h| {\n\n let n = if h > sea_level { h } else { 1.0 - h };\n\n n\n\n };\n\n\n\n while active.len() > 0 {\n\n let rand_i = (gen.noise.rng() * active.len() as f64) as usize;\n\n let point = &active[rand_i];\n\n let min_offset = size * offset_magnitude(gen.noise_single(point[0], point[1]));\n\n let new_points = sample_poisson_points(30, size, min_offset, &point, &mut grid, gen);\n\n\n\n for sample in new_points.iter() {\n\n points.extend(sample.iter());\n\n }\n\n active.extend(new_points.iter());\n\n active.remove(rand_i);\n\n }\n\n\n\n points\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 57, "score": 5.799157411855701 }, { "content": " let e0 = inedges[i];\n\n let mut e = e0;\n\n let mut p0;\n\n\n\n loop {\n\n p0 = triangles[e];\n\n neighbors[i].push(p0);\n\n e = if e % 3 == 2 { e - 2 } else { e + 1 };\n\n\n\n e = halfedges[e];\n\n if e == EMPTY {\n\n let p = hull[(hull_index[i] + 1) % hull.len()];\n\n if p != p0 {\n\n neighbors[i].push(p);\n\n }\n\n break;\n\n }\n\n if e == e0 {\n\n break;\n\n }\n\n }\n\n }\n\n 
neighbors\n\n }\n\n}\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 58, "score": 5.2918744848884876 }, { "content": " voronoi_triangles.extend([i, t, previous_t].iter());\n\n }\n\n previous_t = t;\n\n\n\n e = if e % 3 == 2 { e - 2 } else { e + 1 };\n\n if triangles[e] != i {\n\n break;\n\n } // bad triangulation\n\n e = halfedges[e];\n\n\n\n if e == e0 || e == EMPTY {\n\n break;\n\n }\n\n }\n\n\n\n voronoi_triangles.extend([i, e / 3, previous_t].iter());\n\n }\n\n\n\n Ok(Adjacencies {\n\n adjacent,\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 59, "score": 5.227464003488202 }, { "content": " income: f32,\n\n wealth: f32,\n\n priorities: Priorities,\n\n ) -> Civilization {\n\n Civilization {\n\n id,\n\n name,\n\n color,\n\n income,\n\n wealth,\n\n priorities,\n\n perceptions: HashMap::new(),\n\n rng: Pcg32::seed_from_u64(id as u64),\n\n }\n\n }\n\n\n\n pub fn find_action(&mut self, simulation: &mut SimulatedWorld) -> ActionType {\n\n // Perceptions of others must be fresh here. Maybe just call it just before finding actions\n\n self.perceive_priorities(&mut simulation.civilizations);\n\n let suggested_action: ActionType = simulation.find(self.id, &self.priorities, &self.perceptions);\n", "file_path": "src/wasm/history_generator/src/civ.rs", "rank": 60, "score": 5.201398266971635 }, { "content": "use wasm_bindgen::prelude::*;\n\n\n\nuse super::coasts::*;\n\nuse super::erosion::*;\n\nuse super::noise::Noise;\n\nuse super::poisson;\n\nuse super::rivers::*;\n\nuse super::utils;\n\nuse super::voronoi::Voronoi;\n\n\n\nextern crate web_sys;\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! 
log {\n\n ( $( $t:tt )* ) => {\n\n if cfg![target = \"wasm32-unknown-unknown\"] {\n\n web_sys::console::log_1(&format!( $( $t )* ).into());\n\n }\n\n }\n\n}\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 61, "score": 4.536904624297344 }, { "content": "// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global allocator.\n\n#[cfg(feature = \"wee_alloc\")]\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n// For serializing\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\nmod coasts;\n\nmod erosion;\n\nmod noise;\n\nmod poisson;\n\nmod rivers;\n\npub mod terrain_generator;\n\nmod utils;\n\nmod voronoi;\n", "file_path": "src/wasm/terrain_generator/src/lib.rs", "rank": 62, "score": 4.479053443384881 }, { "content": "use delaunator::{triangulate, Point, Triangulation, EMPTY};\n\n\n\n#[path = \"utils.rs\"]\n\nmod utils;\n\n\n\nextern crate web_sys;\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! log {\n\n ( $( $t:tt )* ) => {\n\n web_sys::console::log_1(&format!( $( $t )* ).into());\n\n }\n\n}\n\n\n\n// Implement _init from here:\n\n// https://github.com/d3/d3-delaunay/blob/master/src/voronoi.js\n\n\n\n#[derive(Serialize, Debug, PartialEq)]\n\npub struct Voronoi {\n\n pub circumcenters: Vec<f64>,\n", "file_path": "src/wasm/terrain_generator/src/voronoi.rs", "rank": 63, "score": 4.414770492133988 }, { "content": "use super::terrain_generator::TerrainGenerator;\n\nuse std::f64::consts::PI;\n\n\n\nextern crate web_sys;\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! log {\n\n ( $( $t:tt )* ) => {\n\n web_sys::console::log_1(&format!( $( $t )* ).into());\n\n }\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 64, "score": 4.225827822539149 }, { "content": "\n\n pub fn glean_priorities(\n\n &mut self,\n\n uncertainty: f32,\n\n personal_priorities: &Priorities,\n\n ) -> Priorities {\n\n // We lean towards our own priorities. 
At least 25%, at most 75%\n\n let mut territory = personal_priorities.territory * (0.25 + uncertainty * 0.5);\n\n let mut income = personal_priorities.income * (0.25 + uncertainty * 0.5);\n\n let mut wealth = personal_priorities.wealth * (0.25 + uncertainty * 0.5);\n\n let mut population = personal_priorities.population * (0.25 + uncertainty * 0.5);\n\n\n\n territory += (self.rng(uncertainty) + self.priorities.territory) * (1.0 - uncertainty);\n\n income += (self.rng(uncertainty) + self.priorities.income) * (1.0 - uncertainty);\n\n wealth += (self.rng(uncertainty) + self.priorities.wealth) * (1.0 - uncertainty);\n\n population += (self.rng(uncertainty) + self.priorities.population) * (1.0 - uncertainty);\n\n\n\n let sum = territory + income + wealth + population;\n\n\n\n territory /= sum;\n", "file_path": "src/wasm/history_generator/src/civ.rs", "rank": 65, "score": 3.5363567752050864 }, { "content": " income /= sum;\n\n wealth /= sum;\n\n population /= sum;\n\n\n\n Priorities {\n\n territory,\n\n income,\n\n wealth,\n\n population,\n\n }\n\n }\n\n\n\n fn rng(&mut self, range: f32) -> f32 {\n\n range * self.rng.next_u32() as f32 / u32::MAX as f32\n\n }\n\n}\n\n\n\nimpl PartialEq for Civilization {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.id == other.id\n\n }\n\n}\n", "file_path": "src/wasm/history_generator/src/civ.rs", "rank": 66, "score": 3.264410862522446 }, { "content": "\n\n let x = points[i * 2 + 0];\n\n let y = points[i * 2 + 1];\n\n\n\n let distance_to_peak = ((x - peak_x).hypot(y - peak_y).min(0.5) / 0.5).powi(2);\n\n heights[i] = (1. - distance_to_peak) * height + distance_to_peak * interpolate(height);\n\n }\n\n\n\n heights\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/erosion.rs", "rank": 67, "score": 3.220565288945843 }, { "content": " suggested_action\n\n }\n\n\n\n pub fn perceive_priorities(&mut self, others: &mut Vec<Civilization>) {\n\n // Civ knowledge is persistent across turns. 
We don't reinitialize perceptions, but\n\n // rather update our beliefs (in the future, maybe in the bayesian sense).\n\n for foreign_civ in others {\n\n if foreign_civ == self {\n\n continue;\n\n }\n\n\n\n let knowledge = self.perceptions.get(&foreign_civ.id);\n\n match knowledge {\n\n None => {\n\n let uncertainty = 1.;\n\n let perceived_priorities =\n\n foreign_civ.glean_priorities(uncertainty, &self.priorities);\n\n\n\n let knowledge = CivKnowledge {\n\n uncertainty,\n", "file_path": "src/wasm/history_generator/src/civ.rs", "rank": 68, "score": 3.0983409061011873 }, { "content": " Some(col) => {\n\n if col < cols {\n\n col\n\n } else {\n\n continue 'i_loop;\n\n }\n\n }\n\n None => continue 'i_loop,\n\n };\n\n\n\n let neighbor_row = match j {\n\n -1 => row.checked_sub(1),\n\n 1 => row.checked_add(1),\n\n _ => Some(row),\n\n };\n\n let neighbor_row = match neighbor_row {\n\n Some(row) => {\n\n if row < rows {\n\n row\n\n } else {\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 69, "score": 2.8327760001247153 }, { "content": "}\n\n\n\n#[wasm_bindgen]\n\npub struct TerrainGenerator {\n\n #[wasm_bindgen(skip)]\n\n pub noise: Noise,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl TerrainGenerator {\n\n #[wasm_bindgen(constructor)]\n\n pub fn new(seed: Option<u32>) -> TerrainGenerator {\n\n if cfg![target = \"wasm32-unknown-unknown\"] {\n\n utils::set_panic_hook();\n\n }\n\n\n\n let seed = match seed {\n\n None => 123456 as u64,\n\n Some(seed) => seed as u64,\n\n };\n", "file_path": "src/wasm/terrain_generator/src/terrain_generator.rs", "rank": 70, "score": 2.424964480473634 }, { "content": "mod board;\n\nmod civ;\n\nmod utils;\n\nmod mcts;\n\n\n\nuse wasm_bindgen::prelude::*;\n\n\n\n// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global\n\n// allocator.\n\n#[cfg(feature = \"wee_alloc\")]\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n fn alert(s: 
&str);\n\n}\n\n\n\n#[wasm_bindgen]\n", "file_path": "src/wasm/history_generator/src/lib.rs", "rank": 71, "score": 2.0078342967468528 }, { "content": "\n\npub struct Civilization {\n\n id: u32,\n\n name: String,\n\n color: Color,\n\n income: f32,\n\n wealth: f32,\n\n priorities: Priorities,\n\n // secrecy: f32,\n\n // feature: add this as a constant uncertainty to others about their priorities\n\n // Secrecy will cool over time, and can be reheated with an action.\n\n perceptions: HashMap<u32, CivKnowledge>, // indexed by id ... probably\n\n rng: Pcg32,\n\n}\n\n\n\nimpl Civilization {\n\n pub fn new(\n\n id: u32,\n\n name: String,\n\n color: Color,\n", "file_path": "src/wasm/history_generator/src/civ.rs", "rank": 72, "score": 1.909117298832888 }, { "content": " // If out of lower bounds, keep looking.\n\n if x < -min_offset || y < -min_offset\n\n || x > 1. + min_offset || y > 1. + min_offset {\n\n continue;\n\n }\n\n\n\n let sample = [x, y];\n\n let col = ((x / size) as usize).min(cols - 1);\n\n let row = ((y / size) as usize).min(rows - 1);\n\n\n\n if check_sample(row, col, cols, rows, &sample, &grid, min_offset) == false {\n\n continue; // Check if too close to existing samples. If point is not valid, keep looking.\n\n }\n\n // push sample in\n\n grid[col + row * cols].push(sample);\n\n new_points.push(sample);\n\n }\n\n\n\n new_points\n\n}\n\n\n", "file_path": "src/wasm/terrain_generator/src/poisson.rs", "rank": 73, "score": 1.8414929920058896 }, { "content": "//! Test suite for the Web and headless browsers.\n\n\n\n#![cfg(target_arch = \"wasm32\")]\n\n\n\nextern crate wasm_bindgen_test;\n\nuse wasm_bindgen_test::*;\n\n\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n#[wasm_bindgen_test]\n", "file_path": "src/wasm/history_generator/tests/web.rs", "rank": 74, "score": 1.7478442765348783 }, { "content": "//! 
Test suite for the Web and headless browsers.\n\n\n\n#![cfg(target_arch = \"wasm32\")]\n\n\n\nextern crate wasm_bindgen_test;\n\nuse wasm_bindgen_test::*;\n\n\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n#[wasm_bindgen_test]\n", "file_path": "src/wasm/terrain_generator/tests/web.rs", "rank": 75, "score": 1.7478442765348783 }, { "content": "Copyright (c) 2018 Halvard Vegum <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any\n\nperson obtaining a copy of this software and associated\n\ndocumentation files (the \"Software\"), to deal in the\n\nSoftware without restriction, including without\n\nlimitation the rights to use, copy, modify, merge,\n\npublish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following\n\nconditions:\n\n\n\nThe above copyright notice and this permission notice\n\nshall be included in all copies or substantial portions\n\nof the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\nANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\nTO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\nPARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n\nSHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\nDEALINGS IN THE SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 76, "score": 1.4219094644203585 }, { "content": "<div align=\"center\">\n\n\n\n <h1><code>wasm-pack-template</code></h1>\n\n\n\n <strong>A template for kick starting a Rust and WebAssembly project using <a href=\"https://github.com/rustwasm/wasm-pack\">wasm-pack</a>.</strong>\n\n\n\n <p>\n\n <a href=\"https://travis-ci.org/rustwasm/wasm-pack-template\"><img src=\"https://img.shields.io/travis/rustwasm/wasm-pack-template.svg?style=flat-square\" alt=\"Build Status\" /></a>\n\n </p>\n\n\n\n <h3>\n\n <a href=\"https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html\">Tutorial</a>\n\n <span> | </span>\n\n <a href=\"https://discordapp.com/channels/442252698964721669/443151097398296587\">Chat</a>\n\n </h3>\n\n\n\n <sub>Built with 🦀🕸 by <a href=\"https://rustwasm.github.io/\">The Rust and WebAssembly Working Group</a></sub>\n\n</div>\n\n\n\n## About\n\n\n\n[**📚 Read this template tutorial! 
📚**][template-docs]\n\n\n\nThis template is designed for compiling Rust libraries into WebAssembly and\n\npublishing the resulting package to NPM.\n\n\n\nBe sure to check out [other `wasm-pack` tutorials online][tutorials] for other\n\ntemplates and usages of `wasm-pack`.\n\n\n\n[tutorials]: https://rustwasm.github.io/docs/wasm-pack/tutorials/index.html\n\n[template-docs]: https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html\n\n\n\n## 🚴 Usage\n\n\n\n### 🐑 Use `cargo generate` to Clone this Template\n\n\n\n[Learn more about `cargo generate` here.](https://github.com/ashleygwilliams/cargo-generate)\n\n\n\n```\n\ncargo generate --git https://github.com/rustwasm/wasm-pack-template.git --name my-project\n\ncd my-project\n\n```\n\n\n\n### 🛠️ Build with `wasm-pack build`\n\n\n\n```\n\nwasm-pack build\n\n```\n\n\n\n### 🔬 Test in Headless Browsers with `wasm-pack test`\n\n\n\n```\n\nwasm-pack test --headless --firefox\n\n```\n\n\n\n### 🎁 Publish to NPM with `wasm-pack publish`\n\n\n\n```\n\nwasm-pack publish\n\n```\n\n\n\n## 🔋 Batteries Included\n\n\n\n* [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating\n\n between WebAssembly and JavaScript.\n\n* [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook)\n\n for logging panic messages to the developer console.\n\n* [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized\n\n for small code size.\n", "file_path": "src/wasm/history_generator/README.md", "rank": 77, "score": 0.7023127579365758 }, { "content": "<div align=\"center\">\n\n\n\n <h1><code>wasm-pack-template</code></h1>\n\n\n\n <strong>A template for kick starting a Rust and WebAssembly project using <a href=\"https://github.com/rustwasm/wasm-pack\">wasm-pack</a>.</strong>\n\n\n\n <p>\n\n <a href=\"https://travis-ci.org/rustwasm/wasm-pack-template\"><img src=\"https://img.shields.io/travis/rustwasm/wasm-pack-template.svg?style=flat-square\" alt=\"Build Status\" 
/></a>\n\n </p>\n\n\n\n <h3>\n\n <a href=\"https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html\">Tutorial</a>\n\n <span> | </span>\n\n <a href=\"https://discordapp.com/channels/442252698964721669/443151097398296587\">Chat</a>\n\n </h3>\n\n\n\n <sub>Built with 🦀🕸 by <a href=\"https://rustwasm.github.io/\">The Rust and WebAssembly Working Group</a></sub>\n\n</div>\n\n\n\n## About\n\n\n\n[**📚 Read this template tutorial! 📚**][template-docs]\n\n\n\nThis template is designed for compiling Rust libraries into WebAssembly and\n\npublishing the resulting package to NPM.\n\n\n\nBe sure to check out [other `wasm-pack` tutorials online][tutorials] for other\n\ntemplates and usages of `wasm-pack`.\n\n\n\n[tutorials]: https://rustwasm.github.io/docs/wasm-pack/tutorials/index.html\n\n[template-docs]: https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html\n\n\n\n## 🚴 Usage\n\n\n\n### 🐑 Use `cargo generate` to Clone this Template\n\n\n\n[Learn more about `cargo generate` here.](https://github.com/ashleygwilliams/cargo-generate)\n\n\n\n```\n\ncargo generate --git https://github.com/rustwasm/wasm-pack-template.git --name my-project\n\ncd my-project\n\n```\n\n\n\n### 🛠️ Build with `wasm-pack build`\n\n\n\n```\n\nwasm-pack build\n\n```\n\n\n\n### 🔬 Test in Headless Browsers with `wasm-pack test`\n\n\n\n```\n\nwasm-pack test --headless --firefox\n\n```\n\n\n\n### 🎁 Publish to NPM with `wasm-pack publish`\n\n\n\n```\n\nwasm-pack publish\n\n```\n\n\n\n## 🔋 Batteries Included\n\n\n\n* [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating\n\n between WebAssembly and JavaScript.\n\n* [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook)\n\n for logging panic messages to the developer console.\n\n* [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized\n\n for small code size.\n", "file_path": "src/wasm/terrain_generator/README.md", "rank": 78, "score": 
0.7023127579365758 }, { "content": "\n\n![](https://raw.githubusercontent.com/Havegum/Terrain-Generator/master/public/favicon.png)\n\n\n\n# Agent-based border simulation\n\nI originally started this because I wanted to try generating semi-realistic borders by simulating agents.\n\n\n\nPossibly with genetic algorithms? Maybe with reinforcement learning? We'll see ... for now I'm just porting the thing over to Rust, and learning the language on the way.\n\n\n\n\n\n## References and inspiration\n\n### [Uncharted Atlas](https://github.com/mewo2/terrain)\n\nThe starting point for this project. Voronoi based map generation with hydraulic erosion.\n\n\n\n### [Here be dragons](https://heredragonsabound.blogspot.com/2016/10/welcome.html)\n\nUses the same exact starting point as this project. Lots of well explained development, going through the pitfalls and successes!\n\n\n\n### [ThingOnItsOwn](http://thingonitsown.blogspot.com/)\n\nPosts about maps, evolving borders, simulating agents. Very cool!\n\n\n\n### [Undiscovered Worlds](https://undiscoveredworlds.blogspot.com/)\n\nMassive world generation project. Goes through lots of tricks to make the maps more believable.\n\n\n\n### [Geologically reasonable maps](https://www.reddit.com/r/proceduralgeneration/comments/gi4hq4/geologically_reasonable_maps_seed_2/) by u/troyunrau.\n\nMore world maps. I'm not drawing world maps, but there's probably some helpful tips here.\n\n\n\n### [Amit Patel's posts are a treasure trove](http://www-cs-students.stanford.edu/~amitp/game-programming/polygon-map-generation/)\n\n[Lots of good stuff here ...](https://simblob.blogspot.com/2018/08/mapgen4-goals.html). 
Remember to check the appendices as well.\n\n\n\n\n\n## How to try this out yourself\n\nYou will need to have [Node.js](https://nodejs.org) installed.\n\n\n\nAdditionally you'll need a bunch of [Rust stuff](https://www.rust-lang.org/tools/install).\n\n\n\nWhen all is set up, you can navigate to this projects folder and run:\n\n```bash\n\nyarn install\n\nyarn dev\n\n```\n\nIf you don't have yarn, `npm install` and `npm run dev` should do the trick.\n\n\n\nIt should now be running and be available at [localhost:5000](http://localhost:5000).\n", "file_path": "README.md", "rank": 79, "score": 0.45167565939481014 }, { "content": "class TerrainGenerator {\n\n constructor (seed=123456) {\n\n this.wasm = new Promise((resolve, reject) => wasm\n\n .then(result => {\n\n this.terrainGen = new result.TerrainGenerator(seed);\n\n resolve(true);\n\n }).catch(reject)\n\n );\n\n }\n\n\n\n async generate ({ points = 2**10, seaLevel = 0.39 }={}) {\n\n await this.wasm;\n\n\n\n let radius = Math.pow(500 / points, 0.5) / 10;\n\n const world = this.terrainGen.world(radius, seaLevel).as_js_value();\n\n world.seaLevel = seaLevel;\n\n world.points = world.voronoi.delaunay.points;\n\n world.circumcenters = world.voronoi.circumcenters;\n\n world.voronoiAdjacency = world.voronoi.adjacent;\n\n world.voronoiTriangles = world.voronoi.voronoi_triangles;\n\n world.voronoiPoints = world.voronoi.voronoi_points;\n\n\n\n delete world.voronoi\n\n return world;\n\n }\n", "file_path": "src/utils/terrain-worker.js", "rank": 80, "score": 0.3651678587654983 } ]
Rust
src/conferencing.rs
ktaekwon000/fluminurs
edcbbba13f8f5bf23d713333518b6ee25be5294a
use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::time::SystemTime; use async_trait::async_trait; use futures_util::future; use reqwest::header::REFERER; use reqwest::{Method, Url}; use scraper::{Html, Selector}; use serde::Deserialize; use crate::resource; use crate::resource::{OverwriteMode, OverwriteResult, Resource}; use crate::util::{parse_time, sanitise_filename}; use crate::{Api, ApiData, Result}; const ZOOM_VALIDATE_MEETING_PASSWORD_URL: &str = "https://nus-sg.zoom.us/rec/validate_meet_passwd"; const ZOOM_PASSWORD_URL_PREFIX: &str = "/rec/share"; const ZOOM_DOWNLOAD_REFERER_URL: &str = "https://nus-sg.zoom.us/"; #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct Conference { id: String, name: String, start_date: String, #[serde(rename = "isPublishRecordURL")] is_publish_record_url: bool, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct CloudRecord { code: Option<u32>, record_instances: Option<Vec<CloudRecordInstance>>, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct CloudRecordInstance { #[serde(rename = "shareURL")] share_url: String, password: String, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct ZoomValidationResponse { status: bool, } pub struct ConferencingHandle { id: String, path: PathBuf, } #[derive(Debug)] pub struct ZoomRecording { id: String, path: PathBuf, share_url: String, password: String, start_date: SystemTime, } impl ConferencingHandle { pub fn new(id: String, path: PathBuf) -> ConferencingHandle { ConferencingHandle { id, path } } pub async fn load(self, api: &Api) -> Result<Vec<ZoomRecording>> { let conferencing_resp = api .api_as_json::<ApiData<Vec<Conference>>>( &format!( "zoom/Meeting/{}/Meetings?offset=0&sortby=startDate%20asc&populate=null", self.id ), Method::GET, None, ) .await?; match conferencing_resp.data { Some(conferences) => future::join_all( conferences .into_iter() .filter(|c| c.is_publish_record_url) 
.map(|c| load_cloud_record(api, c, &self.path)), ) .await .into_iter() .collect::<Result<Vec<_>>>() .map(|v| v.into_iter().flatten().collect::<Vec<_>>()), None => Err("Invalid API response from server: type mismatch"), } } } async fn load_cloud_record( api: &Api, conference: Conference, path: &Path, ) -> Result<Vec<ZoomRecording>> { let request_path = format!("zoom/Meeting/{}/cloudrecord", conference.id); let mut num_404_tries = 0; let cloud_record = loop { let cloud_record = api .api_as_json::<CloudRecord>(&request_path, Method::GET, None) .await?; if cloud_record.code != Some(400) && (cloud_record.code != Some(404) || num_404_tries >= 5) { break cloud_record; } if cloud_record.code == Some(404) { num_404_tries += 1; } }; let start_date = parse_time(&conference.start_date); let mut conference_id = conference.id; let conference_name: &str = &conference.name; match cloud_record.record_instances { Some(record_instances) => Ok(match record_instances.len() { 0 => vec![], 1 => record_instances .into_iter() .map(|cri| ZoomRecording { id: std::mem::take(&mut conference_id), path: path.join(make_mp4_extension(Path::new(&sanitise_filename( conference_name, )))), share_url: cri.share_url, password: cri.password, start_date, }) .collect::<Vec<_>>(), _ => record_instances .into_iter() .enumerate() .map(|(i, cri)| ZoomRecording { id: conference_id.clone(), path: path.join(make_mp4_extension(Path::new(&append_number( &sanitise_filename(conference_name), i + 1, )))), share_url: cri.share_url, password: cri.password, start_date, }) .collect::<Vec<_>>(), }), None => Ok(vec![]), } } fn make_mp4_extension(path: &Path) -> PathBuf { path.with_extension("mp4") } fn append_number(text: &str, number: usize) -> String { format!("{} ({})", text, number) } #[async_trait(?Send)] impl Resource for ZoomRecording { fn id(&self) -> &str { &self.id } fn path(&self) -> &Path { &self.path } fn path_mut(&mut self) -> &mut PathBuf { &mut self.path } fn last_updated(&self) -> SystemTime { 
self.start_date } async fn download( &self, api: &Api, destination: &Path, temp_destination: &Path, overwrite: OverwriteMode, ) -> Result<OverwriteResult> { resource::do_retryable_download( api, destination, temp_destination, overwrite, self.last_updated(), move |api| self.get_download_url(api), move |api, url, temp_destination| { resource::download_chunks(api, url, temp_destination, |req| { Api::add_desktop_user_agent(req) .header(reqwest::header::RANGE, "bytes=0-") .header(reqwest::header::REFERER, ZOOM_DOWNLOAD_REFERER_URL) }) }, ) .await } } impl ZoomRecording { async fn get_download_url(&self, api: &Api) -> Result<Url> { let share_url = Url::parse(&self.share_url).map_err(|_| "Unable to parse share URL")?; let share_resp = api .custom_request( share_url.clone(), Method::GET, None, Api::add_desktop_user_agent, ) .await?; let video_resp = if share_resp .url() .path() .starts_with(ZOOM_PASSWORD_URL_PREFIX) { let cloned_share_resp_url = share_resp.url().to_string(); let html = share_resp .text() .await .map_err(|_| "Unable to get HTML response")?; let document = Html::parse_document(&html); let id_selector = Selector::parse("#meetId").unwrap(); let mut form: HashMap<&str, &str> = HashMap::new(); form.insert( "id", document .select(&id_selector) .next() .and_then(|el| el.value().attr("value")) .ok_or("Unable to find conference id")?, ); form.insert("passwd", &self.password); form.insert("action", "viewdetailpage"); form.insert("recaptcha", ""); let validate_resp = api .custom_request( Url::parse(ZOOM_VALIDATE_MEETING_PASSWORD_URL) .expect("Unable to parse Zoom validation URL"), Method::POST, Some(&form), move |req| { Api::add_desktop_user_agent(req) .header(REFERER, cloned_share_resp_url.as_str()) }, ) .await?; let validate_resp_data = validate_resp .json::<ZoomValidationResponse>() .await .map_err(|_| "Unable to parse response JSON from Zoom validation")?; if !validate_resp_data.status { return Err("Recording password was rejected by Zoom"); } let resp = api 
.custom_request(share_url, Method::GET, None, Api::add_desktop_user_agent) .await?; if resp.url().path().starts_with(ZOOM_PASSWORD_URL_PREFIX) { return Err("Zoom still wants a password even though we already supplied it"); } resp } else { share_resp }; let resp_html = video_resp .text() .await .map_err(|_| "Unable to get response text")?; let video_url_regex = regex::Regex::new("viewMp4Url:[\\s]*\'([^\']*)\'").expect("Unable to parse regex"); let url = Url::parse( video_url_regex .captures(&resp_html) .ok_or("Parse error")? .get(1) .ok_or("Parse error")? .as_str(), ) .map_err(|_| "Unable to parse conference download URL")?; Ok(url) } }
use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::time::SystemTime; use async_trait::async_trait; use futures_util::future; use reqwest::header::REFERER; use reqwest::{Method, Url}; use scraper::{Html, Selector}; use serde::Deserialize; use crate::resource; use crate::resource::{OverwriteMode, OverwriteResult, Resource}; use crate::util::{parse_time, sanitise_filename}; use crate::{Api, ApiData, Result}; const ZOOM_VALIDATE_MEETING_PASSWORD_URL: &str = "https://nus-sg.zoom.us/rec/validate_meet_passwd"; const ZOOM_PASSWORD_URL_PREFIX: &str = "/rec/share"; const ZOOM_DOWNLOAD_REFERER_URL: &str = "https://nus-sg.zoom.us/"; #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct Conference { id: String, name: String, start_date: String, #[serde(rename = "isPublishRecordURL")] is_publish_record_url: bool, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct CloudRecord { code: Option<u32>, record_instances: Option<Vec<CloudRecordInstance>>, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct CloudRecordInstance { #[serde(rename = "shareURL")] share_url: String, password: String, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct ZoomValidationResponse { status: bool, } pub struct ConferencingHandle { id: String, path: PathBuf, } #[derive(Debug)] pub struct ZoomRecording { id: String, path: PathBuf, share_url: String, password: String, start_date: SystemTime, } impl ConferencingHandle { pub fn new(id: String, path: PathBuf) -> ConferencingHandle { ConferencingHandle { id, path } } pub async fn load(self, api: &Api) -> Result<Vec<ZoomRecording>> { let conferencing_resp = api .api_as_json::<ApiData<Vec<Conference>>>( &format!( "zoom/Meeting/{}/Meetings?offset=0&sortby=startDate%20asc&populate=null", self.id ), Method::GET, None, ) .await?; match conferencing_resp.data { Some(conferences) => future::join_all( conferences .into_iter() .filter(|c| c.is_publish_record_url) 
.map(|c| load_cloud_record(api, c, &self.path)), ) .awai
} async fn load_cloud_record( api: &Api, conference: Conference, path: &Path, ) -> Result<Vec<ZoomRecording>> { let request_path = format!("zoom/Meeting/{}/cloudrecord", conference.id); let mut num_404_tries = 0; let cloud_record = loop { let cloud_record = api .api_as_json::<CloudRecord>(&request_path, Method::GET, None) .await?; if cloud_record.code != Some(400) && (cloud_record.code != Some(404) || num_404_tries >= 5) { break cloud_record; } if cloud_record.code == Some(404) { num_404_tries += 1; } }; let start_date = parse_time(&conference.start_date); let mut conference_id = conference.id; let conference_name: &str = &conference.name; match cloud_record.record_instances { Some(record_instances) => Ok(match record_instances.len() { 0 => vec![], 1 => record_instances .into_iter() .map(|cri| ZoomRecording { id: std::mem::take(&mut conference_id), path: path.join(make_mp4_extension(Path::new(&sanitise_filename( conference_name, )))), share_url: cri.share_url, password: cri.password, start_date, }) .collect::<Vec<_>>(), _ => record_instances .into_iter() .enumerate() .map(|(i, cri)| ZoomRecording { id: conference_id.clone(), path: path.join(make_mp4_extension(Path::new(&append_number( &sanitise_filename(conference_name), i + 1, )))), share_url: cri.share_url, password: cri.password, start_date, }) .collect::<Vec<_>>(), }), None => Ok(vec![]), } } fn make_mp4_extension(path: &Path) -> PathBuf { path.with_extension("mp4") } fn append_number(text: &str, number: usize) -> String { format!("{} ({})", text, number) } #[async_trait(?Send)] impl Resource for ZoomRecording { fn id(&self) -> &str { &self.id } fn path(&self) -> &Path { &self.path } fn path_mut(&mut self) -> &mut PathBuf { &mut self.path } fn last_updated(&self) -> SystemTime { self.start_date } async fn download( &self, api: &Api, destination: &Path, temp_destination: &Path, overwrite: OverwriteMode, ) -> Result<OverwriteResult> { resource::do_retryable_download( api, destination, temp_destination, overwrite, 
self.last_updated(), move |api| self.get_download_url(api), move |api, url, temp_destination| { resource::download_chunks(api, url, temp_destination, |req| { Api::add_desktop_user_agent(req) .header(reqwest::header::RANGE, "bytes=0-") .header(reqwest::header::REFERER, ZOOM_DOWNLOAD_REFERER_URL) }) }, ) .await } } impl ZoomRecording { async fn get_download_url(&self, api: &Api) -> Result<Url> { let share_url = Url::parse(&self.share_url).map_err(|_| "Unable to parse share URL")?; let share_resp = api .custom_request( share_url.clone(), Method::GET, None, Api::add_desktop_user_agent, ) .await?; let video_resp = if share_resp .url() .path() .starts_with(ZOOM_PASSWORD_URL_PREFIX) { let cloned_share_resp_url = share_resp.url().to_string(); let html = share_resp .text() .await .map_err(|_| "Unable to get HTML response")?; let document = Html::parse_document(&html); let id_selector = Selector::parse("#meetId").unwrap(); let mut form: HashMap<&str, &str> = HashMap::new(); form.insert( "id", document .select(&id_selector) .next() .and_then(|el| el.value().attr("value")) .ok_or("Unable to find conference id")?, ); form.insert("passwd", &self.password); form.insert("action", "viewdetailpage"); form.insert("recaptcha", ""); let validate_resp = api .custom_request( Url::parse(ZOOM_VALIDATE_MEETING_PASSWORD_URL) .expect("Unable to parse Zoom validation URL"), Method::POST, Some(&form), move |req| { Api::add_desktop_user_agent(req) .header(REFERER, cloned_share_resp_url.as_str()) }, ) .await?; let validate_resp_data = validate_resp .json::<ZoomValidationResponse>() .await .map_err(|_| "Unable to parse response JSON from Zoom validation")?; if !validate_resp_data.status { return Err("Recording password was rejected by Zoom"); } let resp = api .custom_request(share_url, Method::GET, None, Api::add_desktop_user_agent) .await?; if resp.url().path().starts_with(ZOOM_PASSWORD_URL_PREFIX) { return Err("Zoom still wants a password even though we already supplied it"); } resp } else { 
share_resp }; let resp_html = video_resp .text() .await .map_err(|_| "Unable to get response text")?; let video_url_regex = regex::Regex::new("viewMp4Url:[\\s]*\'([^\']*)\'").expect("Unable to parse regex"); let url = Url::parse( video_url_regex .captures(&resp_html) .ok_or("Parse error")? .get(1) .ok_or("Parse error")? .as_str(), ) .map_err(|_| "Unable to parse conference download URL")?; Ok(url) } }
t .into_iter() .collect::<Result<Vec<_>>>() .map(|v| v.into_iter().flatten().collect::<Vec<_>>()), None => Err("Invalid API response from server: type mismatch"), } }
function_block-function_prefixed
[ { "content": "pub fn sanitise_filename(name: &str) -> String {\n\n if cfg!(windows) {\n\n sanitize_filename::sanitize_with_options(\n\n name.trim(),\n\n sanitize_filename::Options {\n\n windows: true,\n\n truncate: true,\n\n replacement: \"-\",\n\n },\n\n )\n\n } else {\n\n name.trim().replace(\"\\0\", \"-\").replace(\"/\", \"-\")\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 0, "score": 165328.26247155428 }, { "content": "fn full_api_url(path: &str) -> Url {\n\n Url::parse(API_BASE_URL)\n\n .and_then(|u| u.join(path))\n\n .expect(\"Unable to join URL's\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 164503.45551952985 }, { "content": "pub fn extract_video_url_from_document(html: &str) -> Result<Url> {\n\n let document = Html::parse_document(html);\n\n let selector = Selector::parse(r#\"meta[property=\"og:video\"]\"#).unwrap();\n\n\n\n let url_str = document\n\n .select(&selector)\n\n .next()\n\n .and_then(|element| element.value().attr(\"content\"))\n\n .ok_or(\"Unable to find video URL\")?;\n\n\n\n Url::parse(url_str).map_err(|_| \"Unable to parse video URL\")\n\n}\n", "file_path": "src/panopto.rs", "rank": 2, "score": 156020.46795506292 }, { "content": "// Makes the paths of all the given files unique, based on the last updated time and the id.\n\n// This function will also sort the files.\n\npub fn sort_and_make_all_paths_unique<T: Resource>(resources: &mut [T]) {\n\n resources.sort_unstable_by(|r1, r2| {\n\n r1.path()\n\n .cmp(&r2.path())\n\n .then_with(|| r1.last_updated().cmp(&r2.last_updated()).reverse())\n\n });\n\n // todo: This is not very right... 
conferences will append \"(1)\" or \"(2)\" etc if there are multiple links.\n\n resources.iter_mut().fold(Path::new(\"\"), |path, r| {\n\n if path == r.path() {\n\n let mut new_name = r.path().file_stem().map_or_else(OsString::new, |n| {\n\n let mut new_name = n.to_owned();\n\n new_name.push(\"_\");\n\n new_name\n\n });\n\n new_name.push(r.id());\n\n r.path().extension().map(|e| {\n\n new_name.push(\".\");\n\n new_name.push(e);\n\n });\n\n r.path_mut().set_file_name(new_name);\n", "file_path": "src/resource.rs", "rank": 4, "score": 99961.25817855909 }, { "content": "fn build_token_form<'a>(code: &'a str) -> HashMap<&'static str, &'a str> {\n\n let mut map = HashMap::new();\n\n map.insert(\"grant_type\", \"authorization_code\");\n\n map.insert(\"client_id\", ADFS_CLIENT_ID);\n\n map.insert(\"resource\", ADFS_RESOURCE_TYPE);\n\n map.insert(\"code\", code);\n\n map.insert(\"redirect_uri\", ADFS_REDIRECT_URI);\n\n map\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 99325.3459018751 }, { "content": "// TODO: check file extension?\n\nfn make_mp4_extension(path: &Path) -> PathBuf {\n\n path.with_extension(\"mp4\")\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Resource for InternalVideo {\n\n fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n fn path(&self) -> &Path {\n\n &self.path\n\n }\n\n fn path_mut(&mut self) -> &mut PathBuf {\n\n &mut self.path\n\n }\n\n\n\n fn last_updated(&self) -> SystemTime {\n\n self.last_updated\n\n }\n", "file_path": "src/multimedia/mod.rs", "rank": 6, "score": 98836.73352867452 }, { "content": "fn build_auth_form<'a>(username: &'a str, password: &'a str) -> HashMap<&'static str, &'a str> {\n\n let mut map = HashMap::new();\n\n map.insert(\"UserName\", username);\n\n map.insert(\"Password\", password);\n\n map.insert(\"AuthMethod\", \"FormsAuthentication\");\n\n map\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 96629.53100756266 }, { "content": "pub fn parse_time(time: &str) -> SystemTime {\n\n SystemTime::from(\n\n 
chrono::DateTime::<chrono::FixedOffset>::parse_from_rfc3339(time)\n\n .expect(\"Failed to parse last updated time\"),\n\n )\n\n}\n", "file_path": "src/util.rs", "rank": 8, "score": 95356.40964612771 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Name {\n\n user_name_original: String,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 88565.17526750662 }, { "content": "fn build_auth_url() -> Url {\n\n let nonce = generate_random_bytes(16);\n\n let mut url = Url::parse(ADFS_OAUTH2_URL).expect(\"Unable to parse ADFS URL\");\n\n url.query_pairs_mut()\n\n .append_pair(\"response_type\", \"code\")\n\n .append_pair(\"client_id\", ADFS_CLIENT_ID)\n\n .append_pair(\"state\", &nonce)\n\n .append_pair(\"redirect_uri\", ADFS_REDIRECT_URI)\n\n .append_pair(\"scope\", \"\")\n\n .append_pair(\"resource\", ADFS_RESOURCE_TYPE)\n\n .append_pair(\"nonce\", &nonce);\n\n url\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 86166.12436743942 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct ApiData<T> {\n\n data: Option<T>,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 12, "score": 79031.69653747407 }, { "content": "fn build_client() -> Result<Client> {\n\n Client::builder()\n\n .http1_title_case_headers()\n\n .cookie_store(true)\n\n .add_root_certificate(hack_get_intermediate_cert()?)\n\n .redirect(Policy::custom(|attempt| {\n\n if attempt.previous().len() > 5 {\n\n attempt.error(\"too many redirects\")\n\n } else {\n\n attempt.follow()\n\n }\n\n }))\n\n .build()\n\n .map_err(|_| \"Unable to create HTTP client\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 74398.15282153529 }, { "content": "fn main() -> std::io::Result<()> {\n\n if cfg!(target_os = \"windows\") {\n\n // We need to set the 'longPathAware' manifest key, so that file paths with length >260 chars will work.\n\n // This happens sometimes since we encode IDs for duplicate files.\n\n let mut res = winres::WindowsResource::new();\n\n 
res.set_manifest(\n\n r#\"<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?>\n\n<assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\" xmlns:asmv3=\"urn:schemas-microsoft-com:asm.v3\">\n\n<application xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n\n <windowsSettings xmlns:ws2=\"http://schemas.microsoft.com/SMI/2016/WindowsSettings\">\n\n <ws2:longPathAware>true</ws2:longPathAware>\n\n </windowsSettings>\n\n</application>\n\n</assembly>\"#);\n\n res.compile()?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 14, "score": 73814.48426027983 }, { "content": "#[async_trait(?Send)]\n\npub trait Resource {\n\n fn id(&self) -> &str;\n\n fn path(&self) -> &Path;\n\n fn path_mut(&mut self) -> &mut PathBuf;\n\n fn last_updated(&self) -> SystemTime;\n\n async fn download(\n\n &self,\n\n api: &Api,\n\n destination: &Path,\n\n temp_destination: &Path,\n\n overwrite: OverwriteMode,\n\n ) -> Result<OverwriteResult>;\n\n}\n\n\n", "file_path": "src/resource.rs", "rank": 15, "score": 73244.13027565206 }, { "content": "fn hack_get_intermediate_cert() -> Result<Certificate> {\n\n Certificate::from_pem(include_bytes!(\"DigiCert_TLS_RSA_SHA256_2020_CA1.pem\"))\n\n .map_err(|_| \"Unable to load TLS intermediate certificate\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 70451.698127291 }, { "content": "#[async_trait(?Send)]\n\npub trait SimpleDownloadableResource {\n\n fn id(&self) -> &str;\n\n fn path(&self) -> &Path;\n\n fn path_mut(&mut self) -> &mut PathBuf;\n\n fn last_updated(&self) -> SystemTime;\n\n async fn get_download_url(&self, api: &Api) -> Result<Url>;\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl<T: SimpleDownloadableResource> Resource for T {\n\n fn id(&self) -> &str {\n\n self.id()\n\n }\n\n\n\n fn path(&self) -> &Path {\n\n self.path()\n\n }\n\n fn path_mut(&mut self) -> &mut PathBuf {\n\n self.path_mut()\n\n }\n", "file_path": "src/resource.rs", "rank": 17, "score": 68613.23793757783 }, { "content": "fn 
generate_random_bytes(size: usize) -> String {\n\n (0..size)\n\n .map(|_| format!(\"{:02x}\", rand::random::<u8>()))\n\n .collect()\n\n}\n\n\n\nasync fn infinite_retry_http<F>(\n\n client: &Client,\n\n url: Url,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n edit_request: F,\n\n) -> Result<Response>\n\nwhere\n\n F: (Fn(RequestBuilder) -> RequestBuilder),\n\n{\n\n let form = if let Some(form) = form {\n\n Some(serde_urlencoded::to_string(form).map_err(|_| \"Failed to serialise HTTP form\")?)\n\n } else {\n\n None\n", "file_path": "src/lib.rs", "rank": 18, "score": 67728.46695709306 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ApiFileDirectory {\n\n id: String,\n\n name: String,\n\n file_name: Option<String>,\n\n allow_upload: Option<bool>,\n\n creator_name: Option<String>,\n\n last_updated_date: String,\n\n}\n\n\n\npub struct DirectoryHandle {\n\n id: String,\n\n path: PathBuf,\n\n allow_upload: bool,\n\n /* last_updated: SystemTime, */\n\n}\n\n\n\npub struct File {\n\n id: String,\n\n path: PathBuf,\n\n last_updated: SystemTime,\n", "file_path": "src/file.rs", "rank": 21, "score": 61871.995786055 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Access {\n\n #[serde(rename = \"access_Full\")]\n\n full: bool,\n\n #[serde(rename = \"access_Read\")]\n\n read: bool,\n\n #[serde(rename = \"access_Create\")]\n\n create: bool,\n\n #[serde(rename = \"access_Update\")]\n\n update: bool,\n\n #[serde(rename = \"access_Delete\")]\n\n delete: bool,\n\n #[serde(rename = \"access_Settings_Read\")]\n\n settings_read: bool,\n\n #[serde(rename = \"access_Settings_Update\")]\n\n settings_update: bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Announcement {\n\n pub title: String,\n", "file_path": "src/module.rs", "rank": 22, "score": 41849.860888022624 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Term {\n\n term_detail: TermDetail,\n\n}\n\n\n", 
"file_path": "src/lib.rs", "rank": 23, "score": 41849.70665155016 }, { "content": "#[derive(Deserialize)]\n\nstruct TermDetail {\n\n term: String,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 40452.29023026225 }, { "content": "#[derive(Deserialize)]\n\nstruct TokenResponse {\n\n access_token: String,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 40452.29023026225 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct WebLectureResponse {\n\n id: String,\n\n name: String,\n\n}\n\n\n", "file_path": "src/weblecture.rs", "rank": 27, "score": 39205.3533251972 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct WebLectureMedia {\n\n id: String,\n\n name: String,\n\n last_updated_date: String,\n\n}\n\n\n\npub struct WebLectureHandle {\n\n id: String,\n\n path: PathBuf,\n\n}\n\n\n\npub struct WebLectureVideo {\n\n module_id: String,\n\n id: String,\n\n path: PathBuf,\n\n last_updated: SystemTime,\n\n}\n\n\n\nimpl WebLectureHandle {\n\n pub fn new(id: String, path: PathBuf) -> WebLectureHandle {\n", "file_path": "src/weblecture.rs", "rank": 29, "score": 39205.3533251972 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct InternalMedia {\n\n id: String,\n\n name: String,\n\n last_updated_date: String,\n\n // used to download the stream\n\n stream_url_path: Option<String>, // Not all multimedia items are videos\n\n}\n\n\n\npub struct MultimediaHandle {\n\n id: String,\n\n path: PathBuf,\n\n}\n\n\n\npub struct InternalVideo {\n\n id: String,\n\n stream_url_path: String,\n\n path: PathBuf,\n\n last_updated: SystemTime,\n\n}\n\n\n", "file_path": "src/multimedia/mod.rs", "rank": 30, "score": 39205.3533251972 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct PanoptoQueryParameter {\n\n key: String,\n\n value: String,\n\n}\n\n\n\npub async fn launch(api: &Api, 
api_path: &str) -> Result<Response> {\n\n let query_params = api\n\n .api_as_json::<Option<PanoptoRequestConstructionDetails>>(api_path, Method::GET, None)\n\n .await?\n\n .ok_or(\"Invalid API response from server: type mismatch\")?;\n\n\n\n let url =\n\n Url::parse(&query_params.launch_url).map_err(|_| \"Unable to parse Panopto launch URL\")?;\n\n\n\n let form: HashMap<&str, &str> = query_params\n\n .data_items\n\n .iter()\n\n .map(|item| (item.key.as_str(), item.value.as_str()))\n\n .collect();\n\n\n\n api.custom_request(url, Method::POST, Some(&form), Api::add_desktop_user_agent)\n\n .await\n\n}\n\n\n", "file_path": "src/panopto.rs", "rank": 31, "score": 39205.30681546772 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct PanoptoRequestConstructionDetails {\n\n #[serde(rename = \"launchURL\")]\n\n launch_url: String,\n\n data_items: Vec<PanoptoQueryParameter>,\n\n}\n\n\n", "file_path": "src/panopto.rs", "rank": 32, "score": 38086.41912431564 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ExternalMultimediaResponse {\n\n d: ExternalMultimediaResponseResponse,\n\n}\n\n\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 33, "score": 37076.55240914626 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ExternalMultimediaRequest {\n\n pub query_parameters: ExternalMultimediaRequestQueryParameters,\n\n}\n\n\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 34, "score": 37073.65776112414 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\nstruct ExternalMultimediaIndividualResponse {\n\n #[serde(rename = \"DeliveryID\")]\n\n delivery_id: String,\n\n viewer_url: String,\n\n session_name: String,\n\n}\n\n\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 35, "score": 36160.54733213729 }, { "content": "#[derive(Debug, 
Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\nstruct ExternalMultimediaResponseResponse {\n\n results: Vec<ExternalMultimediaIndividualResponse>,\n\n}\n\n\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 36, "score": 36160.54733213729 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ExternalMultimediaRequestQueryParameters {\n\n #[serde(rename = \"folderID\")]\n\n pub folder_id: String,\n\n}\n\n\n\npub struct ExternalVideo {\n\n id: String,\n\n html_url: String,\n\n path: PathBuf,\n\n}\n\n\n\npub(super) async fn load_external_channel(\n\n api: &Api,\n\n channel: Channel,\n\n path: &Path,\n\n) -> Result<Vec<ExternalVideo>> {\n\n let channel_path = path.join(Path::new(&sanitise_filename(&channel.name)));\n\n\n\n let response = panopto::launch(\n\n api,\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 37, "score": 35325.852613015275 }, { "content": "}\n\n\n\npub enum RetryableError {\n\n Retry(Error),\n\n Fail(Error),\n\n}\n\n\n\npub type RetryableResult<T> = std::result::Result<T, RetryableError>;\n\n\n\npub async fn do_retryable_download<\n\n 'a,\n\n F1: FnOnce(&'a Api) -> Fut1 + 'a,\n\n Fut1: Future<Output = Result<C>>,\n\n F2: Fn(&'a Api, C, &'a Path) -> Fut2 + 'a,\n\n Fut2: Future<Output = RetryableResult<()>>,\n\n C: Clone,\n\n>(\n\n api: &'a Api,\n\n destination: &Path,\n\n temp_destination: &'a Path,\n", "file_path": "src/resource.rs", "rank": 38, "score": 26427.208026150576 }, { "content": "use std::ffi::OsString;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::SystemTime;\n\n\n\nuse async_trait::async_trait;\n\nuse futures_util::future::Future;\n\nuse reqwest::{RequestBuilder, Url};\n\nuse tokio::io::AsyncWriteExt;\n\n\n\nuse crate::{Api, Error, Result};\n\n\n\n#[async_trait(?Send)]\n", "file_path": "src/resource.rs", "rank": 39, "score": 26424.835789346613 }, { "content": " .map_err(|_| \"Failed renaming existing file\")?;\n\n Ok((true, 
OverwriteResult::Renamed { renamed_path })) // do download, because we renamed the old file\n\n }\n\n }\n\n }\n\n}\n\n\n\nasync fn infinite_retry_download<\n\n 'a,\n\n F: Fn(&'a Api, C, &'a Path) -> Fut + 'a,\n\n Fut: Future<Output = RetryableResult<()>>,\n\n C: Clone,\n\n>(\n\n api: &'a Api,\n\n before_download_data: C,\n\n destination: &Path,\n\n temp_destination: &'a Path,\n\n download_file: F,\n\n) -> Result<()> {\n\n loop {\n", "file_path": "src/resource.rs", "rank": 40, "score": 26423.16229325387 }, { "content": "\n\n fn last_updated(&self) -> SystemTime {\n\n self.last_updated()\n\n }\n\n\n\n async fn download(\n\n &self,\n\n api: &Api,\n\n destination: &Path,\n\n temp_destination: &Path,\n\n overwrite: OverwriteMode,\n\n ) -> Result<OverwriteResult> {\n\n do_retryable_download(\n\n api,\n\n destination,\n\n temp_destination,\n\n overwrite,\n\n self.last_updated(),\n\n move |api| self.get_download_url(api),\n\n move |api, url, temp_destination| {\n\n download_chunks(api, url, temp_destination, move |req| req)\n\n },\n\n )\n\n .await\n\n }\n\n}\n\n\n", "file_path": "src/resource.rs", "rank": 41, "score": 26422.05982844604 }, { "content": " .await?;\n\n\n\n // set the last modified time manually to the time we got from the server,\n\n // so that in case our local machine has unsynced time, or the file got updated while we are downloading it,\n\n // we will be able to update the file the next time we attempt to download it\n\n filetime::set_file_mtime(\n\n destination,\n\n filetime::FileTime::from_system_time(last_updated),\n\n )\n\n .map_err(|_| \"Unable to set last modified time\")?;\n\n }\n\n Ok(result)\n\n}\n\n\n\npub async fn download_chunks<F>(\n\n api: &Api,\n\n download_url: reqwest::Url,\n\n temp_destination: &Path,\n\n edit_request: F,\n\n) -> RetryableResult<()>\n", "file_path": "src/resource.rs", "rank": 42, "score": 26420.24031015285 }, { "content": " Ok(())\n\n}\n\n\n\nasync fn prepare_path(\n\n path: &Path,\n\n overwrite: OverwriteMode,\n\n 
last_updated: SystemTime,\n\n) -> Result<(bool, OverwriteResult)> {\n\n let metadata = tokio::fs::metadata(path).await;\n\n if let Err(e) = metadata {\n\n return match e.kind() {\n\n std::io::ErrorKind::NotFound => Ok((true, OverwriteResult::NewFile)), // do download, because file does not already exist\n\n std::io::ErrorKind::PermissionDenied => {\n\n Err(\"Permission denied when retrieving file metadata\")\n\n }\n\n _ => Err(\"Unable to retrieve file metadata\"),\n\n };\n\n }\n\n let old_time = metadata\n\n .unwrap()\n", "file_path": "src/resource.rs", "rank": 43, "score": 26419.559068653958 }, { "content": " overwrite: OverwriteMode,\n\n last_updated: SystemTime,\n\n before_download_file: F1,\n\n download_file: F2,\n\n) -> Result<OverwriteResult> {\n\n let (should_download, result) = prepare_path(destination, overwrite, last_updated).await?;\n\n if should_download {\n\n let before_download_data = before_download_file(api).await?;\n\n if let Some(parent) = destination.parent() {\n\n tokio::fs::create_dir_all(parent)\n\n .await\n\n .map_err(|_| \"Unable to create directory\")?;\n\n };\n\n infinite_retry_download(\n\n api,\n\n before_download_data,\n\n destination,\n\n temp_destination,\n\n download_file,\n\n )\n", "file_path": "src/resource.rs", "rank": 44, "score": 26417.100558535873 }, { "content": " .modified()\n\n .map_err(|_| \"File system does not support last modified time\")?;\n\n if last_updated <= old_time {\n\n Ok((false, OverwriteResult::AlreadyHave)) // don't download, because we already have updated file\n\n } else {\n\n match overwrite {\n\n OverwriteMode::Skip => Ok((false, OverwriteResult::Skipped)), // don't download, because user wants to skip updated files\n\n OverwriteMode::Overwrite => Ok((true, OverwriteResult::Overwritten)), // do download, because user wants to overwrite updated files\n\n OverwriteMode::Rename => {\n\n let mut new_stem = path\n\n .file_stem()\n\n .expect(\"File does not have name\")\n\n .to_os_string();\n\n let date = 
chrono::DateTime::<chrono::Local>::from(old_time).date();\n\n use chrono::Datelike;\n\n new_stem.push(format!(\n\n \"_autorename_{:04}-{:02}-{:02}\",\n\n date.year(),\n\n date.month(),\n\n date.day()\n", "file_path": "src/resource.rs", "rank": 45, "score": 26416.7248041528 }, { "content": " path\n\n } else {\n\n r.path().as_ref()\n\n }\n\n });\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum OverwriteMode {\n\n Skip,\n\n Overwrite,\n\n Rename,\n\n}\n\n\n\npub enum OverwriteResult {\n\n NewFile,\n\n AlreadyHave,\n\n Skipped,\n\n Overwritten,\n\n Renamed { renamed_path: PathBuf },\n", "file_path": "src/resource.rs", "rank": 46, "score": 26416.67845991183 }, { "content": "where\n\n F: (Fn(RequestBuilder) -> RequestBuilder),\n\n{\n\n let mut file = tokio::fs::File::create(temp_destination)\n\n .await\n\n .map_err(|_| RetryableError::Fail(\"Unable to open temporary file\"))?;\n\n let mut res = edit_request(api.get_client().get(download_url))\n\n .send()\n\n .await\n\n .map_err(|_| RetryableError::Retry(\"Failed during download\"))?;\n\n while let Some(chunk) = res\n\n .chunk()\n\n .await\n\n .map_err(|_| RetryableError::Retry(\"Failed during streaming\"))?\n\n .as_deref()\n\n {\n\n file.write_all(chunk)\n\n .await\n\n .map_err(|_| RetryableError::Fail(\"Failed writing to disk\"))?;\n\n }\n", "file_path": "src/resource.rs", "rank": 47, "score": 26415.30507807449 }, { "content": " ));\n\n let path_extension = path.extension();\n\n let mut i = 0;\n\n let mut suffixed_stem = new_stem.clone();\n\n let renamed_path = loop {\n\n let renamed_path_without_ext = path.with_file_name(suffixed_stem);\n\n let renamed_path = if let Some(ext) = path_extension {\n\n renamed_path_without_ext.with_extension(ext)\n\n } else {\n\n renamed_path_without_ext\n\n };\n\n if !renamed_path.exists() {\n\n break renamed_path;\n\n }\n\n i += 1;\n\n suffixed_stem = new_stem.clone();\n\n suffixed_stem.push(format!(\"_{}\", i));\n\n };\n\n tokio::fs::rename(path, renamed_path.clone())\n\n .await\n", 
"file_path": "src/resource.rs", "rank": 48, "score": 26414.834164610245 }, { "content": " match download_file(api, before_download_data.clone(), temp_destination).await {\n\n Ok(_) => {\n\n tokio::fs::rename(temp_destination, destination)\n\n .await\n\n .map_err(|_| \"Unable to move temporary file\")?;\n\n break;\n\n }\n\n Err(err) => {\n\n let success = tokio::fs::remove_file(temp_destination).await.is_ok();\n\n match err {\n\n RetryableError::Retry(_) => {\n\n if !success {\n\n return Err(\"Unable to delete temporary file\");\n\n }\n\n /* retry */\n\n }\n\n RetryableError::Fail(err) => {\n\n // return the underlying error (perhaps explaining why the file can't be created)\n\n return Err(err);\n\n }\n\n }\n\n }\n\n };\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/resource.rs", "rank": 49, "score": 26414.538109829162 }, { "content": "use std::path::{Path, PathBuf};\n\nuse std::time::SystemTime;\n\n\n\nuse async_trait::async_trait;\n\nuse futures_util::future;\n\nuse reqwest::Method;\n\nuse serde::Deserialize;\n\nuse tokio::process::Command;\n\n\n\nuse crate::resource;\n\nuse crate::resource::{OverwriteMode, OverwriteResult, Resource, RetryableError, RetryableResult};\n\nuse crate::util::{parse_time, sanitise_filename};\n\nuse crate::{Api, ApiData, Result};\n\n\n\nmod external_multimedia;\n\npub use external_multimedia::ExternalVideo;\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Channel {\n\n pub id: String,\n\n pub name: String,\n\n pub is_external_tool: bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/multimedia/mod.rs", "rank": 50, "score": 25.09512392919783 }, { "content": " selected_modules.dedup_by(|other, latest| if other.code == latest.code {\n\n println!(\"Warning: module {} appeared in more than one semester, only latest semester will be retrieved\", other.code);\n\n true\n\n } else {\n\n false\n\n });\n\n Ok(selected_modules)\n\n } else {\n\n 
Err(\"Invalid API response from server: type mismatch\")\n\n }\n\n }\n\n\n\n pub async fn name(&self) -> Result<String> {\n\n Ok(self\n\n .api_as_json::<Name>(\"user/Profile\", Method::GET, None)\n\n .await?\n\n .user_name_original)\n\n }\n\n\n\n pub async fn with_login<'a>(username: &str, password: &str) -> Result<Api> {\n", "file_path": "src/lib.rs", "rank": 51, "score": 24.473485355881174 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Api {\n\n jwt: String,\n\n client: Client,\n\n ffmpeg_path: String,\n\n}\n\n\n\nimpl Api {\n\n pub fn get_client(&self) -> &Client {\n\n &self.client\n\n }\n\n\n\n async fn api_as_json<T: DeserializeOwned + 'static>(\n\n &self,\n\n path: &str,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n ) -> Result<T> {\n\n let res = self.api(path, method, form).await?;\n", "file_path": "src/lib.rs", "rank": 52, "score": 23.51260651679687 }, { "content": " if archived { \"Archived\" } else { \"NonArchived\" },\n\n self.id\n\n );\n\n let api_data = api\n\n .api_as_json::<ApiData<Vec<Announcement>>>(&path, Method::GET, None)\n\n .await?;\n\n if let Some(announcements) = api_data.data {\n\n Ok(announcements)\n\n } else {\n\n Err(\"Invalid API response from server: type mismatch\")\n\n }\n\n }\n\n\n\n pub fn workbin_root<F: FnOnce(&str) -> PathBuf>(&self, make_path: F) -> DirectoryHandle {\n\n DirectoryHandle::new(self.id.clone(), make_path(&sanitise_filename(&self.code)))\n\n }\n\n\n\n pub fn multimedia_root<F: FnOnce(&str) -> PathBuf>(&self, make_path: F) -> MultimediaHandle {\n\n MultimediaHandle::new(self.id.clone(), make_path(&sanitise_filename(&self.code)))\n\n }\n", "file_path": "src/module.rs", "rank": 55, "score": 20.272572880062434 }, { "content": " pub description: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Module {\n\n pub id: String,\n\n #[serde(rename = \"name\")]\n\n pub code: String,\n\n #[serde(rename = \"courseName\")]\n\n pub name: String,\n\n access: Option<Access>,\n\n pub term: 
String,\n\n}\n\n\n\nimpl Module {\n\n pub fn is_teaching(&self) -> bool {\n\n self.access\n\n .as_ref()\n\n .map(|access| {\n\n access.full\n", "file_path": "src/module.rs", "rank": 56, "score": 20.145719509610682 }, { "content": "}\n\n\n\nimpl DirectoryHandle {\n\n pub fn new(id: String, path: PathBuf) -> DirectoryHandle {\n\n DirectoryHandle {\n\n id,\n\n path,\n\n allow_upload: false,\n\n }\n\n }\n\n\n\n // loads all files recursively and returns a flattened list\n\n pub fn load<'a>(\n\n self,\n\n api: &'a Api,\n\n include_uploadable: bool,\n\n ) -> BoxFuture<'a, Result<Vec<File>>> {\n\n debug_assert!(include_uploadable || !self.allow_upload);\n\n\n\n async move {\n", "file_path": "src/file.rs", "rank": 57, "score": 19.60959018269192 }, { "content": "use std::path::{Path, PathBuf};\n\nuse std::time::SystemTime;\n\n\n\nuse async_trait::async_trait;\n\nuse reqwest::{Method, Url};\n\nuse serde::Deserialize;\n\n\n\nuse crate::panopto;\n\nuse crate::resource::SimpleDownloadableResource;\n\nuse crate::util::{parse_time, sanitise_filename};\n\nuse crate::{Api, ApiData, Result};\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/weblecture.rs", "rank": 58, "score": 19.204442807021348 }, { "content": " }\n\n\n\n pub fn with_ffmpeg<S: Into<String>>(self: Api, ffmpeg_path: S) -> Api {\n\n Api {\n\n jwt: self.jwt,\n\n client: self.client,\n\n ffmpeg_path: ffmpeg_path.into(),\n\n }\n\n }\n\n}\n\n\n\nasync fn zoom_signin_get_saml_request(client: &Client) -> Result<(String, String)> {\n\n let resp = infinite_retry_http(\n\n client,\n\n Url::parse(ZOOM_SIGNIN_URL).expect(\"Unable to parse Zoom URL\"),\n\n Method::GET,\n\n None,\n\n move |req| req.header(REFERER, ZOOM_REFERER_URL),\n\n )\n\n .await?;\n", "file_path": "src/lib.rs", "rank": 59, "score": 18.9674848781974 }, { "content": " res.json::<T>()\n\n .await\n\n .map_err(|_| \"Unable to deserialize JSON\")\n\n /*let res = self.api(path, method, form).await?;\n\n let text = 
res.text().await.map_err(|_| \"Unable to get text\")?;\n\n println!(\"{}\", text.as_str());\n\n serde_json::from_str(&text).map_err(|_| \"Unable to deserialize JSON\")*/\n\n }\n\n\n\n pub async fn api(\n\n &self,\n\n path: &str,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n ) -> Result<Response> {\n\n let url = full_api_url(path);\n\n\n\n infinite_retry_http(&self.client, url, method, form, move |req| {\n\n req.header(OCP_APIM_SUBSCRIPTION_KEY_HEADER, OCP_APIM_SUBSCRIPTION_KEY)\n\n .bearer_auth(self.jwt.as_str())\n", "file_path": "src/lib.rs", "rank": 60, "score": 18.70908143820955 }, { "content": "use std::path::{Path, PathBuf};\n\nuse std::time::SystemTime;\n\n\n\nuse async_trait::async_trait;\n\nuse reqwest::{Method, Url};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::multimedia::Channel;\n\nuse crate::panopto;\n\nuse crate::resource::SimpleDownloadableResource;\n\nuse crate::util::sanitise_filename;\n\nuse crate::{Api, Result};\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 61, "score": 18.459557001360594 }, { "content": "use std::path::{Path, PathBuf};\n\nuse std::time::SystemTime;\n\n\n\nuse async_trait::async_trait;\n\nuse futures_util::future;\n\nuse futures_util::future::{BoxFuture, FutureExt};\n\nuse reqwest::{Method, Url};\n\nuse serde::Deserialize;\n\n\n\nuse crate::resource::SimpleDownloadableResource;\n\nuse crate::util::{parse_time, sanitise_filename};\n\nuse crate::{Api, ApiData, Result};\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/file.rs", "rank": 62, "score": 18.2728218736796 }, { "content": " WebLectureHandle { id, path }\n\n }\n\n\n\n pub async fn load(self, api: &Api) -> Result<Vec<WebLectureVideo>> {\n\n let weblecture_resp = api\n\n .api_as_json::<WebLectureResponse>(\n\n &format!(\"weblecture/?ParentID={}\", self.id),\n\n Method::GET,\n\n None,\n\n )\n\n 
.await;\n\n\n\n match weblecture_resp {\n\n Ok(weblecture) => {\n\n let weblectures_resp = api\n\n .api_as_json::<ApiData<Vec<WebLectureMedia>>>(\n\n &format!(\"weblecture/{}/sessions\", weblecture.id),\n\n Method::GET,\n\n None,\n\n )\n", "file_path": "src/weblecture.rs", "rank": 63, "score": 18.231473929718916 }, { "content": "\n\n async fn download(\n\n &self,\n\n api: &Api,\n\n destination: &Path,\n\n temp_destination: &Path,\n\n overwrite: OverwriteMode,\n\n ) -> Result<OverwriteResult> {\n\n resource::do_retryable_download(\n\n api,\n\n destination,\n\n temp_destination,\n\n overwrite,\n\n self.last_updated(),\n\n move |_| future::ready(Ok(self.stream_url_path.as_str())),\n\n move |api, stream_url_path, temp_destination| {\n\n Self::stream_video(api, stream_url_path, temp_destination)\n\n },\n\n )\n\n .await\n", "file_path": "src/multimedia/mod.rs", "rank": 64, "score": 18.07412933941567 }, { "content": " }\n\n }\n\n Ok((internal_videos, external_videos))\n\n }\n\n None => Err(\"Invalid API response from server: type mismatch\"),\n\n }\n\n }\n\n\n\n async fn load_channel(api: &Api, channel: Channel, path: &Path) -> Result<Vec<InternalVideo>> {\n\n let channel_resp = api\n\n .api_as_json::<ApiData<Vec<InternalMedia>>>(\n\n &format!(\"multimedia/{}/medias\", channel.id),\n\n Method::GET,\n\n None,\n\n )\n\n .await?;\n\n\n\n let channel_path = path.join(Path::new(&sanitise_filename(&channel.name)));\n\n\n\n match channel_resp.data {\n", "file_path": "src/multimedia/mod.rs", "rank": 66, "score": 17.996211942391795 }, { "content": " async fn get_download_url(&self, api: &Api) -> Result<Url> {\n\n let data = api\n\n .api_as_json::<ApiData<String>>(\n\n &format!(\"files/file/{}/downloadurl\", self.id),\n\n Method::GET,\n\n None,\n\n )\n\n .await?;\n\n if let Some(url) = data.data {\n\n Ok(Url::parse(&url).map_err(|_| \"Unable to parse URL\")?)\n\n } else {\n\n Err(\"Invalid API response from server: type mismatch\")\n\n }\n\n }\n\n}\n", "file_path": "src/file.rs", 
"rank": 69, "score": 17.687082281573367 }, { "content": " }\n\n}\n\n\n\nimpl InternalVideo {\n\n async fn stream_video(\n\n api: &Api,\n\n stream_url_path: &str,\n\n temp_destination: &Path,\n\n ) -> RetryableResult<()> {\n\n let success = Command::new(&api.ffmpeg_path)\n\n .arg(\"-y\") // flag to overwrite output file without prompting\n\n .arg(\"-i\")\n\n .arg(stream_url_path)\n\n .arg(\"-c\")\n\n .arg(\"copy\")\n\n .arg(temp_destination.as_os_str())\n\n .output()\n\n .await\n\n .map_err(|_| RetryableError::Fail(\"Failed to start ffmpeg\"))?\n\n .status\n\n .success();\n\n if success {\n\n Ok(())\n\n } else {\n\n Err(RetryableError::Retry(\"ffmpeg returned nonzero exit code\"))\n\n }\n\n }\n\n}\n", "file_path": "src/multimedia/mod.rs", "rank": 71, "score": 16.971593063393065 }, { "content": " .custom_request(panopto_url, Method::POST, None, |req| req.json(&json))\n\n .await?;\n\n\n\n let output = response\n\n .json::<ExternalMultimediaResponse>()\n\n .await\n\n .map_err(|_| \"Unable to deserialize JSON\")?;\n\n\n\n Ok(output\n\n .d\n\n .results\n\n .into_iter()\n\n .map(|m| ExternalVideo {\n\n id: m.delivery_id,\n\n html_url: m.viewer_url,\n\n path: channel_path.join(super::make_mp4_extension(Path::new(&sanitise_filename(\n\n &m.session_name,\n\n )))),\n\n })\n\n .collect::<Vec<_>>())\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 72, "score": 16.727545412220255 }, { "content": " Some(medias) => Ok(medias\n\n .into_iter()\n\n .filter_map(|m| match m.stream_url_path {\n\n Some(stream_url_path) => Some(InternalVideo {\n\n id: m.id,\n\n stream_url_path,\n\n path: channel_path\n\n .join(make_mp4_extension(Path::new(&sanitise_filename(&m.name)))),\n\n last_updated: parse_time(&m.last_updated_date),\n\n }),\n\n None => None,\n\n })\n\n .collect::<Vec<_>>()),\n\n None => Err(\"Invalid API response from server: type mismatch\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/multimedia/mod.rs", "rank": 73, "score": 16.698718232139033 }, { "content": "\n\n 
pub fn weblecture_root<F: FnOnce(&str) -> PathBuf>(&self, make_path: F) -> WebLectureHandle {\n\n WebLectureHandle::new(self.id.clone(), make_path(&sanitise_filename(&self.code)))\n\n }\n\n\n\n pub fn conferencing_root<F: FnOnce(&str) -> PathBuf>(\n\n &self,\n\n make_path: F,\n\n ) -> ConferencingHandle {\n\n ConferencingHandle::new(self.id.clone(), make_path(&sanitise_filename(&self.code)))\n\n }\n\n}\n", "file_path": "src/module.rs", "rank": 75, "score": 16.534624632430713 }, { "content": "\n\n fn last_updated(&self) -> SystemTime {\n\n self.last_updated\n\n }\n\n\n\n async fn get_download_url(&self, api: &Api) -> Result<Url> {\n\n let response = panopto::launch(\n\n api,\n\n &format!(\n\n \"lti/Launch/panopto?context_id={}&resource_link_id={}\",\n\n self.module_id, self.id\n\n ),\n\n )\n\n .await?;\n\n\n\n let html = response\n\n .text()\n\n .await\n\n .map_err(|_| \"Unable to get HTML response\")?;\n\n\n\n panopto::extract_video_url_from_document(&html)\n\n }\n\n}\n", "file_path": "src/weblecture.rs", "rank": 76, "score": 16.384613412117965 }, { "content": " ),\n\n Method::GET,\n\n None,\n\n )\n\n .await?;\n\n match files_resp.data {\n\n Some(files) => Ok(files\n\n .into_iter()\n\n .map(|s| File {\n\n id: s.id,\n\n path: self.path.join({\n\n let name_for_download =\n\n s.file_name.as_deref().unwrap_or(s.name.as_str());\n\n if self.allow_upload {\n\n sanitise_filename(\n\n format!(\n\n \"{} - {}\",\n\n s.creator_name.as_deref().unwrap_or_else(|| \"Unknown\"),\n\n name_for_download\n\n )\n", "file_path": "src/file.rs", "rank": 77, "score": 16.348537575731378 }, { "content": " F: (Fn(RequestBuilder) -> RequestBuilder),\n\n {\n\n infinite_retry_http(&self.client, url, method, form, edit_request).await\n\n }\n\n\n\n pub async fn get_text<F>(\n\n &self,\n\n url: Url,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n edit_request: F,\n\n ) -> Result<String>\n\n where\n\n F: (Fn(RequestBuilder) -> RequestBuilder),\n\n {\n\n // Panapto displays a 500 
internal server error page without a desktop user-agent\n\n let res = infinite_retry_http(&self.client, url, method, form, edit_request).await?;\n\n\n\n res.text().await.map_err(|_| \"Unable to get text\")\n\n }\n", "file_path": "src/lib.rs", "rank": 78, "score": 15.602514791844289 }, { "content": " }\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl SimpleDownloadableResource for File {\n\n fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n fn path(&self) -> &Path {\n\n &self.path\n\n }\n\n fn path_mut(&mut self) -> &mut PathBuf {\n\n &mut self.path\n\n }\n\n\n\n fn last_updated(&self) -> SystemTime {\n\n self.last_updated\n\n }\n\n\n", "file_path": "src/file.rs", "rank": 79, "score": 15.262391027739865 }, { "content": " let get_subdirs = || async {\n\n let subdirs_resp = api\n\n .api_as_json::<ApiData<Vec<ApiFileDirectory>>>(\n\n &format!(\"files/?ParentID={}\", self.id),\n\n Method::GET,\n\n None,\n\n )\n\n .await?;\n\n match subdirs_resp.data {\n\n Some(subdirs) => future::join_all(\n\n subdirs\n\n .into_iter()\n\n .filter(|s| include_uploadable || !s.allow_upload.unwrap_or(false))\n\n .map(|s| DirectoryHandle {\n\n id: s.id,\n\n path: self.path.join(Path::new(&sanitise_filename(&s.name))),\n\n allow_upload: s.allow_upload.unwrap_or(false),\n\n /* last_updated: parse_time(&s.last_updated_date), */\n\n })\n\n .map(|dh| dh.load(api, include_uploadable)),\n", "file_path": "src/file.rs", "rank": 80, "score": 15.231228407449121 }, { "content": "\n\n async fn current_term(&self) -> Result<String> {\n\n Ok(self\n\n .api_as_json::<Term>(\n\n \"setting/AcademicWeek/current?populate=termDetail\",\n\n Method::GET,\n\n None,\n\n )\n\n .await?\n\n .term_detail\n\n .term)\n\n }\n\n\n\n pub async fn modules(&self, term: Option<String>) -> Result<Vec<Module>> {\n\n enum FilterMode {\n\n GreaterThan(String),\n\n Equal(String),\n\n }\n\n let filter = if let Some(specified_term) = term {\n\n FilterMode::Equal(specified_term)\n", "file_path": "src/lib.rs", "rank": 81, "score": 
15.107345541993173 }, { "content": "pub mod weblecture;\n\n\n\npub type Error = &'static str;\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\nconst ADFS_OAUTH2_URL: &str = \"https://vafs.nus.edu.sg/adfs/oauth2/authorize\";\n\nconst ADFS_CLIENT_ID: &str = \"E10493A3B1024F14BDC7D0D8B9F649E9-234390\";\n\nconst ADFS_RESOURCE_TYPE: &str = \"sg_edu_nus_oauth\";\n\nconst ADFS_REDIRECT_URI: &str = \"https://luminus.nus.edu.sg/auth/callback\";\n\nconst API_BASE_URL: &str = \"https://luminus.nus.edu.sg/v2/api/\";\n\nconst OCP_APIM_SUBSCRIPTION_KEY: &str = \"6963c200ca9440de8fa1eede730d8f7e\";\n\nconst OCP_APIM_SUBSCRIPTION_KEY_HEADER: &str = \"Ocp-Apim-Subscription-Key\";\n\nconst ADFS_REFERER_URL: &str = \"https://vafs.nus.edu.sg/\";\n\nconst ZOOM_REFERER_URL: &str = \"https://nus-sg.zoom.us/\";\n\nconst ZOOM_SIGNIN_URL: &str = \"https://nus-sg.zoom.us/signin\";\n\nconst ZOOM_REDIRECT_URL: &str = \"https://nus-sg.zoom.us/profile\";\n\n\n\n#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/lib.rs", "rank": 82, "score": 14.630753340975982 }, { "content": " }\n\n\n\n // TODO: check file extension?\n\n fn make_mp4_extension(path: &Path) -> PathBuf {\n\n path.with_extension(\"mp4\")\n\n }\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl SimpleDownloadableResource for WebLectureVideo {\n\n fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n fn path(&self) -> &Path {\n\n &self.path\n\n }\n\n fn path_mut(&mut self) -> &mut PathBuf {\n\n &mut self.path\n\n }\n", "file_path": "src/weblecture.rs", "rank": 83, "score": 14.324388419254664 }, { "content": "// Utilities for Panopto (web lectures and external multimedia)\n\n\n\nuse std::collections::HashMap;\n\n\n\nuse reqwest::{Method, Response, Url};\n\nuse scraper::{Html, Selector};\n\nuse serde::Deserialize;\n\n\n\nuse crate::{Api, Result};\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/panopto.rs", "rank": 84, "score": 14.317239523758142 }, { 
"content": " || access.create\n\n || access.update\n\n || access.delete\n\n || access.settings_read\n\n || access.settings_update\n\n })\n\n .unwrap_or(false)\n\n }\n\n\n\n pub fn is_taking(&self) -> bool {\n\n !self.is_teaching()\n\n }\n\n\n\n pub fn has_access(&self) -> bool {\n\n self.access.is_some()\n\n }\n\n\n\n pub async fn get_announcements(&self, api: &Api, archived: bool) -> Result<Vec<Announcement>> {\n\n let path = format!(\n\n \"announcement/{}/{}?sortby=displayFrom%20ASC\",\n", "file_path": "src/module.rs", "rank": 85, "score": 14.299598609804523 }, { "content": " .await?;\n\n\n\n match weblectures_resp.data {\n\n Some(weblectures) => Ok(weblectures\n\n .into_iter()\n\n .map(|w| WebLectureVideo {\n\n module_id: self.id.clone(),\n\n id: w.id,\n\n path: self.path.join(Self::make_mp4_extension(Path::new(\n\n &sanitise_filename(&w.name),\n\n ))),\n\n last_updated: parse_time(&w.last_updated_date),\n\n })\n\n .collect::<Vec<_>>()),\n\n None => Err(\"Invalid API response from server: type mismatch\"),\n\n }\n\n }\n\n // If an error occurred, there are no weblectures for that module\n\n Err(_) => Ok(vec![]),\n\n }\n", "file_path": "src/weblecture.rs", "rank": 86, "score": 14.291265391747363 }, { "content": "}\n\n\n\n#[async_trait(?Send)]\n\nimpl SimpleDownloadableResource for ExternalVideo {\n\n fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n fn path(&self) -> &Path {\n\n &self.path\n\n }\n\n fn path_mut(&mut self) -> &mut PathBuf {\n\n &mut self.path\n\n }\n\n\n\n fn last_updated(&self) -> SystemTime {\n\n // External multimedia do not have last updated dates\n\n SystemTime::UNIX_EPOCH\n\n }\n\n\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 87, "score": 14.206131432112587 }, { "content": "use std::collections::HashMap;\n\n\n\nuse reqwest::header::{CONTENT_TYPE, REFERER, USER_AGENT};\n\nuse reqwest::redirect::Policy;\n\nuse reqwest::Certificate;\n\nuse reqwest::Method;\n\nuse reqwest::{Client, RequestBuilder, Response, Url};\n\nuse 
scraper::{Html, Selector};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Deserialize;\n\n\n\nuse self::module::Module;\n\n\n\npub mod conferencing;\n\npub mod file;\n\npub mod module;\n\npub mod multimedia;\n\npub mod panopto;\n\npub mod resource;\n\npub mod util;\n", "file_path": "src/lib.rs", "rank": 89, "score": 14.146965641672196 }, { "content": " async fn get_download_url(&self, api: &Api) -> Result<Url> {\n\n let url =\n\n Url::parse(&self.html_url).map_err(|_| \"Unable to parse external multimedia URL\")?;\n\n\n\n let html = api\n\n .get_text(url, Method::GET, None, Api::add_desktop_user_agent)\n\n .await?;\n\n\n\n panopto::extract_video_url_from_document(&html)\n\n }\n\n}\n", "file_path": "src/multimedia/external_multimedia.rs", "rank": 91, "score": 13.966637557649165 }, { "content": " if !token_resp.status().is_success() {\n\n return Err(\"Unknown authentication failure (no token returned)\");\n\n }\n\n let token = token_resp\n\n .json::<TokenResponse>()\n\n .await\n\n .map_err(|_| \"Failed to deserialise token exchange response\")?;\n\n Ok(Api {\n\n jwt: token.access_token,\n\n client,\n\n ffmpeg_path: String::new(),\n\n })\n\n }\n\n\n\n // Assumes ADFS is already logged in\n\n pub async fn login_zoom(&mut self) -> Result<()> {\n\n let (idp_url, saml_request) = zoom_signin_get_saml_request(&self.client).await?;\n\n let (sso_url, saml_response) =\n\n idp_signon_post_fetch_saml_response(&self.client, &idp_url, &saml_request).await?;\n\n sso_post_saml_response(&self.client, &sso_url, &saml_response).await\n", "file_path": "src/lib.rs", "rank": 94, "score": 12.895409169526573 }, { "content": " }\n\n };\n\n Ok(res)\n\n}\n\n\n\nasync fn auth_http_post(\n\n client: &Client,\n\n url: Url,\n\n form: Option<&HashMap<&str, &str>>,\n\n with_apim: bool,\n\n) -> Result<Response> {\n\n infinite_retry_http(client, url, Method::POST, form, move |req| {\n\n if with_apim {\n\n req.header(OCP_APIM_SUBSCRIPTION_KEY_HEADER, OCP_APIM_SUBSCRIPTION_KEY)\n\n } else {\n\n 
req\n\n }\n\n })\n\n .await\n\n}\n", "file_path": "src/lib.rs", "rank": 95, "score": 12.785652587749503 }, { "content": "use std::path::PathBuf;\n\n\n\nuse reqwest::Method;\n\nuse serde::Deserialize;\n\n\n\nuse crate::conferencing::ConferencingHandle;\n\nuse crate::file::DirectoryHandle;\n\nuse crate::multimedia::MultimediaHandle;\n\nuse crate::util::sanitise_filename;\n\nuse crate::weblecture::WebLectureHandle;\n\nuse crate::{Api, ApiData, Result};\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/module.rs", "rank": 96, "score": 12.783748103854986 }, { "content": " )\n\n .await\n\n .into_iter()\n\n .collect::<Result<Vec<_>>>()\n\n .map(|v| v.into_iter().flatten().collect::<Vec<_>>()),\n\n None => Err(\"Invalid API response from server: type mismatch\"),\n\n }\n\n };\n\n\n\n let get_files = || async {\n\n let files_resp = api\n\n .api_as_json::<ApiData<Vec<ApiFileDirectory>>>(\n\n &format!(\n\n \"files/{}/file{}\",\n\n self.id,\n\n if self.allow_upload {\n\n \"?populate=Creator\"\n\n } else {\n\n \"\"\n\n }\n", "file_path": "src/file.rs", "rank": 97, "score": 12.562939381569462 }, { "content": " let params = build_auth_form(username, password);\n\n let client = build_client()?;\n\n\n\n let auth_resp = auth_http_post(&client, build_auth_url(), Some(&params), false).await?;\n\n if !auth_resp.url().as_str().starts_with(ADFS_REDIRECT_URI) {\n\n return Err(\"Invalid credentials\");\n\n }\n\n let code = auth_resp\n\n .url()\n\n .query_pairs()\n\n .find(|(key, _)| key == \"code\")\n\n .map(|(_key, code)| code.into_owned())\n\n .ok_or(\"Unknown authentication failure (no code returned)\")?;\n\n let token_resp = auth_http_post(\n\n &client,\n\n full_api_url(\"login/adfstoken\"),\n\n Some(&build_token_form(&code)),\n\n true,\n\n )\n\n .await?;\n", "file_path": "src/lib.rs", "rank": 98, "score": 12.555808013127077 }, { "content": " })\n\n .await\n\n }\n\n\n\n // Add a desktop user agent to the request (for those endpoints that are picky about it)\n\n pub fn 
add_desktop_user_agent(req: RequestBuilder) -> RequestBuilder {\n\n req.header(\n\n USER_AGENT,\n\n \"Mozilla/5.0 (X11; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0\",\n\n )\n\n }\n\n\n\n pub async fn custom_request<F>(\n\n &self,\n\n url: Url,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n edit_request: F,\n\n ) -> Result<Response>\n\n where\n", "file_path": "src/lib.rs", "rank": 99, "score": 12.452124464181392 } ]
Rust
src/io_source.rs
YtFlow/mio-noafd
27bbb8dcb72a253ad86031ddc5f4ab2a1f2cda27
use std::ops::{Deref, DerefMut}; #[cfg(unix)] use std::os::unix::io::AsRawFd; #[cfg(windows)] use std::os::windows::io::AsRawSocket; #[cfg(debug_assertions)] use std::sync::atomic::{AtomicUsize, Ordering}; use std::{fmt, io}; #[cfg(any(unix, debug_assertions))] use crate::poll; use crate::sys::IoSourceState; use crate::{event, Interest, Registry, Token}; /* /// /// # Examples /// /// Basic usage. /// /// ``` /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// use mio::{Interest, Poll, Token}; /// use mio::IoSource; /// /// use std::net; /// /// let poll = Poll::new()?; /// /// // Bind a std TCP listener. /// let listener = net::TcpListener::bind("127.0.0.1:0")?; /// // Wrap it in the `IoSource` type. /// let mut listener = IoSource::new(listener); /// /// // Register the listener. /// poll.registry().register(&mut listener, Token(0), Interest::READABLE)?; /// # Ok(()) /// # } /// ``` */ pub struct IoSource<T> { state: IoSourceState, inner: T, #[cfg(debug_assertions)] selector_id: SelectorId, } #[allow(unused)] impl<T> IoSource<T> { pub fn new(io: T) -> IoSource<T> { IoSource { state: IoSourceState::new(), inner: io, #[cfg(debug_assertions)] selector_id: SelectorId::new(), } } pub fn do_io<F, R>(&self, f: F) -> io::Result<R> where F: FnOnce(&T) -> io::Result<R>, { self.state.do_io(f, &self.inner) } pub fn into_inner(self) -> T { self.inner } } impl<T> Deref for IoSource<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.inner } } impl<T> DerefMut for IoSource<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } #[cfg(unix)] impl<T> event::Source for IoSource<T> where T: AsRawFd, { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.associate(registry)?; poll::selector(registry).register(self.inner.as_raw_fd(), token, interests) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> 
io::Result<()> { #[cfg(debug_assertions)] self.selector_id.check_association(registry)?; poll::selector(registry).reregister(self.inner.as_raw_fd(), token, interests) } fn deregister(&mut self, registry: &Registry) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.remove_association(registry)?; poll::selector(registry).deregister(self.inner.as_raw_fd()) } } #[cfg(windows)] impl<T> event::Source for IoSource<T> where T: AsRawSocket, { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.associate(registry)?; self.state .register(registry, token, interests, self.inner.as_raw_socket()) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.check_association(registry)?; self.state.reregister(registry, token, interests) } fn deregister(&mut self, _registry: &Registry) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.remove_association(_registry)?; self.state.deregister() } } impl<T> fmt::Debug for IoSource<T> where T: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } #[cfg(debug_assertions)] #[derive(Debug)] struct SelectorId { id: AtomicUsize, } #[cfg(debug_assertions)] impl SelectorId { const UNASSOCIATED: usize = 0; const fn new() -> SelectorId { SelectorId { id: AtomicUsize::new(Self::UNASSOCIATED), } } fn associate(&self, registry: &Registry) -> io::Result<()> { let registry_id = poll::selector(&registry).id(); let previous_id = self.id.swap(registry_id, Ordering::AcqRel); if previous_id == Self::UNASSOCIATED { Ok(()) } else { Err(io::Error::new( io::ErrorKind::AlreadyExists, "I/O source already registered with a `Registry`", )) } } fn check_association(&self, registry: &Registry) -> io::Result<()> { let registry_id = poll::selector(&registry).id(); let id = self.id.load(Ordering::Acquire); if id == registry_id { 
Ok(()) } else if id == Self::UNASSOCIATED { Err(io::Error::new( io::ErrorKind::NotFound, "I/O source not registered with `Registry`", )) } else { Err(io::Error::new( io::ErrorKind::AlreadyExists, "I/O source already registered with a different `Registry`", )) } } fn remove_association(&self, registry: &Registry) -> io::Result<()> { let registry_id = poll::selector(&registry).id(); let previous_id = self.id.swap(Self::UNASSOCIATED, Ordering::AcqRel); if previous_id == registry_id { Ok(()) } else { Err(io::Error::new( io::ErrorKind::NotFound, "I/O source not registered with `Registry`", )) } } } #[cfg(debug_assertions)] impl Clone for SelectorId { fn clone(&self) -> SelectorId { SelectorId { id: AtomicUsize::new(self.id.load(Ordering::Acquire)), } } }
use std::ops::{Deref, DerefMut}; #[cfg(unix)] use std::os::unix::io::AsRawFd; #[cfg(windows)] use std::os::windows::io::AsRawSocket; #[cfg(debug_assertions)] use std::sync::atomic::{AtomicUsize, Ordering}; use std::{fmt, io}; #[cfg(any(unix, debug_assertions))] use crate::poll; use crate::sys::IoSourceState; use crate::{event, Interest, Registry, Token}; /* /// /// # Examples /// /// Basic usage. /// /// ``` /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// use mio::{Interest, Poll, Token}; /// use mio::IoSource; /// /// use std::net; /// /// let poll = Poll::new()?; /// /// // Bind a std TCP listener. /// let listener = net::TcpListener::bind("127.0.0.1:0")?; /// // Wrap it in the `IoSource` type. /// let mut listener = IoSource::new(listener); /// /// // Register the listener. /// poll.registry().register(&mut listener, Token(0), Interest::READABLE)?; /// # Ok(()) /// # } /// ``` */ pub struct IoSource<T> { state: IoSourceState, inner: T, #[cfg(debug_assertions)] selector_id: SelectorId, } #[allow(unused)] impl<T> IoSource<T> { pub fn new(io: T) -> IoSource<T> { IoSource { state: IoSourceState::new(), inner: io, #[cfg(debug_assertions)] selector_id: SelectorId::new(), } } pub fn do_io<F, R>(&self, f: F) -> io::Result<R> where F: FnOnce(&T) -> io::Result<R>, { self.state.do_io(f, &self.inner) } pub fn into_inner(self) -> T { self.inner } } impl<T> Deref for IoSource<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.inner } } impl<T> DerefMut for IoSource<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } #[cfg(unix)] impl<T> event::Source for IoSource<T> where T: AsRawFd, { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.associate(registry)?; poll::selector(registry).register(self.inner.as_raw_fd(), token, interests) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> 
io::Result<()> { #[cfg(debug_assertions)] self.selector_id.check_association(registry)?; poll::selector(registry).reregister(self.inner.as_raw_fd(), token, interests) } fn deregister(&mut self, registry: &Registry) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.remove_association(registry)?; poll::selector(registry).deregister(self.inner.as_raw_fd()) } } #[cfg(windows)] impl<T> event::Source for IoSource<T> where T: AsRawSocket, { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.associate(registry)?; self.state .register(registry, token, interests, self.inner.as_raw_socket()) }
fn deregister(&mut self, _registry: &Registry) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.remove_association(_registry)?; self.state.deregister() } } impl<T> fmt::Debug for IoSource<T> where T: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } #[cfg(debug_assertions)] #[derive(Debug)] struct SelectorId { id: AtomicUsize, } #[cfg(debug_assertions)] impl SelectorId { const UNASSOCIATED: usize = 0; const fn new() -> SelectorId { SelectorId { id: AtomicUsize::new(Self::UNASSOCIATED), } } fn associate(&self, registry: &Registry) -> io::Result<()> { let registry_id = poll::selector(&registry).id(); let previous_id = self.id.swap(registry_id, Ordering::AcqRel); if previous_id == Self::UNASSOCIATED { Ok(()) } else { Err(io::Error::new( io::ErrorKind::AlreadyExists, "I/O source already registered with a `Registry`", )) } } fn check_association(&self, registry: &Registry) -> io::Result<()> { let registry_id = poll::selector(&registry).id(); let id = self.id.load(Ordering::Acquire); if id == registry_id { Ok(()) } else if id == Self::UNASSOCIATED { Err(io::Error::new( io::ErrorKind::NotFound, "I/O source not registered with `Registry`", )) } else { Err(io::Error::new( io::ErrorKind::AlreadyExists, "I/O source already registered with a different `Registry`", )) } } fn remove_association(&self, registry: &Registry) -> io::Result<()> { let registry_id = poll::selector(&registry).id(); let previous_id = self.id.swap(Self::UNASSOCIATED, Ordering::AcqRel); if previous_id == registry_id { Ok(()) } else { Err(io::Error::new( io::ErrorKind::NotFound, "I/O source not registered with `Registry`", )) } } } #[cfg(debug_assertions)] impl Clone for SelectorId { fn clone(&self) -> SelectorId { SelectorId { id: AtomicUsize::new(self.id.load(Ordering::Acquire)), } } }
fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { #[cfg(debug_assertions)] self.selector_id.check_association(registry)?; self.state.reregister(registry, token, interests) }
function_block-full_function
[ { "content": "fn main() -> io::Result<()> {\n\n env_logger::init();\n\n\n\n // Create a poll instance.\n\n let mut poll = Poll::new()?;\n\n // Create storage for events.\n\n let mut events = Events::with_capacity(128);\n\n\n\n // Setup the TCP server socket.\n\n let addr = \"127.0.0.1:9000\".parse().unwrap();\n\n let mut server = TcpListener::bind(addr)?;\n\n\n\n // Register the server with poll we can receive events for it.\n\n poll.registry()\n\n .register(&mut server, SERVER, Interest::READABLE)?;\n\n\n\n // Map of `Token` -> `TcpStream`.\n\n let mut connections = HashMap::new();\n\n // Unique token for each incoming connection.\n\n let mut unique_token = Token(SERVER.0 + 1);\n", "file_path": "examples/tcp_server.rs", "rank": 0, "score": 335519.8503959938 }, { "content": "fn main() -> io::Result<()> {\n\n env_logger::init();\n\n\n\n // Create a poll instance.\n\n let mut poll = Poll::new()?;\n\n // Create storage for events. Since we will only register a single socket, a\n\n // capacity of 1 will do.\n\n let mut events = Events::with_capacity(1);\n\n\n\n // Setup the UDP socket.\n\n let addr = \"127.0.0.1:9000\".parse().unwrap();\n\n let mut socket = UdpSocket::bind(addr)?;\n\n\n\n // Register our socket with the token defined above and an interest in being\n\n // `READABLE`.\n\n poll.registry()\n\n .register(&mut socket, UDP_SOCKET, Interest::READABLE)?;\n\n\n\n println!(\"You can connect to the server using `nc`:\");\n\n println!(\" $ nc -u 127.0.0.1 9000\");\n", "file_path": "examples/udp_server.rs", "rank": 1, "score": 296924.49470930255 }, { "content": "#[test]\n\npub fn reregister_different_interest_without_poll() {\n\n init();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // Create the listener\n\n let mut l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n // Register the listener with `Poll`\n\n poll.registry()\n\n .register(&mut l, Token(0), Interest::READABLE)\n\n 
.unwrap();\n\n\n\n let mut s1 = TcpStream::connect(l.local_addr().unwrap()).unwrap();\n\n poll.registry()\n\n .register(&mut s1, Token(2), Interest::READABLE)\n\n .unwrap();\n\n\n\n const TIMEOUT: Duration = Duration::from_millis(200);\n\n sleep(TIMEOUT);\n\n\n\n poll.registry()\n\n .reregister(&mut l, Token(0), Interest::WRITABLE)\n\n .unwrap();\n\n\n\n poll.poll(&mut events, Some(TIMEOUT)).unwrap();\n\n assert!(events.iter().next().is_none());\n\n}\n\n\n", "file_path": "tests/registering.rs", "rank": 2, "score": 275810.16058498534 }, { "content": "/// Expected a closed event. If `read` is true is checks for `is_read_closed`,\n\n/// otherwise for `is_write_closed`.\n\npub fn expect_one_closed_event(poll: &mut Poll, events: &mut Events, token: Token, read: bool) {\n\n poll.poll(events, Some(Duration::from_secs(1))).unwrap();\n\n let mut iter = events.iter();\n\n let event = iter.next().unwrap();\n\n assert_eq!(event.token(), token, \"invalid token, event: {:#?}\", event);\n\n if read {\n\n assert!(\n\n event.is_read_closed(),\n\n \"expected closed or error, event: {:#?}\",\n\n event\n\n );\n\n } else {\n\n assert!(\n\n event.is_write_closed(),\n\n \"expected closed or error, event: {:#?}\",\n\n event\n\n );\n\n }\n\n assert!(iter.next().is_none());\n\n}\n", "file_path": "tests/unix_pipe.rs", "rank": 3, "score": 268295.45861990785 }, { "content": "fn next(current: &mut Token) -> Token {\n\n let next = current.0;\n\n current.0 += 1;\n\n Token(next)\n\n}\n\n\n", "file_path": "examples/tcp_server.rs", "rank": 4, "score": 266633.4659671026 }, { "content": "#[test]\n\nfn reregister_interest_token_usage() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut udp_socket = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut udp_socket, ID1, Interest::READABLE)\n\n .expect(\"unable to register listener\");\n\n\n\n poll.registry()\n\n .reregister(&mut udp_socket, ID1, Interest::READABLE)\n\n .expect(\"unable to register 
listener\");\n\n\n\n poll.registry()\n\n .reregister(&mut udp_socket, ID2, Interest::WRITABLE)\n\n .expect(\"unable to register listener\");\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n", "file_path": "tests/poll.rs", "rank": 5, "score": 264947.94503652945 }, { "content": "pub fn accept(listener: &net::TcpListener) -> io::Result<(net::TcpStream, SocketAddr)> {\n\n let mut addr: MaybeUninit<libc::sockaddr_storage> = MaybeUninit::uninit();\n\n let mut length = size_of::<libc::sockaddr_storage>() as libc::socklen_t;\n\n\n\n // On platforms that support it we can use `accept4(2)` to set `NONBLOCK`\n\n // and `CLOEXEC` in the call to accept the connection.\n\n #[cfg(any(\n\n // Android x86's seccomp profile forbids calls to `accept4(2)`\n\n // See https://github.com/tokio-rs/mio/issues/1445 for details\n\n all(\n\n not(target_arch=\"x86\"),\n\n target_os = \"android\"\n\n ),\n\n target_os = \"dragonfly\",\n\n target_os = \"freebsd\",\n\n target_os = \"illumos\",\n\n target_os = \"linux\",\n\n target_os = \"netbsd\",\n\n target_os = \"openbsd\"\n\n ))]\n", "file_path": "src/sys/unix/tcp.rs", "rank": 6, "score": 261518.16493864765 }, { "content": "pub fn accept(_: &net::TcpListener) -> io::Result<(net::TcpStream, SocketAddr)> {\n\n os_required!();\n\n}\n\n\n\npub(crate) fn get_localaddr(_: TcpSocket) -> io::Result<SocketAddr> {\n\n os_required!();\n\n}\n", "file_path": "src/sys/shell/tcp.rs", "rank": 7, "score": 253716.33113200514 }, { "content": "fn expect_waker_event(poll: &mut Poll, events: &mut Events, token: Token) {\n\n poll.poll(events, Some(Duration::from_millis(100))).unwrap();\n\n assert!(!events.is_empty());\n\n for event in events.iter() {\n\n assert_eq!(event.token(), token);\n\n assert!(event.is_readable());\n\n }\n\n}\n", "file_path": "tests/waker.rs", "rank": 8, "score": 252651.78859326136 }, { "content": "fn would_block(err: &io::Error) -> bool {\n\n err.kind() == io::ErrorKind::WouldBlock\n\n}\n\n\n", "file_path": "examples/tcp_server.rs", 
"rank": 9, "score": 245148.40253158144 }, { "content": "fn interrupted(err: &io::Error) -> bool {\n\n err.kind() == io::ErrorKind::Interrupted\n\n}\n", "file_path": "examples/tcp_server.rs", "rank": 10, "score": 245148.40253158144 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\npub fn double_register_different_token() {\n\n init();\n\n let poll = Poll::new().unwrap();\n\n\n\n let mut listener = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut listener, Token(0), Interest::READABLE)\n\n .unwrap();\n\n\n\n assert_error(\n\n poll.registry()\n\n .register(&mut listener, Token(1), Interest::READABLE),\n\n \"already registered\",\n\n );\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 11, "score": 243717.8136960738 }, { "content": "pub fn bind(_: SocketAddr) -> io::Result<net::UdpSocket> {\n\n os_required!()\n\n}\n\n\n\npub(crate) fn only_v6(_: &net::UdpSocket) -> io::Result<bool> {\n\n os_required!()\n\n}\n", "file_path": "src/sys/shell/udp.rs", "rank": 12, "score": 243426.04601456784 }, { "content": "pub fn bind(addr: SocketAddr) -> io::Result<net::UdpSocket> {\n\n // Gives a warning for non Apple platforms.\n\n #[allow(clippy::let_and_return)]\n\n let socket = new_ip_socket(addr, libc::SOCK_DGRAM);\n\n\n\n socket.and_then(|socket| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(bind(socket, raw_addr.as_ptr(), raw_addr_length))\n\n .map_err(|err| {\n\n // Close the socket if we hit an error, ignoring the error\n\n // from closing since we can't pass back two errors.\n\n let _ = unsafe { libc::close(socket) };\n\n err\n\n })\n\n .map(|_| unsafe { net::UdpSocket::from_raw_fd(socket) })\n\n })\n\n}\n\n\n\npub(crate) fn only_v6(socket: &net::UdpSocket) -> io::Result<bool> {\n\n let mut optval: libc::c_int = 0;\n", "file_path": "src/sys/unix/udp.rs", "rank": 13, "score": 239289.28261133897 }, { "content": "pub fn bind(addr: 
SocketAddr) -> io::Result<net::UdpSocket> {\n\n init();\n\n new_ip_socket(addr, SOCK_DGRAM).and_then(|socket| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(\n\n win_bind(socket, raw_addr.as_ptr(), raw_addr_length,),\n\n PartialEq::eq,\n\n SOCKET_ERROR\n\n )\n\n .map_err(|err| {\n\n // Close the socket if we hit an error, ignoring the error\n\n // from closing since we can't pass back two errors.\n\n let _ = unsafe { closesocket(socket) };\n\n err\n\n })\n\n .map(|_| unsafe { net::UdpSocket::from_raw_socket(socket as StdSocket) })\n\n })\n\n}\n", "file_path": "src/sys/windows/udp.rs", "rank": 14, "score": 239289.28261133897 }, { "content": "pub fn expect_no_events(poll: &mut Poll, events: &mut Events) {\n\n poll.poll(events, Some(Duration::from_millis(50)))\n\n .expect(\"unable to poll\");\n\n if !events.is_empty() {\n\n for event in events.iter() {\n\n error!(\"unexpected event: {:?}\", event);\n\n }\n\n panic!(\"received events, but didn't expect any, see above\");\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 15, "score": 239137.75643461625 }, { "content": "/// Assert that the provided result is an `io::Error` with kind `WouldBlock`.\n\npub fn assert_would_block<T>(result: io::Result<T>) {\n\n match result {\n\n Ok(_) => panic!(\"unexpected OK result, expected a `WouldBlock` error\"),\n\n Err(ref err) if err.kind() == io::ErrorKind::WouldBlock => {}\n\n Err(err) => panic!(\"unexpected error result: {}\", err),\n\n }\n\n}\n\n\n\n/// Assert that `NONBLOCK` is set on `socket`.\n", "file_path": "tests/util/mod.rs", "rank": 16, "score": 226562.91611137503 }, { "content": "pub fn expect_events(poll: &mut Poll, events: &mut Events, mut expected: Vec<ExpectEvent>) {\n\n // In a lot of calls we expect more then one event, but it could be that\n\n // poll returns the first event only in a single call. 
To be a bit more\n\n // lenient we'll poll a couple of times.\n\n for _ in 0..3 {\n\n poll.poll(events, Some(Duration::from_millis(500)))\n\n .expect(\"unable to poll\");\n\n\n\n for event in events.iter() {\n\n let index = expected.iter().position(|expected| expected.matches(event));\n\n\n\n if let Some(index) = index {\n\n expected.swap_remove(index);\n\n } else {\n\n // Must accept sporadic events.\n\n warn!(\"got unexpected event: {:?}\", event);\n\n }\n\n }\n\n\n\n if expected.is_empty() {\n", "file_path": "tests/util/mod.rs", "rank": 17, "score": 220387.7162057769 }, { "content": "/// Create a new non-blocking Unix pipe.\n\n///\n\n/// This is a wrapper around Unix's [`pipe(2)`] system call and can be used as\n\n/// inter-process or thread communication channel.\n\n///\n\n/// This channel may be created before forking the process and then one end used\n\n/// in each process, e.g. the parent process has the sending end to send command\n\n/// to the child process.\n\n///\n\n/// [`pipe(2)`]: https://pubs.opengroup.org/onlinepubs/9699919799/functions/pipe.html\n\n///\n\n/// # Events\n\n///\n\n/// The [`Sender`] can be registered with [`WRITABLE`] interest to receive\n\n/// [writable events], the [`Receiver`] with [`READABLE`] interest. Once data is\n\n/// written to the `Sender` the `Receiver` will receive an [readable event].\n\n///\n\n/// In addition to those events, events will also be generated if the other side\n\n/// is dropped. To check if the `Sender` is dropped you'll need to check\n\n/// [`is_read_closed`] on events for the `Receiver`, if it returns true the\n\n/// `Sender` is dropped. On the `Sender` end check [`is_write_closed`], if it\n\n/// returns true the `Receiver` was dropped. 
Also see the second example below.\n\n///\n\n/// [`WRITABLE`]: Interest::WRITABLE\n\n/// [writable events]: event::Event::is_writable\n\n/// [`READABLE`]: Interest::READABLE\n\n/// [readable event]: event::Event::is_readable\n\n/// [`is_read_closed`]: event::Event::is_read_closed\n\n/// [`is_write_closed`]: event::Event::is_write_closed\n\n///\n\n/// # Deregistering\n\n///\n\n/// Both `Sender` and `Receiver` will deregister themselves when dropped,\n\n/// **iff** the file descriptors are not duplicated (via [`dup(2)`]).\n\n///\n\n/// [`dup(2)`]: https://pubs.opengroup.org/onlinepubs/9699919799/functions/dup.html\n\n///\n\n/// # Examples\n\n///\n\n/// Simple example that writes data into the sending end and read it from the\n\n/// receiving end.\n\n///\n\n/// ```\n\n/// use std::io::{self, Read, Write};\n\n///\n\n/// use mio::{Poll, Events, Interest, Token};\n\n/// use mio::unix::pipe;\n\n///\n\n/// // Unique tokens for the two ends of the channel.\n\n/// const PIPE_RECV: Token = Token(0);\n\n/// const PIPE_SEND: Token = Token(1);\n\n///\n\n/// # fn main() -> io::Result<()> {\n\n/// // Create our `Poll` instance and the `Events` container.\n\n/// let mut poll = Poll::new()?;\n\n/// let mut events = Events::with_capacity(8);\n\n///\n\n/// // Create a new pipe.\n\n/// let (mut sender, mut receiver) = pipe::new()?;\n\n///\n\n/// // Register both ends of the channel.\n\n/// poll.registry().register(&mut receiver, PIPE_RECV, Interest::READABLE)?;\n\n/// poll.registry().register(&mut sender, PIPE_SEND, Interest::WRITABLE)?;\n\n///\n\n/// const MSG: &[u8; 11] = b\"Hello world\";\n\n///\n\n/// loop {\n\n/// poll.poll(&mut events, None)?;\n\n///\n\n/// for event in events.iter() {\n\n/// match event.token() {\n\n/// PIPE_SEND => sender.write(MSG)\n\n/// .and_then(|n| if n != MSG.len() {\n\n/// // We'll consider a short write an error in this\n\n/// // example. 
NOTE: we can't use `write_all` with\n\n/// // non-blocking I/O.\n\n/// Err(io::ErrorKind::WriteZero.into())\n\n/// } else {\n\n/// Ok(())\n\n/// })?,\n\n/// PIPE_RECV => {\n\n/// let mut buf = [0; 11];\n\n/// let n = receiver.read(&mut buf)?;\n\n/// println!(\"received: {:?}\", &buf[0..n]);\n\n/// assert_eq!(n, MSG.len());\n\n/// assert_eq!(&buf, &*MSG);\n\n/// return Ok(());\n\n/// },\n\n/// _ => unreachable!(),\n\n/// }\n\n/// }\n\n/// }\n\n/// # }\n\n/// ```\n\n///\n\n/// Example that receives an event once the `Sender` is dropped.\n\n///\n\n/// ```\n\n/// # use std::io;\n\n/// #\n\n/// # use mio::{Poll, Events, Interest, Token};\n\n/// # use mio::unix::pipe;\n\n/// #\n\n/// # const PIPE_RECV: Token = Token(0);\n\n/// # const PIPE_SEND: Token = Token(1);\n\n/// #\n\n/// # fn main() -> io::Result<()> {\n\n/// // Same setup as in the example above.\n\n/// let mut poll = Poll::new()?;\n\n/// let mut events = Events::with_capacity(8);\n\n///\n\n/// let (mut sender, mut receiver) = pipe::new()?;\n\n///\n\n/// poll.registry().register(&mut receiver, PIPE_RECV, Interest::READABLE)?;\n\n/// poll.registry().register(&mut sender, PIPE_SEND, Interest::WRITABLE)?;\n\n///\n\n/// // Drop the sender.\n\n/// drop(sender);\n\n///\n\n/// poll.poll(&mut events, None)?;\n\n///\n\n/// for event in events.iter() {\n\n/// match event.token() {\n\n/// PIPE_RECV if event.is_read_closed() => {\n\n/// // Detected that the sender was dropped.\n\n/// println!(\"Sender dropped!\");\n\n/// return Ok(());\n\n/// },\n\n/// _ => unreachable!(),\n\n/// }\n\n/// }\n\n/// # unreachable!();\n\n/// # }\n\n/// ```\n\npub fn new() -> io::Result<(Sender, Receiver)> {\n\n let mut fds: [RawFd; 2] = [-1, -1];\n\n\n\n #[cfg(any(\n\n target_os = \"android\",\n\n target_os = \"dragonfly\",\n\n target_os = \"freebsd\",\n\n target_os = \"linux\",\n\n target_os = \"netbsd\",\n\n target_os = \"openbsd\",\n\n target_os = \"illumos\",\n\n ))]\n\n unsafe {\n\n if libc::pipe2(fds.as_mut_ptr(), libc::O_CLOEXEC | 
libc::O_NONBLOCK) != 0 {\n\n return Err(io::Error::last_os_error());\n\n }\n\n }\n\n\n\n #[cfg(any(target_os = \"ios\", target_os = \"macos\", target_os = \"solaris\"))]\n\n unsafe {\n", "file_path": "src/sys/unix/pipe.rs", "rank": 18, "score": 214705.55303620768 }, { "content": "fn smoke_test_tcp_listener<F>(addr: SocketAddr, make_listener: F)\n\nwhere\n\n F: FnOnce(SocketAddr) -> io::Result<TcpListener>,\n\n{\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut listener = make_listener(addr).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n assert_socket_non_blocking(&listener);\n\n assert_socket_close_on_exec(&listener);\n\n\n\n poll.registry()\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .expect(\"unable to register TCP listener\");\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let thread_handle = start_connections(address, 1, barrier.clone());\n\n\n\n expect_events(\n", "file_path": "tests/tcp_listener.rs", "rank": 19, "score": 211714.93293249642 }, { "content": "/// Call all registration operations, ending with `source` being registered with `token` and `final_interests`.\n\npub fn registry_ops_flow(\n\n registry: &Registry,\n\n source: &mut dyn Source,\n\n token: Token,\n\n init_interests: Interest,\n\n final_interests: Interest,\n\n) -> io::Result<()> {\n\n registry.register(source, token, init_interests).unwrap();\n\n registry.deregister(source).unwrap();\n\n\n\n registry.register(source, token, init_interests).unwrap();\n\n registry.reregister(source, token, final_interests)\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 20, "score": 209699.42905946798 }, { "content": "/// Assert that `result` is an error and the formatted error (via\n\n/// `fmt::Display`) equals `expected_msg`.\n\npub fn assert_error<T, E: fmt::Display>(result: Result<T, E>, expected_msg: &str) {\n\n match result {\n\n Ok(_) => panic!(\"unexpected OK result\"),\n\n Err(err) => assert!(\n\n err.to_string().contains(expected_msg),\n\n 
\"wanted: {}, got: {}\",\n\n err,\n\n expected_msg\n\n ),\n\n }\n\n}\n", "file_path": "tests/poll.rs", "rank": 21, "score": 207951.3386206454 }, { "content": "#[test]\n\nfn tcp_listener_std() {\n\n smoke_test_tcp_listener(any_local_address(), |addr| {\n\n let listener = net::TcpListener::bind(addr).unwrap();\n\n // `std::net::TcpListener`s are blocking by default, so make sure it is in\n\n // non-blocking mode before wrapping in a Mio equivalent.\n\n listener.set_nonblocking(true).unwrap();\n\n Ok(TcpListener::from_std(listener))\n\n });\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 22, "score": 204078.32906221534 }, { "content": "fn enqueue_with_wakeup(queue: *mut (), node: &ReadinessNode) -> io::Result<()> {\n\n debug_assert!(!queue.is_null());\n\n // This is ugly... but we don't want to bump the ref count.\n\n let queue: &Arc<ReadinessQueueInner> =\n\n unsafe { &*(&queue as *const *mut () as *const Arc<ReadinessQueueInner>) };\n\n queue.enqueue_node_with_wakeup(node)\n\n}\n\n\n\nunsafe fn token(node: &ReadinessNode, pos: usize) -> Token {\n\n match pos {\n\n 0 => *node.token_0.get(),\n\n 1 => *node.token_1.get(),\n\n 2 => *node.token_2.get(),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/sys/windows/queue.rs", "rank": 23, "score": 198581.41323365807 }, { "content": "#[test]\n\nfn reregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .unwrap();\n\n poll.registry()\n\n .reregister(&mut listener, ID2, Interest::READABLE)\n\n .unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let thread_handle = start_connections(address, 1, barrier.clone());\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID2, Interest::READABLE)],\n", "file_path": "tests/tcp_listener.rs", "rank": 24, 
"score": 193470.58981894955 }, { "content": "#[test]\n\nfn registering() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut stream = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::READABLE)\n\n .expect(\"unable to register TCP listener\");\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // NOTE: more tests are done in the smoke tests above.\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 25, "score": 193393.4572474038 }, { "content": "struct ListenerInner {\n\n iocp: ReadyBinding,\n\n accept: State<net::TcpStream, (net::TcpStream, SocketAddr)>,\n\n accept_buf: AcceptAddrsBuf,\n\n // While compiling with target x86_64-uwp-windows-msvc,\n\n // the field is mistakenly reported as unused.\n\n #[allow(dead_code)]\n\n instant_notify: bool,\n\n}\n\n\n", "file_path": "src/sys/windows/tcp.rs", "rank": 26, "score": 191616.84149609593 }, { "content": "struct ListenerIo {\n\n inner: Mutex<ListenerInner>,\n\n accept: Overlapped,\n\n family: Family,\n\n socket: net::TcpListener,\n\n}\n\n\n", "file_path": "src/sys/windows/tcp.rs", "rank": 27, "score": 191445.61669645013 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn reregister_without_register() {\n\n let poll = Poll::new().expect(\"unable to create Poll instance\");\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n assert_error(\n\n poll.registry()\n\n .reregister(&mut listener, ID1, Interest::READABLE),\n\n \"not registered\",\n\n );\n\n}\n\n\n\n// This test checks the following register/deregister constraint:\n\n// The event source must have previously been registered with this instance\n\n// of `Poll`, otherwise the behavior is undefined.\n\n//\n\n// This test is done on Windows and epoll platforms where deregistering a\n\n// source without a previous register is defined behavior that fail with an\n\n// error 
code.\n\n//\n\n// On kqueue platforms deregistering w/o registering works but that's not a\n\n// test goal, so it is not tested.\n", "file_path": "tests/poll.rs", "rank": 28, "score": 189507.22915963674 }, { "content": "pub fn token(event: &Event) -> Token {\n\n event.token\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 29, "score": 188922.34810092277 }, { "content": "fn set_keepalive_retries(socket: TcpSocket, retries: u32) -> io::Result<()> {\n\n let retries = retries.try_into().ok().unwrap_or_else(i32::max_value);\n\n syscall!(setsockopt(\n\n socket,\n\n libc::IPPROTO_TCP,\n\n libc::TCP_KEEPCNT,\n\n &(retries as libc::c_int) as *const _ as *const libc::c_void,\n\n size_of::<libc::c_int>() as libc::socklen_t\n\n ))\n\n .map(|_| ())\n\n}\n\n\n\n#[cfg(any(\n\n target_os = \"linux\",\n\n target_os = \"macos\",\n\n target_os = \"ios\",\n\n target_os = \"freebsd\",\n\n target_os = \"netbsd\",\n\n))]\n\npub(crate) fn get_keepalive_retries(socket: TcpSocket) -> io::Result<Option<u32>> {\n", "file_path": "src/sys/unix/tcp.rs", "rank": 30, "score": 187264.32406559534 }, { "content": "fn set_keepalive_interval(socket: TcpSocket, interval: Duration) -> io::Result<()> {\n\n let interval_secs = interval\n\n .as_secs()\n\n .try_into()\n\n .ok()\n\n .unwrap_or_else(i32::max_value);\n\n syscall!(setsockopt(\n\n socket,\n\n libc::IPPROTO_TCP,\n\n libc::TCP_KEEPINTVL,\n\n &(interval_secs as libc::c_int) as *const _ as *const libc::c_void,\n\n size_of::<libc::c_int>() as libc::socklen_t\n\n ))\n\n .map(|_| ())\n\n}\n\n\n\n#[cfg(any(\n\n target_os = \"linux\",\n\n target_os = \"macos\",\n\n target_os = \"ios\",\n", "file_path": "src/sys/unix/tcp.rs", "rank": 31, "score": 187264.32406559534 }, { "content": "fn set_keepalive_time(socket: TcpSocket, time: Duration) -> io::Result<()> {\n\n let time_secs = time\n\n .as_secs()\n\n .try_into()\n\n .ok()\n\n .unwrap_or_else(i32::max_value);\n\n syscall!(setsockopt(\n\n socket,\n\n libc::IPPROTO_TCP,\n\n KEEPALIVE_TIME,\n\n 
&(time_secs as libc::c_int) as *const _ as *const libc::c_void,\n\n size_of::<libc::c_int>() as libc::socklen_t\n\n ))\n\n .map(|_| ())\n\n}\n\n\n\npub(crate) fn get_keepalive_time(socket: TcpSocket) -> io::Result<Option<Duration>> {\n\n if !get_keepalive(socket)? {\n\n return Ok(None);\n\n }\n", "file_path": "src/sys/unix/tcp.rs", "rank": 32, "score": 187264.32406559534 }, { "content": "pub fn init_with_poll() -> (Poll, Events) {\n\n init();\n\n\n\n let poll = Poll::new().expect(\"unable to create Poll instance\");\n\n let events = Events::with_capacity(16);\n\n (poll, events)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 33, "score": 187223.2969102926 }, { "content": "#[test]\n\npub fn register_deregister() {\n\n init();\n\n\n\n debug!(\"Starting TEST_REGISTER_DEREGISTER\");\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(1024);\n\n\n\n let mut server = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = server.local_addr().unwrap();\n\n\n\n info!(\"register server socket\");\n\n poll.registry()\n\n .register(&mut server, SERVER, Interest::READABLE)\n\n .unwrap();\n\n\n\n let mut client = TcpStream::connect(addr).unwrap();\n\n\n\n // Register client socket only as writable\n\n poll.registry()\n\n .register(&mut client, CLIENT, Interest::READABLE)\n", "file_path": "tests/registering.rs", "rank": 34, "score": 184199.68970933312 }, { "content": "fn would_block() -> io::Error {\n\n io::ErrorKind::WouldBlock.into()\n\n}\n\n\n\nimpl NamedPipe {\n\n /// Creates a new named pipe at the specified `addr` given a \"reasonable\n\n /// set\" of initial configuration options.\n\n pub fn new<A: AsRef<OsStr>>(addr: A) -> io::Result<NamedPipe> {\n\n let pipe = pipe::NamedPipe::new(addr)?;\n\n // Safety: nothing actually unsafe about this. 
The trait fn includes\n\n // `unsafe`.\n\n Ok(unsafe { NamedPipe::from_raw_handle(pipe.into_raw_handle()) })\n\n }\n\n\n\n /// Attempts to call `ConnectNamedPipe`, if possible.\n\n ///\n\n /// This function will attempt to connect this pipe to a client in an\n\n /// asynchronous fashion. If the function immediately establishes a\n\n /// connection to a client then `Ok(())` is returned. Otherwise if a\n\n /// connection attempt was issued and is now in progress then a \"would\n", "file_path": "src/sys/windows/named_pipe.rs", "rank": 35, "score": 183473.69532984163 }, { "content": "/// Check all events for possible errors, it returns the first error found.\n\nfn check_errors(events: &[libc::kevent], ignored_errors: &[Data]) -> io::Result<()> {\n\n for event in events {\n\n // We can't use references to packed structures (in checking the ignored\n\n // errors), so we need copy the data out before use.\n\n let data = event.data;\n\n // Check for the error flag, the actual error will be in the `data`\n\n // field.\n\n if (event.flags & libc::EV_ERROR != 0) && data != 0 && !ignored_errors.contains(&data) {\n\n return Err(io::Error::from_raw_os_error(data as i32));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\ncfg_io_source! 
{\n\n #[cfg(debug_assertions)]\n\n impl Selector {\n\n pub fn id(&self) -> usize {\n\n self.id\n\n }\n", "file_path": "src/sys/unix/selector/kqueue.rs", "rank": 36, "score": 182569.8573984823 }, { "content": "/// Assert that `result` is an error and the formatted error (via\n\n/// `fmt::Display`) equals `expected_msg`.\n\npub fn assert_error<T, E: fmt::Display>(result: Result<T, E>, expected_msg: &str) {\n\n match result {\n\n Ok(_) => panic!(\"unexpected OK result\"),\n\n Err(err) => assert!(\n\n err.to_string().contains(expected_msg),\n\n \"wanted: {}, got: {}\",\n\n expected_msg,\n\n err,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 37, "score": 174830.7524236797 }, { "content": "fn other(s: &str) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, s)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Events {\n\n /// Raw I/O event completions are filled in here by the call to `get_many`\n\n /// on the completion port above. These are then processed to run callbacks\n\n /// which figure out what to do after the event is done.\n\n statuses: Box<[CompletionStatus]>,\n\n\n\n /// Literal events returned by `get` to the upwards `EventLoop`. 
This file\n\n /// doesn't really modify this (except for the waker), instead almost all\n\n /// events are filled in by the `ReadinessQueue` from the `poll` module.\n\n events: Vec<Event>,\n\n}\n\n\n\nimpl Events {\n\n pub fn with_capacity(cap: usize) -> Events {\n\n // Note that it's possible for the output `events` to grow beyond the\n", "file_path": "src/sys/windows/selector.rs", "rank": 38, "score": 174447.2559048801 }, { "content": "#[test]\n\nfn tcp_listener() {\n\n smoke_test_tcp_listener(any_local_address(), TcpListener::bind);\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 39, "score": 167100.98401198385 }, { "content": "#[test]\n\nfn tcp_listener_ipv6() {\n\n smoke_test_tcp_listener(any_local_ipv6_address(), TcpListener::bind);\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 40, "score": 163770.36611616964 }, { "content": "fn smoke_test<F>(new_listener: F, test_name: &'static str)\n\nwhere\n\n F: FnOnce(&Path) -> io::Result<UnixListener>,\n\n{\n\n let (mut poll, mut events) = init_with_poll();\n\n let barrier = Arc::new(Barrier::new(2));\n\n let path = temp_file(test_name);\n\n\n\n let mut listener = new_listener(&path).unwrap();\n\n\n\n assert_socket_non_blocking(&listener);\n\n assert_socket_close_on_exec(&listener);\n\n\n\n poll.registry()\n\n .register(\n\n &mut listener,\n\n TOKEN_1,\n\n Interest::WRITABLE.add(Interest::READABLE),\n\n )\n\n .unwrap();\n", "file_path": "tests/unix_listener.rs", "rank": 41, "score": 162856.29072443445 }, { "content": "#[test]\n\nfn register_during_poll() {\n\n let (mut poll, mut events) = init_with_poll();\n\n let registry = poll.registry().try_clone().unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let barrier1 = Arc::clone(&barrier);\n\n\n\n let handle1 = thread::spawn(move || {\n\n let mut stream = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n barrier1.wait();\n\n // Get closer to \"trying\" to register during a poll by doing a short\n\n // sleep before register to give 
main thread enough time to start\n\n // waiting the 5 sec long poll.\n\n sleep(Duration::from_millis(200));\n\n registry\n\n .register(&mut stream, ID1, Interest::WRITABLE)\n\n .unwrap();\n\n\n\n barrier1.wait();\n", "file_path": "tests/poll.rs", "rank": 42, "score": 162291.9410942474 }, { "content": "fn smoke_test_tcp_stream<F>(addr: SocketAddr, make_stream: F)\n\nwhere\n\n F: FnOnce(SocketAddr) -> io::Result<TcpStream>,\n\n{\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let (handle, addr) = echo_listener(addr, 1);\n\n let mut stream = make_stream(addr).unwrap();\n\n\n\n assert_socket_non_blocking(&stream);\n\n assert_socket_close_on_exec(&stream);\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::WRITABLE.add(Interest::READABLE))\n\n .expect(\"unable to register TCP stream\");\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::WRITABLE)],\n", "file_path": "tests/tcp_stream.rs", "rank": 43, "score": 161724.7725262429 }, { "content": "#[test]\n\nfn tcp_listener_two_streams() {\n\n let (mut poll1, mut events) = init_with_poll();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(3));\n\n let thread_handle1 = start_connections(address, 1, barrier.clone());\n\n\n\n poll1\n\n .registry()\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .unwrap();\n\n\n\n expect_events(\n\n &mut poll1,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::READABLE)],\n\n );\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 44, "score": 160599.51350170586 }, { "content": "#[test]\n\nfn unix_listener_from_std() {\n\n smoke_test(\n\n |path| {\n\n let listener = net::UnixListener::bind(path).unwrap();\n\n // `std::os::unix::net::UnixStream`s are blocking by default, so make sure\n\n // it is in non-blocking mode before wrapping in a Mio equivalent.\n\n 
listener.set_nonblocking(true).unwrap();\n\n Ok(UnixListener::from_std(listener))\n\n },\n\n \"unix_listener_from_std\",\n\n )\n\n}\n\n\n", "file_path": "tests/unix_listener.rs", "rank": 45, "score": 155031.0846780815 }, { "content": "#[test]\n\nfn unix_listener_reregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n let barrier = Arc::new(Barrier::new(2));\n\n\n\n let path = temp_file(\"unix_listener_reregister\");\n\n let mut listener = UnixListener::bind(&path).unwrap();\n\n poll.registry()\n\n .register(&mut listener, TOKEN_1, Interest::WRITABLE)\n\n .unwrap();\n\n\n\n let handle = open_connections(path, 1, barrier.clone());\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n poll.registry()\n\n .reregister(&mut listener, TOKEN_1, Interest::READABLE)\n\n .unwrap();\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(TOKEN_1, Interest::READABLE)],\n\n );\n\n\n\n barrier.wait();\n\n handle.join().unwrap();\n\n}\n\n\n", "file_path": "tests/unix_listener.rs", "rank": 46, "score": 155015.6714305196 }, { "content": "#[test]\n\nfn unix_listener_register() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let path = temp_file(\"unix_listener_register\");\n\n let mut listener = UnixListener::bind(path).unwrap();\n\n poll.registry()\n\n .register(&mut listener, TOKEN_1, Interest::READABLE)\n\n .unwrap();\n\n expect_no_events(&mut poll, &mut events)\n\n}\n\n\n", "file_path": "tests/unix_listener.rs", "rank": 47, "score": 154943.43785341905 }, { "content": "#[test]\n\nfn tcp_stream_std() {\n\n smoke_test_tcp_stream(any_local_address(), |addr| {\n\n let stream = net::TcpStream::connect(addr).unwrap();\n\n // `std::net::TcpStream`s are blocking by default, so make sure it is\n\n // in non-blocking mode before wrapping in a Mio equivalent.\n\n stream.set_nonblocking(true).unwrap();\n\n Ok(TcpStream::from_std(stream))\n\n });\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 48, "score": 153220.85649717532 }, { 
"content": "struct StreamInner {\n\n iocp: ReadyBinding,\n\n deferred_connect: Option<SocketAddr>,\n\n read: State<(), ()>,\n\n write: State<(Vec<u8>, usize), (Vec<u8>, usize)>,\n\n /// whether we are instantly notified of success\n\n /// (FILE_SKIP_COMPLETION_PORT_ON_SUCCESS,\n\n /// without a roundtrip through the event loop)\n\n instant_notify: bool,\n\n}\n\n\n", "file_path": "src/sys/windows/tcp.rs", "rank": 49, "score": 151597.01433514693 }, { "content": "#[test]\n\nfn poll_ok_after_cancelling_pending_ops() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n let registry = Arc::new(poll.registry().try_clone().unwrap());\n\n let registry1 = Arc::clone(&registry);\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let barrier1 = Arc::clone(&barrier);\n\n\n\n registry\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .unwrap();\n\n\n\n // Call a dummy poll just to submit an afd poll request\n\n poll.poll(&mut events, Some(Duration::from_millis(0)))\n\n .unwrap();\n\n\n", "file_path": "tests/poll.rs", "rank": 50, "score": 151483.7489953344 }, { "content": "struct StreamIo {\n\n inner: Mutex<StreamInner>,\n\n read: Overlapped, // also used for connect\n\n write: Overlapped,\n\n socket: net::TcpStream,\n\n}\n\n\n", "file_path": "src/sys/windows/tcp.rs", "rank": 51, "score": 151425.78953550116 }, { "content": "#[derive(Clone)]\n\nstruct ListenerImp {\n\n inner: FromRawArc<ListenerIo>,\n\n}\n\n\n", "file_path": "src/sys/windows/tcp.rs", "rank": 52, "score": 151308.87855005023 }, { "content": "#[test]\n\nfn connect_error() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(16);\n\n\n\n // Pick a \"random\" port that shouldn't be in use.\n\n let mut stream = match TcpStream::connect(\"127.0.0.1:38381\".parse().unwrap()) {\n\n Ok(l) => l,\n\n Err(ref e) if e.kind() == 
io::ErrorKind::ConnectionRefused => {\n\n // Connection failed synchronously. This is not a bug, but it\n\n // unfortunately doesn't get us the code coverage we want.\n\n return;\n\n }\n\n Err(e) => panic!(\"TcpStream::connect unexpected error {:?}\", e),\n\n };\n\n\n\n poll.registry()\n\n .register(&mut stream, Token(0), Interest::WRITABLE)\n\n .unwrap();\n", "file_path": "tests/tcp.rs", "rank": 53, "score": 150742.04103045087 }, { "content": "#[test]\n\nfn write_error() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(16);\n\n let (tx, rx) = channel();\n\n\n\n let listener = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n let handle = thread::spawn(move || {\n\n let (conn, _addr) = listener.accept().unwrap();\n\n rx.recv().unwrap();\n\n drop(conn);\n\n });\n\n\n\n let mut s = TcpStream::connect(addr).unwrap();\n\n poll.registry()\n\n .register(&mut s, Token(0), Interest::READABLE | Interest::WRITABLE)\n\n .unwrap();\n\n\n", "file_path": "tests/tcp.rs", "rank": 54, "score": 150742.04103045087 }, { "content": "#[test]\n\nfn reregistering() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let (thread_handle, address) = echo_listener(any_local_address(), 1);\n\n\n\n let mut stream = TcpStream::connect(address).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::READABLE)\n\n .expect(\"unable to register TCP stream\");\n\n\n\n poll.registry()\n\n .reregister(&mut stream, ID2, Interest::WRITABLE)\n\n .expect(\"unable to reregister TCP stream\");\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID2, Interest::WRITABLE)],\n\n );\n\n\n\n assert_eq!(stream.peer_addr().unwrap(), address);\n\n\n\n drop(stream);\n\n thread_handle.join().expect(\"unable to join thread\");\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 55, "score": 150736.55523773003 }, { "content": "#[test]\n\nfn registering() 
{\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let (thread_handle, address) = echo_listener(any_local_address(), 1);\n\n\n\n let mut stream = TcpStream::connect(address).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::READABLE)\n\n .expect(\"unable to register TCP stream\");\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // NOTE: more tests are done in the smoke tests above.\n\n\n\n drop(stream);\n\n thread_handle.join().expect(\"unable to join thread\");\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 56, "score": 150659.42266618428 }, { "content": "#[test]\n\nfn listen_then_close() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut listener = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut listener, Token(1), Interest::READABLE)\n\n .unwrap();\n\n drop(listener);\n\n\n\n let mut events = Events::with_capacity(128);\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .unwrap();\n\n\n\n for event in &events {\n\n if event.token() == Token(1) {\n\n panic!(\"recieved ready() on a closed TcpListener\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 57, "score": 150445.33623013116 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn tcp_register_multiple_event_loops() {\n\n init();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n let poll1 = Poll::new().unwrap();\n\n poll1\n\n .registry()\n\n .register(\n\n &mut listener,\n\n Token(0),\n\n Interest::READABLE | Interest::WRITABLE,\n\n )\n\n .unwrap();\n\n\n\n let poll2 = Poll::new().unwrap();\n\n\n\n // Try registering the same socket with the initial one\n\n let res = poll2.registry().register(\n", "file_path": "tests/registering.rs", "rank": 58, "score": 150179.545710266 }, { "content": "struct Inner 
{\n\n iocp: ReadyBinding,\n\n read: State<Vec<u8>, Vec<u8>>,\n\n write: State<Vec<u8>, (Vec<u8>, usize)>,\n\n read_buf: SocketAddrBuf,\n\n}\n\n\n", "file_path": "src/sys/windows/udp.rs", "rank": 59, "score": 148552.53029781737 }, { "content": "struct Io {\n\n read: Overlapped,\n\n write: Overlapped,\n\n socket: net::UdpSocket,\n\n inner: Mutex<Inner>,\n\n}\n\n\n", "file_path": "src/sys/windows/udp.rs", "rank": 61, "score": 148058.05076812766 }, { "content": "#[test]\n\nfn registry_behind_arc() {\n\n // `Registry` should work behind an `Arc`, being `Sync` and `Send`.\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let registry = Arc::new(poll.registry().try_clone().unwrap());\n\n let mut events = Events::with_capacity(128);\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n let barrier = Arc::new(Barrier::new(3));\n\n\n\n let registry2 = Arc::clone(&registry);\n\n let registry3 = Arc::clone(&registry);\n\n let barrier2 = Arc::clone(&barrier);\n\n let barrier3 = Arc::clone(&barrier);\n\n\n\n let handle1 = thread::spawn(move || {\n\n registry2\n\n .register(&mut listener, Token(0), Interest::READABLE)\n", "file_path": "tests/poll.rs", "rank": 62, "score": 148010.40687086686 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn deregister_without_register() {\n\n let poll = Poll::new().expect(\"unable to create Poll instance\");\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n assert_error(poll.registry().deregister(&mut listener), \"not registered\");\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 63, "score": 147893.1418909308 }, { "content": "#[cfg(target_os = \"illumos\")]\n\nfn set_nonblocking(fd: RawFd, nonblocking: bool) -> io::Result<()> {\n\n let flags = unsafe { libc::fcntl(fd, libc::F_GETFL) };\n\n if flags < 0 {\n\n return Err(io::Error::last_os_error());\n\n 
}\n\n\n\n let nflags = if nonblocking {\n\n flags | libc::O_NONBLOCK\n\n } else {\n\n flags & !libc::O_NONBLOCK\n\n };\n\n\n\n if flags != nflags {\n\n if unsafe { libc::fcntl(fd, libc::F_SETFL, nflags) } < 0 {\n\n return Err(io::Error::last_os_error());\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sys/unix/pipe.rs", "rank": 64, "score": 147509.05709934834 }, { "content": "#[test]\n\nfn bind_twice_bad() {\n\n init();\n\n\n\n let l1 = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = l1.local_addr().unwrap();\n\n assert!(TcpListener::bind(addr).is_err());\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 65, "score": 146651.38415474136 }, { "content": "/// Start a listener that accepts `n_connections` connections on the returned\n\n/// address. If a barrier is provided it will wait on it before closing the\n\n/// connection.\n\nfn start_listener(\n\n n_connections: usize,\n\n barrier: Option<Arc<Barrier>>,\n\n shutdown_write: bool,\n\n) -> (thread::JoinHandle<()>, SocketAddr) {\n\n let (sender, receiver) = channel();\n\n let thread_handle = thread::spawn(move || {\n\n let listener = net::TcpListener::bind(any_local_address()).unwrap();\n\n let local_address = listener.local_addr().unwrap();\n\n sender.send(local_address).unwrap();\n\n\n\n for _ in 0..n_connections {\n\n let (stream, _) = listener.accept().unwrap();\n\n if let Some(ref barrier) = barrier {\n\n barrier.wait();\n\n\n\n if shutdown_write {\n\n stream.shutdown(Shutdown::Write).unwrap();\n\n barrier.wait();\n\n }\n\n }\n\n drop(stream);\n\n }\n\n });\n\n (thread_handle, receiver.recv().unwrap())\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 66, "score": 146337.12474474442 }, { "content": "#[cfg(unix)]\n\n#[test]\n\nfn raw_fd() {\n\n init();\n\n\n\n let listener = TcpListener::bind(any_local_address()).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n let raw_fd1 = listener.as_raw_fd();\n\n let raw_fd2 = listener.into_raw_fd();\n\n 
assert_eq!(raw_fd1, raw_fd2);\n\n\n\n let listener = unsafe { TcpListener::from_raw_fd(raw_fd2) };\n\n assert_eq!(listener.as_raw_fd(), raw_fd1);\n\n assert_eq!(listener.local_addr().unwrap(), address);\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 67, "score": 146332.4719915091 }, { "content": "#[test]\n\nfn is_send_and_sync() {\n\n assert_send::<TcpListener>();\n\n assert_sync::<TcpListener>();\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 68, "score": 146332.4719915091 }, { "content": "/// Start `n_connections` connections to `address`. If a `barrier` is provided\n\n/// it will wait on it after each connection is made before it is dropped.\n\nfn start_connections(\n\n address: SocketAddr,\n\n n_connections: usize,\n\n barrier: Arc<Barrier>,\n\n) -> thread::JoinHandle<()> {\n\n thread::spawn(move || {\n\n for _ in 0..n_connections {\n\n let conn = net::TcpStream::connect(address).unwrap();\n\n barrier.wait();\n\n drop(conn);\n\n }\n\n })\n\n}\n", "file_path": "tests/tcp_listener.rs", "rank": 69, "score": 146332.4719915091 }, { "content": "#[test]\n\nfn no_events_after_deregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let thread_handle = start_connections(address, 1, barrier.clone());\n\n\n\n poll.registry().deregister(&mut listener).unwrap();\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // Should still be able to accept the connection.\n\n let (stream, peer_address) = listener.accept().expect(\"unable to accept connection\");\n\n assert!(peer_address.ip().is_loopback());\n", "file_path": "tests/tcp_listener.rs", "rank": 70, "score": 146332.4719915091 }, { "content": "pub fn is_error(event: &Event) -> bool {\n\n 
event.readiness.is_error()\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 71, "score": 146123.05368078506 }, { "content": "#[repr(C)]\n\nstruct Inner {\n\n // NOTE: careful modifying the order of these three fields, the `ptr_from_*`\n\n // methods depend on the layout!\n\n connect: Overlapped,\n\n read: Overlapped,\n\n write: Overlapped,\n\n // END NOTE.\n\n handle: pipe::NamedPipe,\n\n connecting: AtomicBool,\n\n io: Mutex<Io>,\n\n pool: Mutex<BufferPool>,\n\n}\n\n\n", "file_path": "src/sys/windows/named_pipe.rs", "rank": 72, "score": 145911.8298423353 }, { "content": "struct Io {\n\n // Token used to identify events\n\n token: Option<Token>,\n\n read: State,\n\n write: State,\n\n connect_error: Option<io::Error>,\n\n}\n\n\n", "file_path": "src/sys/windows/named_pipe.rs", "rank": 73, "score": 145422.96623934613 }, { "content": "#[test]\n\nfn registry_operations_are_thread_safe() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let registry = Arc::new(poll.registry().try_clone().unwrap());\n\n let registry1 = Arc::clone(&registry);\n\n let registry2 = Arc::clone(&registry);\n\n let registry3 = Arc::clone(&registry);\n\n\n\n let barrier = Arc::new(Barrier::new(4));\n\n let barrier1 = Arc::clone(&barrier);\n\n let barrier2 = Arc::clone(&barrier);\n\n let barrier3 = Arc::clone(&barrier);\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n // Expect that multiple register/deregister/reregister work fine on multiple\n\n // threads. Main thread will wait before the expect_events for all other 3\n\n // threads to do their work. 
Otherwise expect_events timeout might be too short\n\n // for all threads to complete, and call might fail.\n", "file_path": "tests/poll.rs", "rank": 74, "score": 144105.48075631785 }, { "content": "#[test]\n\nfn drop_cancels_interest_and_shuts_down() {\n\n init();\n\n\n\n use mio::net::TcpStream;\n\n use std::io;\n\n use std::io::Read;\n\n use std::net::TcpListener;\n\n use std::thread;\n\n\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n let handle = thread::spawn(move || {\n\n let mut stream = listener.incoming().next().unwrap().unwrap();\n\n stream\n\n .set_read_timeout(Some(Duration::from_secs(5)))\n\n .expect(\"set_read_timeout\");\n\n match stream.read(&mut [0; 16]) {\n\n Ok(_) => (),\n\n Err(err) => {\n", "file_path": "tests/poll.rs", "rank": 75, "score": 144028.1469492492 }, { "content": "/// Returns `true` if the connection is done.\n\nfn handle_connection_event(\n\n registry: &Registry,\n\n connection: &mut TcpStream,\n\n event: &Event,\n\n) -> io::Result<bool> {\n\n if event.is_writable() {\n\n // We can (maybe) write to the connection.\n\n match connection.write(DATA) {\n\n // We want to write the entire `DATA` buffer in a single go. 
If we\n\n // write less we'll return a short write error (same as\n\n // `io::Write::write_all` does).\n\n Ok(n) if n < DATA.len() => return Err(io::ErrorKind::WriteZero.into()),\n\n Ok(_) => {\n\n // After we've written something we'll reregister the connection\n\n // to only respond to readable events.\n\n registry.reregister(connection, event.token(), Interest::READABLE)?\n\n }\n\n // Would block \"errors\" are the OS's way of saying that the\n\n // connection is not actually ready to perform this I/O operation.\n\n Err(ref err) if would_block(err) => {}\n", "file_path": "examples/tcp_server.rs", "rank": 76, "score": 142727.83719807625 }, { "content": "#[test]\n\nfn set_get_ttl() {\n\n init();\n\n\n\n let listener = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n // set TTL, get TTL, make sure it has the expected value\n\n const TTL: u32 = 10;\n\n listener.set_ttl(TTL).unwrap();\n\n assert_eq!(listener.ttl().unwrap(), TTL);\n\n assert!(listener.take_error().unwrap().is_none());\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 77, "score": 142480.83201463602 }, { "content": "#[test]\n\nfn reregister_deregister_before_register() {\n\n let (mut pipe, _) = server();\n\n let poll = t!(Poll::new());\n\n\n\n assert_eq!(\n\n poll.registry()\n\n .reregister(&mut pipe, Token(0), Interest::READABLE,)\n\n .unwrap_err()\n\n .kind(),\n\n io::ErrorKind::NotFound,\n\n );\n\n\n\n assert_eq!(\n\n poll.registry().deregister(&mut pipe).unwrap_err().kind(),\n\n io::ErrorKind::NotFound,\n\n );\n\n}\n\n\n", "file_path": "tests/win_named_pipe.rs", "rank": 78, "score": 140705.5684506332 }, { "content": "#[test]\n\nfn reregister_deregister_different_poll() {\n\n let (mut pipe, _) = server();\n\n let poll1 = t!(Poll::new());\n\n let poll2 = t!(Poll::new());\n\n\n\n // Register with 1\n\n t!(poll1\n\n .registry()\n\n .register(&mut pipe, Token(0), Interest::READABLE));\n\n\n\n assert_eq!(\n\n poll2\n\n .registry()\n\n .reregister(&mut pipe, Token(0), 
Interest::READABLE,)\n\n .unwrap_err()\n\n .kind(),\n\n io::ErrorKind::AlreadyExists,\n\n );\n\n\n\n assert_eq!(\n\n poll2.registry().deregister(&mut pipe).unwrap_err().kind(),\n\n io::ErrorKind::AlreadyExists,\n\n );\n\n}\n", "file_path": "tests/win_named_pipe.rs", "rank": 79, "score": 136956.40448241512 }, { "content": "#[repr(C)]\n\nstruct Inner<T> {\n\n data: T,\n\n cnt: AtomicUsize,\n\n}\n\n\n\nimpl<T> FromRawArc<T> {\n\n pub fn new(data: T) -> FromRawArc<T> {\n\n let x = Box::new(Inner {\n\n data: data,\n\n cnt: AtomicUsize::new(1),\n\n });\n\n FromRawArc {\n\n _inner: unsafe { mem::transmute(x) },\n\n }\n\n }\n\n\n\n pub unsafe fn from_raw(ptr: *mut T) -> FromRawArc<T> {\n\n // Note that if we could use `mem::transmute` here to get a libstd Arc\n\n // (guaranteed) then we could just use std::sync::Arc, but this is the\n\n // crucial reason this currently exists.\n", "file_path": "src/sys/windows/from_raw_arc.rs", "rank": 80, "score": 135680.60159632 }, { "content": "#[test]\n\nfn get_ttl_without_previous_set() {\n\n init();\n\n\n\n let listener = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n // expect a get TTL to work w/o any previous set_ttl\n\n listener.ttl().expect(\"unable to get TTL for TCP listener\");\n\n assert!(listener.take_error().unwrap().is_none());\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 81, "score": 135467.62164411577 }, { "content": "#[test]\n\npub fn multicast() {\n\n init();\n\n\n\n debug!(\"Starting TEST_UDP_CONNECTIONLESS\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n let mut tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let mut rx = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n info!(\"Joining group 227.1.1.100\");\n\n let any = &\"0.0.0.0\".parse().unwrap();\n\n rx.join_multicast_v4(&\"227.1.1.100\".parse().unwrap(), any)\n\n .unwrap();\n\n\n\n info!(\"Joining group 227.1.1.101\");\n\n rx.join_multicast_v4(&\"227.1.1.101\".parse().unwrap(), any)\n\n .unwrap();\n\n\n\n 
info!(\"Registering SENDER\");\n\n poll.registry()\n", "file_path": "tests/udp_socket.rs", "rank": 82, "score": 132034.82609961968 }, { "content": "pub fn init() {\n\n static INIT: Once = Once::new();\n\n\n\n INIT.call_once(|| {\n\n env_logger::try_init().expect(\"unable to initialise logger\");\n\n\n\n // Remove all temporary files from previous test runs.\n\n let dir = temp_dir();\n\n let _ = fs::remove_dir_all(&dir);\n\n fs::create_dir_all(&dir).expect(\"unable to create temporary directory\");\n\n })\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 83, "score": 132034.82609961968 }, { "content": "#[test]\n\npub fn close_on_drop() {\n\n init();\n\n debug!(\"Starting TEST_CLOSE_ON_DROP\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // == Create & setup server socket\n\n let mut srv = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = srv.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(&mut srv, SERVER, Interest::READABLE)\n\n .unwrap();\n\n\n\n // == Create & setup client socket\n\n let mut sock = TcpStream::connect(addr).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut sock, CLIENT, Interest::WRITABLE)\n\n .unwrap();\n\n\n", "file_path": "tests/close_on_drop.rs", "rank": 84, "score": 129445.39629080202 }, { "content": "#[test]\n\npub fn udp_socket() {\n\n init();\n\n\n\n let tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let rx = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n send_recv_udp(tx, rx, false);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 85, "score": 129445.39629080202 }, { "content": "fn interests_to_epoll(interests: Interest) -> u32 {\n\n let mut kind = EPOLLET;\n\n\n\n if interests.is_readable() {\n\n kind = kind | EPOLLIN | EPOLLRDHUP;\n\n }\n\n\n\n if interests.is_writable() {\n\n kind |= EPOLLOUT;\n\n }\n\n\n\n kind as u32\n\n}\n\n\n\npub type Event = libc::epoll_event;\n\npub type Events = Vec<Event>;\n\n\n\npub mod event {\n\n use std::fmt;\n\n\n", "file_path": 
"src/sys/unix/selector/epoll.rs", "rank": 86, "score": 128857.6701539404 }, { "content": "#[test]\n\npub fn udp_socket_discard() {\n\n init();\n\n\n\n let mut tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let mut rx = UdpSocket::bind(any_local_address()).unwrap();\n\n let udp_outside = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n let tx_addr = tx.local_addr().unwrap();\n\n let rx_addr = rx.local_addr().unwrap();\n\n\n\n assert!(tx.connect(rx_addr).is_ok());\n\n assert!(udp_outside.connect(rx_addr).is_ok());\n\n assert!(rx.connect(tx_addr).is_ok());\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n checked_write!(udp_outside.send(b\"hello world\"));\n\n\n\n poll.registry()\n\n .register(&mut rx, LISTENER, Interest::READABLE)\n", "file_path": "tests/udp_socket.rs", "rank": 87, "score": 127015.36925255743 }, { "content": "#[test]\n\npub fn udp_socket_send_recv() {\n\n init();\n\n\n\n let (tx, rx) = connected_sockets();\n\n\n\n send_recv_udp(tx, rx, true);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 88, "score": 124730.465482633 }, { "content": "#[test]\n\nfn registering_after_deregistering() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(8);\n\n\n\n let mut server = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut server, SERVER, Interest::READABLE)\n\n .unwrap();\n\n\n\n poll.registry().deregister(&mut server).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut server, SERVER, Interest::READABLE)\n\n .unwrap();\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .unwrap();\n\n assert!(events.is_empty());\n\n}\n", "file_path": "tests/registering.rs", "rank": 89, "score": 119760.08689143235 }, { "content": "struct MyHandler {\n\n listener: TcpListener,\n\n connected: TcpStream,\n\n accepted: Option<TcpStream>,\n\n shutdown: bool,\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 90, "score": 119586.95838999542 }, { 
"content": "/// Bind to any port on localhost.\n\npub fn any_local_address() -> SocketAddr {\n\n \"127.0.0.1:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 91, "score": 119383.77605743063 }, { "content": "#[test]\n\nfn poll_registration() {\n\n init();\n\n let poll = Poll::new().unwrap();\n\n let registry = poll.registry();\n\n\n\n let mut source = TestEventSource::new();\n\n let token = Token(0);\n\n let interests = Interest::READABLE;\n\n registry.register(&mut source, token, interests).unwrap();\n\n assert_eq!(source.registrations.len(), 1);\n\n assert_eq!(source.registrations.get(0), Some(&(token, interests)));\n\n assert!(source.reregistrations.is_empty());\n\n assert_eq!(source.deregister_count, 0);\n\n\n\n let re_token = Token(0);\n\n let re_interests = Interest::READABLE;\n\n registry\n\n .reregister(&mut source, re_token, re_interests)\n\n .unwrap();\n\n assert_eq!(source.registrations.len(), 1);\n", "file_path": "tests/poll.rs", "rank": 92, "score": 119343.82007697475 }, { "content": "struct TestHandler {\n\n server: TcpListener,\n\n client: TcpStream,\n\n state: usize,\n\n}\n\n\n\nimpl TestHandler {\n\n fn new(srv: TcpListener, cli: TcpStream) -> TestHandler {\n\n TestHandler {\n\n server: srv,\n\n client: cli,\n\n state: 0,\n\n }\n\n }\n\n\n\n fn handle_read(&mut self, registry: &Registry, token: Token) {\n\n match token {\n\n SERVER => {\n\n trace!(\"handle_read; token=SERVER\");\n\n let mut sock = self.server.accept().unwrap().0;\n", "file_path": "tests/registering.rs", "rank": 93, "score": 118368.5648346215 }, { "content": "fn smoke_test<F>(connect_stream: F, test_name: &'static str)\n\nwhere\n\n F: FnOnce(&Path) -> io::Result<UnixStream>,\n\n{\n\n let (mut poll, mut events) = init_with_poll();\n\n let (handle, remote_addr) = new_echo_listener(1, test_name);\n\n let path = remote_addr.as_pathname().expect(\"failed to get pathname\");\n\n\n\n let mut stream = connect_stream(path).unwrap();\n\n\n\n 
assert_socket_non_blocking(&stream);\n\n assert_socket_close_on_exec(&stream);\n\n\n\n poll.registry()\n\n .register(\n\n &mut stream,\n\n TOKEN_1,\n\n Interest::WRITABLE.add(Interest::READABLE),\n\n )\n\n .unwrap();\n", "file_path": "tests/unix_stream.rs", "rank": 94, "score": 118293.31390054614 }, { "content": "#[cfg(not(target_os = \"netbsd\"))]\n\ntype UData = *mut libc::c_void;\n", "file_path": "src/sys/unix/selector/kqueue.rs", "rank": 95, "score": 118167.32721351925 }, { "content": "/// Bind to any port on localhost, using a IPv6 address.\n\npub fn any_local_ipv6_address() -> SocketAddr {\n\n \"[::1]:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 96, "score": 117236.06681236933 }, { "content": "#[test]\n\nfn poll_erroneous_registration() {\n\n init();\n\n let poll = Poll::new().unwrap();\n\n let registry = poll.registry();\n\n\n\n let mut source = ErroneousTestEventSource;\n\n let token = Token(0);\n\n let interests = Interest::READABLE;\n\n assert_error(registry.register(&mut source, token, interests), \"register\");\n\n assert_error(\n\n registry.reregister(&mut source, token, interests),\n\n \"reregister\",\n\n );\n\n assert_error(registry.deregister(&mut source), \"deregister\");\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 97, "score": 116895.99384210023 }, { "content": "#[test]\n\nfn poll_closes_fd() {\n\n init();\n\n\n\n for _ in 0..2000 {\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(4);\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(0)))\n\n .unwrap();\n\n\n\n drop(poll);\n\n }\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 98, "score": 116895.99384210023 } ]
Rust
src/gui/element/my_weakauras.rs
kawarimidoll/ajour
753e0ed24c83d183476a14ca918646a38dff23a3
use { super::{DEFAULT_FONT_SIZE, DEFAULT_PADDING}, crate::gui::{ style, AuraColumnKey, AuraColumnState, Interaction, Message, Mode, SortDirection, State, }, ajour_core::config::Flavor, ajour_core::theme::ColorPalette, ajour_weak_auras::Aura, ajour_widgets::TableRow, ajour_widgets::{header, Header}, iced::{ button, pick_list, Align, Button, Column, Container, Element, Length, PickList, Row, Space, Text, }, std::collections::HashMap, }; #[allow(clippy::too_many_arguments)] pub fn menu_container<'a>( color_palette: ColorPalette, flavor: Flavor, update_all_button_state: &'a mut button::State, refresh_button_state: &'a mut button::State, state: &HashMap<Mode, State>, num_auras: usize, updates_available: bool, is_updating: bool, updates_queued: bool, accounts_picklist_state: &'a mut pick_list::State<String>, accounts: &'a [String], chosen_account: Option<String>, ) -> Container<'a, Message> { let state = state .get(&Mode::MyWeakAuras(flavor)) .cloned() .unwrap_or_default(); let mut row = Row::new().height(Length::Units(35)); let mut update_all_button = Button::new( update_all_button_state, Text::new("Update All").size(DEFAULT_FONT_SIZE), ) .style(style::DefaultButton(color_palette)); let mut refresh_button = Button::new( refresh_button_state, Text::new("Refresh").size(DEFAULT_FONT_SIZE), ) .style(style::DefaultButton(color_palette)); let pick_list = PickList::new( accounts_picklist_state, accounts, chosen_account.clone(), Message::WeakAurasAccountSelected, ) .text_size(14) .width(Length::Units(120)) .style(style::PickList(color_palette)); if updates_available && !is_updating && !updates_queued { update_all_button = update_all_button.on_press(Interaction::UpdateAll(Mode::MyWeakAuras(flavor))); } if state == State::Ready { refresh_button = refresh_button.on_press(Interaction::Refresh(Mode::MyWeakAuras(flavor))); } let update_all_button: Element<Interaction> = update_all_button.into(); let refresh_button: Element<Interaction> = refresh_button.into(); let status_text = match 
state { State::Ready => { if updates_queued { Text::new("Updates queued. Finish them in-game.").size(DEFAULT_FONT_SIZE) } else { Text::new(format!("{} WeakAuras loaded", num_auras,)).size(DEFAULT_FONT_SIZE) } } _ => Text::new(""), }; let status_container = Container::new(status_text) .center_y() .padding(5) .style(style::NormalBackgroundContainer(color_palette)); let account_info_container = Container::new( Text::new(if chosen_account.is_some() { "" } else { "Select an Account" }) .size(DEFAULT_FONT_SIZE), ) .center_y() .padding(5) .style(style::NormalBackgroundContainer(color_palette)); row = row .push(Space::new(Length::Units(DEFAULT_PADDING), Length::Units(0))) .push(refresh_button.map(Message::Interaction)) .push(Space::new(Length::Units(7), Length::Units(0))) .push(update_all_button.map(Message::Interaction)) .push(Space::new(Length::Units(7), Length::Units(0))) .push(status_container) .push(Space::new(Length::Fill, Length::Units(0))) .push(account_info_container) .push(Space::new(Length::Units(7), Length::Units(0))) .push(pick_list) .push(Space::new(Length::Units(DEFAULT_PADDING), Length::Units(0))); let settings_column = Column::new() .push(Space::new(Length::Units(0), Length::Units(5))) .push(row); Container::new(settings_column) } pub fn data_row_container<'a, 'b>( color_palette: ColorPalette, aura: &'a Aura, column_config: &'b [(AuraColumnKey, Length, bool)], is_odd: Option<bool>, ) -> TableRow<'a, Message> { let default_height = Length::Units(26); let default_row_height = 26; let mut row_containers = vec![]; if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::Title && !hidden { Some((idx, width)) } else { None } }) .next() { let title = Text::new(aura.name()).size(DEFAULT_FONT_SIZE); let title_row = Row::new().push(title).spacing(3).align_items(Align::Center); let title_container = Container::new(title_row) .padding(5) .height(default_height) .width(*width) .center_y() 
.style(style::HoverableBrightForegroundContainer(color_palette)); row_containers.push((idx, title_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::LocalVersion && !hidden { Some((idx, width)) } else { None } }) .next() { let local_version = Text::new(aura.installed_symver().unwrap_or("-")).size(DEFAULT_FONT_SIZE); let local_version_row = Row::new() .push(local_version) .spacing(3) .align_items(Align::Center); let local_version_container = Container::new(local_version_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, local_version_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::RemoteVersion && !hidden { Some((idx, width)) } else { None } }) .next() { let remote_version = Text::new(aura.remote_symver()).size(DEFAULT_FONT_SIZE); let remote_version_row = Row::new() .push(remote_version) .spacing(3) .align_items(Align::Center); let remote_version_container = Container::new(remote_version_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, remote_version_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::Author && !hidden { Some((idx, width)) } else { None } }) .next() { let author = Text::new(aura.author()).size(DEFAULT_FONT_SIZE); let author_row = Row::new() .push(author) .spacing(3) .align_items(Align::Center); let author_container = Container::new(author_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, author_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, 
(key, width, hidden))| { if *key == AuraColumnKey::Status && !hidden { Some((idx, width)) } else { None } }) .next() { let status = Text::new(aura.status().to_string()).size(DEFAULT_FONT_SIZE); let status_row = Row::new() .push(status) .spacing(3) .align_items(Align::Center); let status_container = Container::new(status_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, status_container)); } let left_spacer = Space::new(Length::Units(DEFAULT_PADDING), Length::Units(0)); let right_spacer = Space::new(Length::Units(DEFAULT_PADDING + 5), Length::Units(0)); let mut row = Row::new().push(left_spacer).spacing(1); row_containers.sort_by(|a, b| a.0.cmp(&b.0)); for (_, elem) in row_containers.into_iter() { row = row.push(elem); } row = row.push(right_spacer); let mut table_row = TableRow::new(row) .width(Length::Fill) .inner_row_height(default_row_height); if let Some(url) = aura.url() { table_row = table_row .on_press(move |_| Message::Interaction(Interaction::OpenLink(url.to_string()))); } if is_odd == Some(true) { table_row = table_row.style(style::TableRowAlternate(color_palette)) } else { table_row = table_row.style(style::TableRow(color_palette)) } table_row } fn row_title<T: PartialEq>( column_key: T, previous_column_key: Option<T>, previous_sort_direction: Option<SortDirection>, title: &str, ) -> String { if Some(column_key) == previous_column_key { match previous_sort_direction { Some(SortDirection::Asc) => format!("{} ▲", title), Some(SortDirection::Desc) => format!("{} ▼", title), _ => title.to_string(), } } else { title.to_string() } } pub fn titles_row_header<'a>( color_palette: ColorPalette, auras: &[Aura], header_state: &'a mut header::State, column_state: &'a mut [AuraColumnState], previous_column_key: Option<AuraColumnKey>, previous_sort_direction: Option<SortDirection>, ) -> Header<'a, Message> { let mut row_titles = vec![]; for column in 
column_state.iter_mut().filter(|c| !c.hidden) { let column_key = column.key; let row_title = row_title( column_key, previous_column_key, previous_sort_direction, &column.key.title(), ); let mut row_header = Button::new( &mut column.btn_state, Text::new(row_title) .size(DEFAULT_FONT_SIZE) .width(Length::Fill), ) .width(Length::Fill) .on_press(Interaction::SortAuraColumn(column_key)); if previous_column_key == Some(column_key) { row_header = row_header.style(style::SelectedColumnHeaderButton(color_palette)); } else { row_header = row_header.style(style::ColumnHeaderButton(color_palette)); } let row_header: Element<Interaction> = row_header.into(); let row_container = Container::new(row_header.map(Message::Interaction)) .width(column.width) .style(style::NormalBackgroundContainer(color_palette)); if !auras.is_empty() { row_titles.push((column.key.as_string(), row_container)); } } Header::new( header_state, row_titles, Some(Length::Units(DEFAULT_PADDING)), Some(Length::Units(DEFAULT_PADDING + 5)), ) .spacing(1) .height(Length::Units(25)) .on_resize(3, |event| { Message::Interaction(Interaction::ResizeColumn( Mode::MyWeakAuras(Flavor::default()), event, )) }) }
use { super::{DEFAULT_FONT_SIZE, DEFAULT_PADDING}, crate::gui::{ style, AuraColumnKey, AuraColumnState, Interaction, Message, Mode, SortDirection, State, }, ajour_core::config::Flavor, ajour_core::theme::ColorPalette, ajour_weak_auras::Aura, ajour_widgets::TableRow, ajour_widgets::{header, Header}, iced::{ button, pick_list, Align, Button, Column, Container, Element, Length, PickList, Row, Space, Text, }, std::collections::HashMap, }; #[allow(clippy::too_many_arguments)] pub fn menu_container<'a>( color_palette: ColorPalette, flavor: Flavor, update_all_button_state: &'a mut button::State, refresh_button_state: &'a mut button::State, state: &HashMap<Mode, State>, num_auras: usize, updates_available: bool, is_updating: bool, updates_queued: bool, accounts_picklist_state: &'a mut pick_list::State<String>, accounts: &'a [String], chosen_account: Option<String>, ) -> Container<'a, Message> { let state = state .get(&Mode::MyWeakAuras(flavor)) .cloned() .unwrap_or_default(); let mut row = Row::new().height(Length::Units(35)); let mut update_all_button = Button::new( update_all_button_state, Text::new("Update All").size(DEFAULT_FONT_SIZE), ) .style(style::DefaultButton(color_palette)); let mut refresh_button = Button::new( refresh_button_state, Text::new("Refresh").size(DEFAULT_FONT_SIZE), ) .style(style::DefaultButton(color_palette)); let pick_list = PickList::new( accounts_picklist_state, accounts, chosen_account.clone(), Message::WeakAurasAccountSelected, ) .text_size(14) .width(Length::Units(120)) .style(style::PickList(color_palette)); if updates_available && !is_updating && !updates_queued { update_all_button = update_all_button.on_press(Interaction::UpdateAll(Mode::MyWeakAuras(flavor))); } if state == State::Ready { refresh_button = refresh_button.on_press(Interaction::Refresh(Mode::MyWeakAuras(flavor))); } let update_all_button: Element<Interaction> = update_all_button.into(); let refresh_button: Element<Interaction> = refresh_button.into(); let status_text = match 
state { State::Ready => { if updates_queued { Text::new("Updates queued. Finish them in-game.").size(DEFAULT_FONT_SIZE) } else { Text::new(format!("{} WeakAuras loaded", num_auras,)).size(DEFAULT_FONT_SIZE) } } _ => Text::new(""), }; let status_container = Container::new(status_text) .center_y() .padding(5) .style(style::NormalBackgroundContainer(color_palette)); let account_info_container = Container::new( Text::new(if chosen_account.is_some() { "" } else { "Select an Account" }) .size(DEFAULT_FONT_SIZE), ) .center_y() .padding(5) .style(style::NormalBackgroundContainer(color_palette)); row = row .push(Space::new(Length::Units(DEFAULT_PADDING), Length::Units(0))) .push(refresh_button.map(Message::Interaction)) .push(Space::new(Length::Units(7), Length::Units(0))) .push(update_all_button.map(Message::Interaction)) .push(Space::new(Length::Units(7), Length::Units(0))) .push(status_container) .push(Space::new(Length::Fill, Length::Units(0))) .push(account_info_container) .push(Space::new(Length::Units(7), Length::Units(0))) .push(pick_list) .push(Space::new(Length::Units(DEFAULT_PADDING), Length::Units(0))); let settings_column = Column::new() .push(Space::new(Length::Units(0), Length::Units(5))) .push(row); Container::new(settings_column) } pub fn data_row_container<'a, 'b>( color_palette: ColorPalette, aura: &'a Aura, column_config: &'b [(AuraColumnKey, Length, bool)], is_odd: Option<bool>, ) -> TableRow<'a, Message> { let default_height = Length::Units(26); let default_row_height = 26; let mut row_containers = vec![]; if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::Title && !hidden { Some((idx, width)) } else { None } }) .next() { let title = Text::new(aura.name()).size(DEFAULT_FONT_SIZE); let title_row = Row::new().push(title).spacing(3).align_items(Align::Center); let title_container = Container::new(title_row) .padding(5) .height(default_height) .width(*width) .center_y() 
.style(style::HoverableBrightForegroundContainer(color_palette)); row_containers.push((idx, title_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::LocalVersion && !hidden { Some((idx, width)) } else { None } }) .next() { let local_version = Text::new(aura.installed_symver().unwrap_or("-")).size(DEFAULT_FONT_SIZE); let local_version_row = Row::new() .push(local_version) .spacing(3) .align_items(Align::Center); let local_version_container = Container::new(local_version_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, local_version_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::RemoteVersion && !hidden { Some((idx, width)) } else { None } }) .next() { let remote_version = Text::new(aura.remote_symver()).size(DEFAULT_FONT_SIZE); let remote_version_row = Row::new() .push(remote_version) .spacing(3) .align_items(Align::Center); let remote_version_container = Container::new(remote_version_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, remote_version_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, (key, width, hidden))| { if *key == AuraColumnKey::Author && !hidden { Some((idx, width)) } else { None } }) .next() { let author = Text::new(aura.author()).size(DEFAULT_FONT_SIZE); let author_row = Row::new() .push(author) .spacing(3) .align_items(Align::Center); let author_container = Container::new(author_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, author_container)); } if let Some((idx, width)) = column_config .iter() .enumerate() .filter_map(|(idx, 
(key, width, hidden))| {
}) .next() { let status = Text::new(aura.status().to_string()).size(DEFAULT_FONT_SIZE); let status_row = Row::new() .push(status) .spacing(3) .align_items(Align::Center); let status_container = Container::new(status_row) .height(default_height) .width(*width) .center_y() .style(style::HoverableForegroundContainer(color_palette)); row_containers.push((idx, status_container)); } let left_spacer = Space::new(Length::Units(DEFAULT_PADDING), Length::Units(0)); let right_spacer = Space::new(Length::Units(DEFAULT_PADDING + 5), Length::Units(0)); let mut row = Row::new().push(left_spacer).spacing(1); row_containers.sort_by(|a, b| a.0.cmp(&b.0)); for (_, elem) in row_containers.into_iter() { row = row.push(elem); } row = row.push(right_spacer); let mut table_row = TableRow::new(row) .width(Length::Fill) .inner_row_height(default_row_height); if let Some(url) = aura.url() { table_row = table_row .on_press(move |_| Message::Interaction(Interaction::OpenLink(url.to_string()))); } if is_odd == Some(true) { table_row = table_row.style(style::TableRowAlternate(color_palette)) } else { table_row = table_row.style(style::TableRow(color_palette)) } table_row } fn row_title<T: PartialEq>( column_key: T, previous_column_key: Option<T>, previous_sort_direction: Option<SortDirection>, title: &str, ) -> String { if Some(column_key) == previous_column_key { match previous_sort_direction { Some(SortDirection::Asc) => format!("{} ▲", title), Some(SortDirection::Desc) => format!("{} ▼", title), _ => title.to_string(), } } else { title.to_string() } } pub fn titles_row_header<'a>( color_palette: ColorPalette, auras: &[Aura], header_state: &'a mut header::State, column_state: &'a mut [AuraColumnState], previous_column_key: Option<AuraColumnKey>, previous_sort_direction: Option<SortDirection>, ) -> Header<'a, Message> { let mut row_titles = vec![]; for column in column_state.iter_mut().filter(|c| !c.hidden) { let column_key = column.key; let row_title = row_title( column_key, 
previous_column_key, previous_sort_direction, &column.key.title(), ); let mut row_header = Button::new( &mut column.btn_state, Text::new(row_title) .size(DEFAULT_FONT_SIZE) .width(Length::Fill), ) .width(Length::Fill) .on_press(Interaction::SortAuraColumn(column_key)); if previous_column_key == Some(column_key) { row_header = row_header.style(style::SelectedColumnHeaderButton(color_palette)); } else { row_header = row_header.style(style::ColumnHeaderButton(color_palette)); } let row_header: Element<Interaction> = row_header.into(); let row_container = Container::new(row_header.map(Message::Interaction)) .width(column.width) .style(style::NormalBackgroundContainer(color_palette)); if !auras.is_empty() { row_titles.push((column.key.as_string(), row_container)); } } Header::new( header_state, row_titles, Some(Length::Units(DEFAULT_PADDING)), Some(Length::Units(DEFAULT_PADDING + 5)), ) .spacing(1) .height(Length::Units(25)) .on_resize(3, |event| { Message::Interaction(Interaction::ResizeColumn( Mode::MyWeakAuras(Flavor::default()), event, )) }) }
if *key == AuraColumnKey::Status && !hidden { Some((idx, width)) } else { None }
if_condition
[ { "content": "fn sort_auras(auras: &mut [Aura], sort_direction: SortDirection, column_key: AuraColumnKey) {\n\n match (column_key, sort_direction) {\n\n (AuraColumnKey::Title, SortDirection::Asc) => {\n\n auras.sort_by(|a, b| a.name().to_lowercase().cmp(&b.name().to_lowercase()));\n\n }\n\n (AuraColumnKey::Title, SortDirection::Desc) => {\n\n auras.sort_by(|a, b| {\n\n a.name()\n\n .to_lowercase()\n\n .cmp(&b.name().to_lowercase())\n\n .reverse()\n\n });\n\n }\n\n (AuraColumnKey::LocalVersion, SortDirection::Asc) => {\n\n auras.sort_by(|a, b| {\n\n a.installed_symver()\n\n .cmp(&b.installed_symver())\n\n .then_with(|| a.name().cmp(&b.name()))\n\n });\n\n }\n", "file_path": "src/gui/update.rs", "rank": 2, "score": 233221.69386611413 }, { "content": "pub fn titles_row_header<'a>(\n\n color_palette: ColorPalette,\n\n catalog: &Catalog,\n\n header_state: &'a mut header::State,\n\n column_state: &'a mut [CatalogColumnState],\n\n previous_column_key: Option<CatalogColumnKey>,\n\n previous_sort_direction: Option<SortDirection>,\n\n) -> Header<'a, Message> {\n\n // A row containing titles above the addon rows.\n\n let mut row_titles = vec![];\n\n\n\n for column in column_state.iter_mut().filter(|c| !c.hidden) {\n\n let column_key = column.key;\n\n\n\n let row_title = row_title(\n\n column_key,\n\n previous_column_key,\n\n previous_sort_direction,\n\n &column.key.title(),\n\n );\n", "file_path": "src/gui/element/catalog.rs", "rank": 3, "score": 232166.98335969314 }, { "content": "pub fn titles_row_header<'a>(\n\n color_palette: ColorPalette,\n\n addons: &[Addon],\n\n header_state: &'a mut header::State,\n\n column_state: &'a mut [ColumnState],\n\n previous_column_key: Option<ColumnKey>,\n\n previous_sort_direction: Option<SortDirection>,\n\n) -> Header<'a, Message> {\n\n // A row containing titles above the addon rows.\n\n let mut row_titles = vec![];\n\n\n\n for column in column_state.iter_mut().filter(|c| !c.hidden) {\n\n let column_key = column.key;\n\n\n\n let 
row_title = row_title(\n\n column_key,\n\n previous_column_key,\n\n previous_sort_direction,\n\n &column.key.title(),\n\n );\n", "file_path": "src/gui/element/my_addons.rs", "rank": 4, "score": 232166.98335969314 }, { "content": "pub fn data_row_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n addon: &'a mut Addon,\n\n is_addon_expanded: bool,\n\n expand_type: &'a ExpandType,\n\n config: &Config,\n\n column_config: &'b [(ColumnKey, Length, bool)],\n\n is_odd: Option<bool>,\n\n) -> TableRow<'a, Message> {\n\n let default_height = Length::Units(26);\n\n let default_row_height = 26;\n\n\n\n let mut row_containers = vec![];\n\n\n\n let author = addon.author().map(str::to_string);\n\n let game_version = addon.game_version().map(str::to_string);\n\n let notes = addon.notes().map(str::to_string);\n\n let website_url = addon.website_url().map(str::to_string);\n\n let changelog_url = addon.changelog_url(config.addons.global_release_channel);\n\n let repository_kind = addon.repository_kind();\n", "file_path": "src/gui/element/my_addons.rs", "rank": 5, "score": 218717.90770445848 }, { "content": "pub fn data_row_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n config: &Config,\n\n addon: &'a mut CatalogRow,\n\n column_config: &'b [(CatalogColumnKey, Length, bool)],\n\n installed_for_flavor: bool,\n\n install_addon: Option<&InstallAddon>,\n\n is_odd: Option<bool>,\n\n) -> TableRow<'a, Message> {\n\n let default_height = Length::Units(26);\n\n let default_row_height = 26;\n\n\n\n let mut row_containers = vec![];\n\n\n\n let addon_data = &addon.addon;\n\n let install_button_state = &mut addon.install_button_state;\n\n\n\n let flavor_exists_for_addon = addon_data\n\n .game_versions\n\n .iter()\n", "file_path": "src/gui/element/catalog.rs", "rank": 6, "score": 218717.90770445848 }, { "content": "pub fn data_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n scrollable_state: &'a mut scrollable::State,\n\n directory_button_state: &'a mut button::State,\n\n 
config: &Config,\n\n theme_state: &'a mut ThemeState,\n\n scale_state: &'a mut ScaleState,\n\n backup_state: &'a mut BackupState,\n\n column_settings: &'a mut ColumnSettings,\n\n column_config: &'b [(ColumnKey, Length, bool)],\n\n catalog_column_settings: &'a mut CatalogColumnSettings,\n\n catalog_column_config: &'b [(CatalogColumnKey, Length, bool)],\n\n open_config_dir_button_state: &'a mut button::State,\n\n self_update_channel_state: &'a mut SelfUpdateChannelState,\n\n default_addon_release_channel_picklist_state: &'a mut pick_list::State<GlobalReleaseChannel>,\n\n reset_columns_button_state: &'a mut button::State,\n\n) -> Container<'a, Message> {\n\n let mut scrollable = Scrollable::new(scrollable_state)\n\n .spacing(1)\n\n .height(Length::FillPortion(1))\n", "file_path": "src/gui/element/settings.rs", "rank": 8, "score": 190689.16199588243 }, { "content": "pub fn data_container<'a>(\n\n color_palette: ColorPalette,\n\n release: &Option<Release>,\n\n scrollable_state: &'a mut scrollable::State,\n\n website_button_state: &'a mut button::State,\n\n patreon_button_state: &'a mut button::State,\n\n) -> Container<'a, Message> {\n\n let ajour_title = Text::new(\"Ajour\").size(50);\n\n let ajour_title_container =\n\n Container::new(ajour_title).style(style::BrightBackgroundContainer(color_palette));\n\n\n\n let changelog_title_text = Text::new(if let Some(release) = release {\n\n format!(\"Changelog for {}\", release.tag_name)\n\n } else {\n\n \"Changelog\".to_owned()\n\n })\n\n .size(DEFAULT_FONT_SIZE);\n\n\n\n let changelog_text = Text::new(if let Some(release) = release {\n\n release.body.clone()\n", "file_path": "src/gui/element/about.rs", "rank": 9, "score": 176433.33373338086 }, { "content": "/// Returns flavor `String` in Tukui format\n\nfn format_flavor(flavor: &Flavor) -> String {\n\n let base_flavor = flavor.base_flavor();\n\n match base_flavor {\n\n Flavor::Retail => \"retail\".to_owned(),\n\n Flavor::Classic => \"classic\".to_owned(),\n\n _ => 
panic!(format!(\"Unknown base flavor {}\", base_flavor)),\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/repository/backend/tukui.rs", "rank": 11, "score": 171787.0370980535 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn menu_container<'a>(\n\n color_palette: ColorPalette,\n\n flavor: Flavor,\n\n update_all_button_state: &'a mut button::State,\n\n refresh_button_state: &'a mut button::State,\n\n state: &HashMap<Mode, State>,\n\n addons: &[Addon],\n\n config: &Config,\n\n) -> Container<'a, Message> {\n\n // MyAddons state.\n\n let state = state\n\n .get(&Mode::MyAddons(flavor))\n\n .cloned()\n\n .unwrap_or_default();\n\n\n\n // A row contain general settings.\n\n let mut settings_row = Row::new().height(Length::Units(35));\n\n\n\n let mut update_all_button = Button::new(\n\n update_all_button_state,\n", "file_path": "src/gui/element/my_addons.rs", "rank": 12, "score": 171136.426850722 }, { "content": "pub fn data_container<'a>(\n\n color_palette: ColorPalette,\n\n title: &str,\n\n description: &str,\n\n onboarding_directory_btn_state: Option<&'a mut button::State>,\n\n) -> Container<'a, Message> {\n\n let title = Text::new(title)\n\n .size(DEFAULT_FONT_SIZE)\n\n .width(Length::Fill)\n\n .horizontal_alignment(HorizontalAlignment::Center);\n\n let title_container = Container::new(title)\n\n .width(Length::Fill)\n\n .style(style::BrightBackgroundContainer(color_palette));\n\n\n\n let description = Text::new(description)\n\n .size(DEFAULT_FONT_SIZE)\n\n .width(Length::Fill)\n\n .horizontal_alignment(HorizontalAlignment::Center);\n\n let description_container = Container::new(description)\n\n .width(Length::Fill)\n", "file_path": "src/gui/element/status.rs", "rank": 13, "score": 171136.426850722 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn data_container<'a>(\n\n color_palette: ColorPalette,\n\n mode: &Mode,\n\n state: &HashMap<Mode, State>,\n\n error: &Option<anyhow::Error>,\n\n config: &Config,\n\n valid_flavors: &[Flavor],\n\n 
settings_button_state: &'a mut button::State,\n\n about_button_state: &'a mut button::State,\n\n addon_mode_button_state: &'a mut button::State,\n\n weakauras_mode_button_state: &'a mut button::State,\n\n catalog_mode_btn_state: &'a mut button::State,\n\n install_mode_btn_state: &'a mut button::State,\n\n retail_btn_state: &'a mut button::State,\n\n retail_ptr_btn_state: &'a mut button::State,\n\n retail_beta_btn_state: &'a mut button::State,\n\n classic_btn_state: &'a mut button::State,\n\n classic_ptr_btn_state: &'a mut button::State,\n\n self_update_state: &'a mut SelfUpdateState,\n\n weak_auras_is_installed: bool,\n", "file_path": "src/gui/element/menu.rs", "rank": 14, "score": 171136.426850722 }, { "content": "pub fn handle_message(ajour: &mut Ajour, message: Message) -> Result<Command<Message>> {\n\n match message {\n\n Message::CachesLoaded(result) => {\n\n log::debug!(\"Message::CachesLoaded(error: {})\", result.is_err());\n\n\n\n if let Ok((fingerprint_cache, addon_cache)) = result {\n\n ajour.fingerprint_cache = Some(Arc::new(Mutex::new(fingerprint_cache)));\n\n ajour.addon_cache = Some(Arc::new(Mutex::new(addon_cache)));\n\n }\n\n\n\n return Ok(Command::perform(async {}, Message::Parse));\n\n }\n\n Message::Parse(_) => {\n\n log::debug!(\"Message::Parse\");\n\n\n\n // Begin to parse addon folder(s).\n\n let mut commands = vec![];\n\n\n\n // If a backup directory is selected, find the latest backup\n\n if let Some(dir) = &ajour.config.backup_directory {\n", "file_path": "src/gui/update.rs", "rank": 15, "score": 169651.90933014217 }, { "content": "pub trait Renderer: iced_native::Renderer + container::Renderer + space::Renderer + Sized {\n\n fn draw<Message>(\n\n &mut self,\n\n defaults: &Self::Defaults,\n\n children: &[Element<'_, Message, Self>],\n\n layout: Layout<'_>,\n\n cursor_position: Point,\n\n resize_hovering: bool,\n\n viewport: &Rectangle,\n\n ) -> Self::Output;\n\n}\n\n\n\nimpl<'a, Message, Renderer> From<Header<'a, Message, Renderer>> for 
Element<'a, Message, Renderer>\n\nwhere\n\n Renderer: 'a + self::Renderer,\n\n Message: 'a,\n\n{\n\n fn from(header: Header<'a, Message, Renderer>) -> Element<'a, Message, Renderer> {\n\n Element::new(header)\n\n }\n", "file_path": "crates/widgets/src/widget/header.rs", "rank": 16, "score": 162443.56015685538 }, { "content": "/// Helper function to split a comma separated string into `Vec<String>`.\n\nfn split_dependencies_into_vec(value: &str) -> Vec<String> {\n\n if value.is_empty() {\n\n return vec![];\n\n }\n\n\n\n value\n\n .split([','].as_ref())\n\n .map(|s| s.trim().to_string())\n\n .collect()\n\n}\n\n\n", "file_path": "crates/core/src/parse.rs", "rank": 17, "score": 155372.72201637118 }, { "content": "/// Return the tukui API endpoint.\n\nfn api_endpoint(id: &str, flavor: &Flavor) -> String {\n\n format!(\n\n \"https://hub.wowup.io/tukui/{}/{}\",\n\n format_flavor(flavor),\n\n id\n\n )\n\n}\n\n\n\n/// Function to fetch a remote addon package which contains\n\n/// information about the addon on the repository.\n\npub(crate) async fn fetch_remote_package(\n\n id: &str,\n\n flavor: &Flavor,\n\n) -> Result<(String, TukuiPackage), DownloadError> {\n\n let url = api_endpoint(id, flavor);\n\n\n\n let timeout = Some(30);\n\n let mut resp = request_async(&url, vec![], timeout).await?;\n\n\n\n if resp.status().is_success() {\n", "file_path": "crates/core/src/repository/backend/tukui.rs", "rank": 18, "score": 153747.81797021226 }, { "content": "pub fn install_from_source(url: Uri, flavor: Flavor) -> Result<()> {\n\n task::block_on(async {\n\n log::debug!(\"Fetching remote info for {:?}\", &url);\n\n\n\n // Will use hash of url as temp name to download zip as\n\n let mut hasher = DefaultHasher::new();\n\n hasher.write(url.to_string().as_bytes());\n\n let url_hash = hasher.finish();\n\n\n\n let config = load_config().await?;\n\n let global_release_channel = config.addons.global_release_channel;\n\n\n\n let addon_cache = 
Arc::new(Mutex::new(load_addon_cache().await?));\n\n let fingerprint_cache = Arc::new(Mutex::new(load_fingerprint_cache().await?));\n\n\n\n // Fetch the remote repo metadata\n\n let mut repo_package = RepositoryPackage::from_source_url(Flavor::Retail, url)?;\n\n repo_package.resolve_metadata().await?;\n\n\n\n // Build an addon using this repo package\n", "file_path": "src/command/install.rs", "rank": 19, "score": 153684.77592094266 }, { "content": "pub fn update_all_weakauras() -> Result<()> {\n\n log::info!(\"Checking for WeakAura updates...\");\n\n\n\n task::block_on(async {\n\n let config = load_config().await?;\n\n\n\n let mut flavors_setup = 0usize;\n\n\n\n for flavor in &Flavor::ALL[..] {\n\n if let Some(account) = config.weak_auras_account.get(flavor).cloned() {\n\n flavors_setup += 1;\n\n\n\n log::info!(\"{} - Parsing WeakAuras for account {}\", flavor, &account);\n\n\n\n let wtf_path = config.get_wtf_directory_for_flavor(flavor).ok_or_else(|| format_err!(\"No WoW directory set. Launch Ajour and make sure a WoW directory is set before using the command line.\"))?;\n\n let addon_dir = config.get_addon_directory_for_flavor(flavor).ok_or_else(|| format_err!(\"No WoW directory set. 
Launch Ajour and make sure a WoW directory is set before using the command line.\"))?;\n\n\n\n let auras = parse_auras(wtf_path, account.clone())\n\n .await\n\n .context(format!(\n", "file_path": "src/command/update_weakauras.rs", "rank": 20, "score": 145188.79205624413 }, { "content": "fn row_title<T: PartialEq>(\n\n column_key: T,\n\n previous_column_key: Option<T>,\n\n previous_sort_direction: Option<SortDirection>,\n\n title: &str,\n\n) -> String {\n\n if Some(column_key) == previous_column_key {\n\n match previous_sort_direction {\n\n Some(SortDirection::Asc) => format!(\"{} ▲\", title),\n\n Some(SortDirection::Desc) => format!(\"{} ▼\", title),\n\n _ => title.to_string(),\n\n }\n\n } else {\n\n title.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/gui/element/catalog.rs", "rank": 21, "score": 141755.0653981077 }, { "content": "fn row_title<T: PartialEq>(\n\n column_key: T,\n\n previous_column_key: Option<T>,\n\n previous_sort_direction: Option<SortDirection>,\n\n title: &str,\n\n) -> String {\n\n if Some(column_key) == previous_column_key {\n\n match previous_sort_direction {\n\n Some(SortDirection::Asc) => format!(\"{} ▲\", title),\n\n Some(SortDirection::Desc) => format!(\"{} ▼\", title),\n\n _ => title.to_string(),\n\n }\n\n } else {\n\n title.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/gui/element/my_addons.rs", "rank": 22, "score": 141755.06539810772 }, { "content": "fn save_column_configs(ajour: &mut Ajour) {\n\n let my_addons_columns: Vec<_> = ajour\n\n .header_state\n\n .columns\n\n .iter()\n\n .map(ColumnConfigV2::from)\n\n .collect();\n\n\n\n let catalog_columns: Vec<_> = ajour\n\n .catalog_header_state\n\n .columns\n\n .iter()\n\n .map(ColumnConfigV2::from)\n\n .collect();\n\n\n\n let aura_columns: Vec<_> = ajour\n\n .aura_header_state\n\n .columns\n\n .iter()\n\n .map(ColumnConfigV2::from)\n", "file_path": "src/gui/update.rs", "rank": 23, "score": 131620.2980471981 }, { "content": "/// A set of rules that dictate the style of a table 
row.\n\npub trait StyleSheet {\n\n fn style(&self) -> Style;\n\n\n\n /// Produces the style of a hovered table row.\n\n fn hovered(&self) -> Style;\n\n}\n\n\n", "file_path": "crates/widgets/src/style/table_row.rs", "rank": 24, "score": 128055.89564363219 }, { "content": "pub fn main() {\n\n let opts_result = cli::get_opts();\n\n\n\n #[cfg(debug_assertions)]\n\n let is_debug = true;\n\n #[cfg(not(debug_assertions))]\n\n let is_debug = false;\n\n\n\n // If this is a clap error, we map to None since we are going to exit and display\n\n // an error message anyway and this value won't matter. If it's not an error,\n\n // the underlying `command` will drive this variable. If a `command` is passed\n\n // on the command line, Ajour functions as a CLI instead of launching the GUI.\n\n let is_cli = opts_result\n\n .as_ref()\n\n .map(|o| &o.command)\n\n .unwrap_or(&None)\n\n .is_some();\n\n\n\n // This function validates whether or not we need to exit and print any message\n\n // due to arguments passed on the command line. 
If not, it will return a\n", "file_path": "src/main.rs", "rank": 25, "score": 114740.06054644356 }, { "content": "#[allow(clippy::unnecessary_operation)]\n\nfn setup_logger(is_cli: bool, is_debug: bool) -> Result<()> {\n\n let mut logger = fern::Dispatch::new()\n\n .format(|out, message, record| {\n\n out.finish(format_args!(\n\n \"{} [{}][{}] {}\",\n\n chrono::Local::now().format(\"%H:%M:%S%.3f\"),\n\n record.target(),\n\n record.level(),\n\n message\n\n ))\n\n })\n\n .level(log::LevelFilter::Off)\n\n .level_for(\"panic\", log::LevelFilter::Error)\n\n .level_for(\"ajour\", log::LevelFilter::Trace);\n\n\n\n if !is_cli {\n\n logger = logger.level_for(\"ajour_core\", log::LevelFilter::Trace);\n\n }\n\n\n\n if is_cli || is_debug {\n", "file_path": "src/main.rs", "rank": 26, "score": 114476.26227179595 }, { "content": "pub fn backup(\n\n backup_folder: BackupFolder,\n\n destination: PathBuf,\n\n flavors: Vec<Flavor>,\n\n) -> Result<()> {\n\n task::block_on(async {\n\n let config = load_config().await?;\n\n\n\n let flavors = if flavors.is_empty() {\n\n Flavor::ALL.to_vec()\n\n } else {\n\n flavors\n\n };\n\n\n\n if !destination.is_dir() {\n\n return Err(format_err!(\"destination must be a folder, not a file\"));\n\n }\n\n\n\n let wow_dir = config.wow.directory.as_ref().ok_or_else(|| format_err!(\"No WoW directory set. 
Launch Ajour and make sure a WoW directory is set before using the command line.\"))?;\n\n\n", "file_path": "src/command/backup.rs", "rank": 27, "score": 111624.79948393707 }, { "content": "/// Ajour user-agent.\n\nfn user_agent() -> String {\n\n format!(\"ajour/{}\", env!(\"CARGO_PKG_VERSION\"))\n\n}\n\n\n\n/// Generic request function.\n\npub async fn request_async<T: ToString>(\n\n url: T,\n\n headers: Vec<(&str, &str)>,\n\n timeout: Option<u64>,\n\n) -> Result<Response<isahc::Body>, DownloadError> {\n\n // Sometimes a download url has a space.\n\n let url = url.to_string().replace(\" \", \"%20\");\n\n\n\n let mut request = Request::builder().uri(url);\n\n\n\n for (name, value) in headers {\n\n request = request.header(name, value);\n\n }\n\n\n\n request = request.header(\"user-agent\", &user_agent());\n", "file_path": "crates/core/src/network.rs", "rank": 28, "score": 109815.65586877239 }, { "content": "#[allow(unused_variables)]\n\npub fn validate_opts_or_exit(\n\n opts_result: Result<Opts, clap::Error>,\n\n is_cli: bool,\n\n is_debug: bool,\n\n) -> Opts {\n\n // If an error, we need to setup the AttachConsole fix for Windows release\n\n // so we can exit and display the error message to the user.\n\n let is_opts_error = opts_result.is_err();\n\n\n\n // Workaround to output to console even though we compile with windows_subsystem = \"windows\"\n\n // in release mode\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n if (is_cli || is_opts_error) && !is_debug {\n\n use winapi::um::wincon::{AttachConsole, ATTACH_PARENT_PROCESS};\n\n\n\n unsafe {\n\n AttachConsole(ATTACH_PARENT_PROCESS);\n\n }\n\n }\n", "file_path": "src/cli.rs", "rank": 29, "score": 108752.47454385112 }, { "content": "pub trait Renderer: iced_native::Renderer {\n\n type Style: Default;\n\n #[allow(clippy::too_many_arguments)]\n\n fn draw<Message>(\n\n &mut self,\n\n defaults: &Self::Defaults,\n\n layout: Layout<'_>,\n\n cursor_position: Point,\n\n style: &Self::Style,\n\n content: &Element<'_, 
Message, Self>,\n\n viewport: &Rectangle,\n\n custom_bounds: &Rectangle,\n\n ) -> Self::Output;\n\n}\n\n\n\nimpl<'a, Message, Renderer> From<TableRow<'a, Message, Renderer>> for Element<'a, Message, Renderer>\n\nwhere\n\n Renderer: 'a + self::Renderer,\n\n Message: 'a,\n\n{\n\n fn from(table_row: TableRow<'a, Message, Renderer>) -> Element<'a, Message, Renderer> {\n\n Element::new(table_row)\n\n }\n\n}\n", "file_path": "crates/widgets/src/widget/table_row.rs", "rank": 30, "score": 107896.86991366112 }, { "content": "pub fn update_both() -> Result<()> {\n\n update_all_addons()?;\n\n\n\n update_all_weakauras()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/command/mod.rs", "rank": 31, "score": 106081.4537042104 }, { "content": "fn str_to_flavor(s: &str) -> Result<Flavor, &'static str> {\n\n match s {\n\n \"retail\" => Ok(Flavor::Retail),\n\n \"beta\" => Ok(Flavor::RetailBeta),\n\n \"ptr\" => Ok(Flavor::RetailPTR),\n\n \"classic\" => Ok(Flavor::Classic),\n\n \"classic_ptr\" => Ok(Flavor::ClassicPTR),\n\n _ => Err(\"valid values are ['retail','ptr','beta','classic','classic_ptr']\"),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum BackupFolder {\n\n Both,\n\n AddOns,\n\n WTF,\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 32, "score": 105194.14245655511 }, { "content": "/// Starts the GUI.\n\n/// This function does not return.\n\npub fn run(opts: Opts) {\n\n let config: Config = Config::load_or_default().expect(\"loading config on application startup\");\n\n\n\n log::debug!(\"config loaded:\\n{:#?}\", &config);\n\n\n\n let mut settings = Settings::default();\n\n settings.window.size = config.window_size.unwrap_or((900, 620));\n\n\n\n #[cfg(not(target_os = \"linux\"))]\n\n // TODO (casperstorm): Due to an upstream bug, min_size causes the window to become unresizable\n\n // on Linux.\n\n // @see: https://github.com/casperstorm/ajour/issues/427\n\n {\n\n settings.window.min_size = Some((600, 300));\n\n }\n\n\n\n #[cfg(feature = \"wgpu\")]\n\n {\n\n let 
antialiasing = opts.antialiasing.unwrap_or(true);\n\n log::debug!(\"antialiasing: {}\", antialiasing);\n", "file_path": "src/gui/mod.rs", "rank": 33, "score": 101104.8795704832 }, { "content": "pub fn update_all_addons() -> Result<()> {\n\n log::info!(\"Checking for addon updates...\");\n\n\n\n task::block_on(async {\n\n let config = load_config().await?;\n\n let global_release_channel = config.addons.global_release_channel;\n\n\n\n let fingerprint_cache: Arc<Mutex<_>> =\n\n Arc::new(Mutex::new(load_fingerprint_cache().await?));\n\n\n\n let addon_cache: Arc<Mutex<_>> = Arc::new(Mutex::new(load_addon_cache().await?));\n\n\n\n let mut addons_to_update = vec![];\n\n\n\n // Update addons for both flavors\n\n for flavor in Flavor::ALL.iter() {\n\n // Only returns None if the path isn't set in the config\n\n let addon_directory = config.get_addon_directory_for_flavor(flavor).ok_or_else(|| format_err!(\"No WoW directory set. Launch Ajour and make sure a WoW directory is set before using the command line.\"))?;\n\n\n\n if let Ok(addons) = read_addon_directory(\n", "file_path": "src/command/update_addons.rs", "rank": 34, "score": 100960.16640060913 }, { "content": "fn query_and_sort_catalog(ajour: &mut Ajour) {\n\n if let Some(catalog) = &ajour.catalog {\n\n let query = ajour\n\n .catalog_search_state\n\n .query\n\n .as_ref()\n\n .map(|s| s.to_lowercase());\n\n let flavor = &ajour.config.wow.flavor;\n\n let source = &ajour.catalog_search_state.source;\n\n let category = &ajour.catalog_search_state.category;\n\n let result_size = ajour.catalog_search_state.result_size.as_usize();\n\n\n\n // Use default, can tweak if needed in future\n\n let fuzzy_match_config = SkimScoreConfig {\n\n ..Default::default()\n\n };\n\n let fuzzy_matcher = SkimMatcherV2::default().score_config(fuzzy_match_config);\n\n\n\n let mut catalog_rows_and_score = catalog\n\n .addons\n", "file_path": "src/gui/update.rs", "rank": 35, "score": 99673.25224017288 }, { "content": "/// Log any errors\n\npub fn 
log_error(error: &anyhow::Error) {\n\n log::error!(\"{}\", error);\n\n\n\n let mut causes = error.chain();\n\n // Remove first entry since it's same as top level error\n\n causes.next();\n\n\n\n for cause in causes {\n\n log::error!(\"caused by: {}\", cause);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 36, "score": 96606.6948331338 }, { "content": "pub fn config_dir() -> PathBuf {\n\n CONFIG_DIR.lock().unwrap().clone()\n\n}\n\n\n", "file_path": "crates/core/src/fs/mod.rs", "rank": 37, "score": 96529.85361705675 }, { "content": "fn format_interface_into_game_version(interface: &str) -> String {\n\n if interface.len() == 5 {\n\n let major = interface[..1].parse::<u8>();\n\n let minor = interface[1..3].parse::<u8>();\n\n let patch = interface[3..5].parse::<u8>();\n\n if let (Ok(major), Ok(minor), Ok(patch)) = (major, minor, patch) {\n\n return format!(\"{}.{}.{}\", major, minor, patch);\n\n }\n\n }\n\n\n\n interface.to_owned()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_toc_title() {\n\n let title = RE_TOC_TITLE.replace_all(\"Atlas |cFF0099FF[Foobar]|r\", \"$1\");\n", "file_path": "crates/core/src/parse.rs", "rank": 38, "score": 95442.70631140478 }, { "content": "/// Return the wowi API endpoint.\n\nfn api_endpoint(ids: &str) -> String {\n\n format!(\"{}/{}.json\", API_ENDPOINT, ids)\n\n}\n\n\n\n/// Returns the addon website url.\n\npub(crate) fn addon_url(id: &str) -> String {\n\n format!(\"{}{}\", ADDON_URL, id)\n\n}\n\n\n\n/// Function to fetch a remote addon package which contains\n\n/// information about the addon on the repository.\n\npub(crate) async fn fetch_remote_packages(\n\n ids: &[String],\n\n) -> Result<Vec<WowIPackage>, DownloadError> {\n\n let url = api_endpoint(&ids.join(\",\"));\n\n let timeout = Some(30);\n\n let mut resp = request_async(&url, vec![], timeout).await?;\n\n\n\n if resp.status().is_success() {\n\n let packages = resp.json();\n", "file_path": 
"crates/core/src/repository/backend/wowi.rs", "rank": 39, "score": 95442.70631140478 }, { "content": "fn apply_config(ajour: &mut Ajour, config: Config) {\n\n // Set column widths from the config\n\n match &config.column_config {\n\n ColumnConfig::V1 {\n\n local_version_width,\n\n remote_version_width,\n\n status_width,\n\n } => {\n\n ajour\n\n .header_state\n\n .columns\n\n .get_mut(1)\n\n .as_mut()\n\n .unwrap()\n\n .width = Length::Units(*local_version_width);\n\n ajour\n\n .header_state\n\n .columns\n\n .get_mut(2)\n\n .as_mut()\n", "file_path": "src/gui/mod.rs", "rank": 40, "score": 93332.70235732979 }, { "content": "pub fn get_opts() -> Result<Opts, clap::Error> {\n\n let args = env::args_os();\n\n\n\n Opts::from_iter_safe(args)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 41, "score": 92516.8873497044 }, { "content": "/// Deletes an Addon and all dependencies from disk.\n\npub fn delete_addons(addon_folders: &[AddonFolder]) -> Result<()> {\n\n for folder in addon_folders {\n\n let path = &folder.path;\n\n if path.exists() {\n\n remove_dir_all(path)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/core/src/fs/addon.rs", "rank": 42, "score": 86548.69704398527 }, { "content": "/// Remove a file, retrying if the operation fails because of permissions\n\n///\n\n/// Will retry for ~30 seconds with longer and longer delays between each, to allow for virus scan\n\n/// and other automated operations to complete.\n\npub fn remove_file<P>(path: P) -> io::Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n // 21 Fibonacci steps starting at 1 ms is ~28 seconds total\n\n // See https://github.com/rust-lang/rustup/pull/1873 where this was used by Rustup to work around\n\n // virus scanning file locks\n\n let path = path.as_ref();\n\n\n\n retry(\n\n Fibonacci::from_millis(1).take(21),\n\n || match fs::remove_file(path) {\n\n Ok(_) => OperationResult::Ok(()),\n\n Err(e) => match e.kind() {\n\n io::ErrorKind::PermissionDenied => OperationResult::Retry(e),\n\n _ 
=> OperationResult::Err(e),\n\n },\n\n },\n\n )\n\n .map_err(|e| match e {\n", "file_path": "crates/core/src/utility.rs", "rank": 43, "score": 84908.77529549864 }, { "content": "/// Helper function to parse a given TOC file\n\n/// (`DirEntry`) into a `Addon` struct.\n\n///\n\n/// TOC format summary:\n\n/// https://wowwiki.fandom.com/wiki/TOC_format\n\npub fn parse_toc_path(toc_path: &PathBuf) -> Option<AddonFolder> {\n\n //direntry\n\n let file = if let Ok(file) = File::open(toc_path) {\n\n file\n\n } else {\n\n return None;\n\n };\n\n let reader = BufReader::new(file);\n\n\n\n let path = toc_path.parent()?.to_path_buf();\n\n let id = path.file_name()?.to_str()?.to_string();\n\n let mut title: Option<String> = None;\n\n let mut interface: Option<String> = None;\n\n let mut author: Option<String> = None;\n\n let mut notes: Option<String> = None;\n\n let mut version: Option<String> = None;\n\n let mut dependencies: Vec<String> = Vec::new();\n\n let mut wowi_id: Option<String> = None;\n\n let mut tukui_id: Option<String> = None;\n\n let mut curse_id: Option<i32> = None;\n", "file_path": "crates/core/src/parse.rs", "rank": 44, "score": 81541.01718323611 }, { "content": "/// Rename a file or directory to a new name, retrying if the operation fails because of permissions\n\n///\n\n/// Will retry for ~30 seconds with longer and longer delays between each, to allow for virus scan\n\n/// and other automated operations to complete.\n\npub fn rename<F, T>(from: F, to: T) -> io::Result<()>\n\nwhere\n\n F: AsRef<Path>,\n\n T: AsRef<Path>,\n\n{\n\n // 21 Fibonacci steps starting at 1 ms is ~28 seconds total\n\n // See https://github.com/rust-lang/rustup/pull/1873 where this was used by Rustup to work around\n\n // virus scanning file locks\n\n let from = from.as_ref();\n\n let to = to.as_ref();\n\n\n\n retry(Fibonacci::from_millis(1).take(21), || {\n\n match fs::rename(from, to) {\n\n Ok(_) => OperationResult::Ok(()),\n\n Err(e) => match e.kind() {\n\n 
io::ErrorKind::PermissionDenied => OperationResult::Retry(e),\n\n _ => OperationResult::Err(e),\n\n },\n\n }\n\n })\n\n .map_err(|e| match e {\n\n RetryError::Operation { error, .. } => error,\n\n RetryError::Internal(message) => io::Error::new(io::ErrorKind::Other, message),\n\n })\n\n}\n\n\n", "file_path": "crates/core/src/utility.rs", "rank": 45, "score": 81014.09380440322 }, { "content": "/// Logic to help pick the right World of Warcraft folder. We want the root folder.\n\npub fn wow_path_resolution(path: Option<PathBuf>) -> Option<PathBuf> {\n\n if let Some(path) = path {\n\n // Known folders in World of Warcraft dir\n\n let known_folders = [\"_retail_\", \"_classic_\", \"_ptr_\"];\n\n\n\n // If chosen path has any of the known Wow folders, we have the right one.\n\n for folder in known_folders.iter() {\n\n if path.join(folder).exists() {\n\n return Some(path);\n\n }\n\n }\n\n\n\n // Iterate ancestors. If we find any of the known folders we can guess the root.\n\n for ancestor in path.as_path().ancestors() {\n\n if let Some(file_name) = ancestor.file_name() {\n\n for folder in known_folders.iter() {\n\n if file_name == OsStr::new(folder) {\n\n return ancestor.parent().map(|p| p.to_path_buf());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "crates/core/src/utility.rs", "rank": 46, "score": 79941.07704470516 }, { "content": "pub fn fingerprint_addon_dir(addon_dir: &PathBuf) -> Result<u32, ParseError> {\n\n let mut to_fingerprint = HashSet::new();\n\n let mut to_parse = VecDeque::new();\n\n let root_dir = addon_dir.parent().ok_or(ParseError::NoParentDirectory {\n\n dir: addon_dir.to_owned(),\n\n })?;\n\n\n\n // Add initial files\n\n let glob_pattern = format!(\n\n \"{}/**/*.*\",\n\n addon_dir.to_str().ok_or(ParseError::InvalidUTF8Path {\n\n path: addon_dir.to_owned(),\n\n })?\n\n );\n\n for path in glob::glob(&glob_pattern)? 
{\n\n let path = path?;\n\n if !path.is_file() {\n\n continue;\n\n }\n\n\n", "file_path": "crates/core/src/parse.rs", "rank": 47, "score": 78470.98973042962 }, { "content": "/// Deletes all saved varaible files correlating to `[AddonFolder]`.\n\npub fn delete_saved_variables(addon_folders: &[AddonFolder], wtf_path: &PathBuf) -> Result<()> {\n\n for entry in WalkDir::new(&wtf_path)\n\n .into_iter()\n\n .filter_map(std::result::Result::ok)\n\n {\n\n let path = entry.path();\n\n let parent_name = path\n\n .parent()\n\n .and_then(|a| a.file_name())\n\n .and_then(|a| a.to_str());\n\n\n\n if parent_name == Some(\"SavedVariables\") {\n\n let file_name = path\n\n .file_stem()\n\n .and_then(|a| a.to_str())\n\n .map(|a| a.trim_end_matches(\".bak\"));\n\n\n\n // NOTE: Will reject \"Foobar_<invalid utf8>\".\n\n if let Some(file_name_str) = file_name {\n\n for folder in addon_folders {\n", "file_path": "crates/core/src/fs/addon.rs", "rank": 63, "score": 75765.11218629766 }, { "content": "use iced_native::Point;\n\n\n\n#[derive(Debug, Default)]\n\npub struct State {\n\n pub resize_hovering: bool,\n\n pub resizing: bool,\n\n pub starting_cursor_pos: Option<Point>,\n\n pub starting_left_width: f32,\n\n pub starting_right_width: f32,\n\n pub resizing_idx: usize,\n\n}\n", "file_path": "crates/widgets/src/widget/header/state.rs", "rank": 64, "score": 71268.60122448672 }, { "content": "//! 
Decorate content and apply alignment.\n\nuse iced_core::{Background, Color};\n\n\n\n/// The appearance of a table row.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Style {\n\n pub text_color: Option<Color>,\n\n pub background: Option<Background>,\n\n pub border_radius: f32,\n\n pub border_width: f32,\n\n pub border_color: Color,\n\n pub offset_left: f32,\n\n pub offset_right: f32,\n\n}\n\n\n\n/// A set of rules that dictate the style of a table row.\n", "file_path": "crates/widgets/src/style/table_row.rs", "rank": 65, "score": 70961.76445531828 }, { "content": " }\n\n}\n\n\n\nimpl std::default::Default for Box<dyn StyleSheet> {\n\n fn default() -> Self {\n\n Box::new(Default)\n\n }\n\n}\n\n\n\nimpl<T> From<T> for Box<dyn StyleSheet>\n\nwhere\n\n T: 'static + StyleSheet,\n\n{\n\n fn from(style: T) -> Self {\n\n Box::new(style)\n\n }\n\n}\n", "file_path": "crates/widgets/src/style/table_row.rs", "rank": 66, "score": 70937.12491699512 }, { "content": "fn hex_to_color(hex: &str) -> Option<iced_native::Color> {\n\n if hex.len() == 7 {\n\n let hash = &hex[0..1];\n\n let r = u8::from_str_radix(&hex[1..3], 16);\n\n let g = u8::from_str_radix(&hex[3..5], 16);\n\n let b = u8::from_str_radix(&hex[5..7], 16);\n\n\n\n return match (hash, r, g, b) {\n\n (\"#\", Ok(r), Ok(g), Ok(b)) => Some(iced_native::Color {\n\n r: r as f32 / 255.0,\n\n g: g as f32 / 255.0,\n\n b: b as f32 / 255.0,\n\n a: 1.0,\n\n }),\n\n _ => None,\n\n };\n\n }\n\n\n\n None\n\n}\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 67, "score": 68431.19318761671 }, { "content": "struct Default;\n\n\n\nimpl StyleSheet for Default {\n\n fn style(&self) -> Style {\n\n Style {\n\n text_color: None,\n\n background: None,\n\n border_radius: 0.0,\n\n border_width: 0.0,\n\n border_color: Color::TRANSPARENT,\n\n offset_right: 0.0,\n\n offset_left: 0.0,\n\n }\n\n }\n\n\n\n fn hovered(&self) -> Style {\n\n Style {\n\n background: Some(Background::Color(Color::from_rgb(0.90, 0.90, 0.90))),\n\n ..self.style()\n\n 
}\n", "file_path": "crates/widgets/src/style/table_row.rs", "rank": 68, "score": 68269.709302721 }, { "content": "fn main() {\n\n #[cfg(windows)]\n\n embed_resource::compile(\"resources/windows/res.rc\");\n\n}\n", "file_path": "build.rs", "rank": 69, "score": 59693.502238678804 }, { "content": "/// A trait defining a way to back things up to the fs\n\npub trait Backup {\n\n fn backup(&self) -> Result<()>;\n\n}\n\n\n\n/// Back up folders to a zip archive and save on the fs\n\npub struct ZipBackup {\n\n src: Vec<BackupFolder>,\n\n dest: PathBuf,\n\n}\n\n\n\nimpl ZipBackup {\n\n pub(crate) fn new(src: Vec<BackupFolder>, dest: impl AsRef<Path>) -> ZipBackup {\n\n ZipBackup {\n\n src,\n\n dest: dest.as_ref().to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl Backup for ZipBackup {\n", "file_path": "crates/core/src/fs/backup.rs", "rank": 70, "score": 55606.19595303961 }, { "content": "fn sort_addons(\n\n addons: &mut [Addon],\n\n global_release_channel: GlobalReleaseChannel,\n\n sort_direction: SortDirection,\n\n column_key: ColumnKey,\n\n) {\n\n match (column_key, sort_direction) {\n\n (ColumnKey::Title, SortDirection::Asc) => {\n\n addons.sort_by(|a, b| a.title().to_lowercase().cmp(&b.title().to_lowercase()));\n\n }\n\n (ColumnKey::Title, SortDirection::Desc) => {\n\n addons.sort_by(|a, b| {\n\n a.title()\n\n .to_lowercase()\n\n .cmp(&b.title().to_lowercase())\n\n .reverse()\n\n .then_with(|| {\n\n a.relevant_release_package(global_release_channel)\n\n .cmp(&b.relevant_release_package(global_release_channel))\n\n })\n", "file_path": "src/gui/update.rs", "rank": 71, "score": 54598.859744779926 }, { "content": "fn build_addons(\n\n flavor: Flavor,\n\n repo_packages: &mut Vec<RepositoryPackage>,\n\n addon_folders: &mut Vec<AddonFolder>,\n\n cache_entries: &[AddonCacheEntry],\n\n) -> Vec<Addon> {\n\n let cached_addons: Vec<_> = cache_entries\n\n .iter()\n\n .filter_map(|e| {\n\n // Get and remove any matching repo package\n\n let repo_idx = repo_packages\n\n .iter()\n\n 
.position(|r| r.id == e.repository_id && r.kind == e.repository)?;\n\n let repo_package = repo_packages.remove(repo_idx);\n\n\n\n // Get and remove all matching addon folders\n\n let folder_idxs: Vec<_> = addon_folders\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, f)| e.folder_names.contains(&f.id))\n", "file_path": "crates/core/src/parse.rs", "rank": 72, "score": 53165.50785391324 }, { "content": "fn sort_catalog_addons(\n\n addons: &mut [CatalogRow],\n\n sort_direction: SortDirection,\n\n column_key: CatalogColumnKey,\n\n flavor: &Flavor,\n\n) {\n\n match (column_key, sort_direction) {\n\n (CatalogColumnKey::Title, SortDirection::Asc) => {\n\n addons.sort_by(|a, b| a.addon.name.cmp(&b.addon.name));\n\n }\n\n (CatalogColumnKey::Title, SortDirection::Desc) => {\n\n addons.sort_by(|a, b| a.addon.name.cmp(&b.addon.name).reverse());\n\n }\n\n (CatalogColumnKey::Description, SortDirection::Asc) => {\n\n addons.sort_by(|a, b| a.addon.summary.cmp(&b.addon.summary));\n\n }\n\n (CatalogColumnKey::Description, SortDirection::Desc) => {\n\n addons.sort_by(|a, b| a.addon.summary.cmp(&b.addon.summary).reverse());\n\n }\n\n (CatalogColumnKey::Source, SortDirection::Asc) => {\n", "file_path": "src/gui/update.rs", "rank": 73, "score": 53165.50785391324 }, { "content": "/// Write each path to the zip archive\n\nfn zip_write(\n\n path: &Path,\n\n prefix: &Path,\n\n buffer: &mut Vec<u8>,\n\n writer: &mut ZipWriter<BufWriter<File>>,\n\n options: FileOptions,\n\n) -> Result<()> {\n\n if !path.exists() {\n\n return Err(FilesystemError::FileDoesntExist {\n\n path: path.to_owned(),\n\n });\n\n }\n\n\n\n // On windows, convers `\\` to `/`\n\n let normalized_path = path\n\n .to_slash()\n\n .ok_or(FilesystemError::NormalizingPathSlash {\n\n path: path.to_path_buf(),\n\n })?;\n\n let normalized_prefix = prefix\n", "file_path": "crates/core/src/fs/backup.rs", "rank": 74, "score": 51839.737702859056 }, { "content": "#[cfg(target_os = \"macos\")]\n\nfn extract_binary_from_tar(\n\n archive_path: 
&PathBuf,\n\n temp_file: &PathBuf,\n\n bin_name: &str,\n\n) -> Result<(), FilesystemError> {\n\n use flate2::read::GzDecoder;\n\n use std::fs::File;\n\n use std::io::copy;\n\n use tar::Archive;\n\n\n\n let mut archive = Archive::new(GzDecoder::new(File::open(&archive_path)?));\n\n\n\n let mut temp_file = File::create(temp_file)?;\n\n\n\n for file in archive.entries()? {\n\n let mut file = file?;\n\n\n\n let path = file.path()?;\n\n\n\n if let Some(name) = path.to_str() {\n", "file_path": "crates/core/src/utility.rs", "rank": 75, "score": 51839.737702859056 }, { "content": "/// Defines a serializable struct that should persist on the filesystem inside the\n\n/// Ajour config directory.\n\npub trait PersistentData: DeserializeOwned + Serialize {\n\n /// Only method required to implement PersistentData on an object. Always relative to\n\n /// the config folder for Ajour.\n\n fn relative_path() -> PathBuf;\n\n\n\n /// Returns the full file path. Will create any parent directories that don't\n\n /// exist.\n\n fn path() -> Result<PathBuf> {\n\n let path = config_dir().join(Self::relative_path());\n\n\n\n if let Some(dir) = path.parent() {\n\n std::fs::create_dir_all(dir)?;\n\n }\n\n\n\n Ok(path)\n\n }\n\n\n\n /// Load from `PersistentData::path()`.\n\n fn load() -> Result<Self> {\n\n let path = Self::path()?;\n", "file_path": "crates/core/src/fs/save.rs", "rank": 76, "score": 48327.87483359516 }, { "content": "#[derive(Clone)]\n\nstruct MaybeAuraDisplay(Option<AuraDisplay>);\n\n\n\nimpl MaybeAuraDisplay {\n\n fn into_inner(self) -> Option<AuraDisplay> {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<'lua> FromLua<'lua> for MaybeAuraDisplay {\n\n fn from_lua(lua_value: Value<'lua>, _lua: &'lua Lua) -> Result<Self, mlua::Error> {\n\n if let Value::Table(table) = lua_value {\n\n if let Some(url) = table.get::<_, Option<String>>(\"url\")? 
{\n\n if let Ok(uri) = url.parse::<http::Uri>() {\n\n let mut path = uri.path().split_terminator('/');\n\n path.next();\n\n\n\n let slug = path.next();\n\n\n\n if let Some(slug) = slug {\n\n let parent = table.get(\"parent\")?;\n", "file_path": "crates/weak_auras/src/lib.rs", "rank": 77, "score": 47227.51396996029 }, { "content": "#[derive(Debug, Clone)]\n\nstruct AuraDisplay {\n\n url: String,\n\n slug: String,\n\n version: u16,\n\n version_string: Option<String>,\n\n parent: Option<String>,\n\n id: String,\n\n uid: String,\n\n ignore_updates: bool,\n\n skip_version: Option<u16>,\n\n}\n\n\n", "file_path": "crates/weak_auras/src/lib.rs", "rank": 78, "score": 45242.34637895925 }, { "content": "#[derive(Debug, Deserialize, Clone)]\n\nstruct AuraChangelog {\n\n text: Option<String>,\n\n format: Option<String>,\n\n}\n\n\n", "file_path": "crates/weak_auras/src/lib.rs", "rank": 79, "score": 45242.279485314815 }, { "content": "fn handle_self_update_temp(cleanup_path: &PathBuf) -> Result<()> {\n\n #[cfg(not(target_os = \"linux\"))]\n\n let current_bin = env::current_exe()?;\n\n\n\n #[cfg(target_os = \"linux\")]\n\n let current_bin =\n\n PathBuf::from(env::var(\"APPIMAGE\").context(\"error getting APPIMAGE env variable\")?);\n\n\n\n // Fix for self updating pre 0.5.4 to >= 0.5.4\n\n //\n\n // Pre 0.5.4, `cleanup_path` is actually the file name of the main bin name that\n\n // got passed via the CLI in the self update process. We want to rename the\n\n // current bin to that bin name. 
This was passed as a string of just the file\n\n // name, so we want to make an actual full path out of it first.\n\n if current_bin\n\n .file_name()\n\n .unwrap_or_default()\n\n .to_str()\n\n .unwrap_or_default()\n\n .starts_with(\"tmp_\")\n", "file_path": "src/main.rs", "rank": 80, "score": 43883.82872328241 }, { "content": "fn str_to_backup_folder(s: &str) -> Result<BackupFolder, &'static str> {\n\n match s {\n\n \"both\" => Ok(BackupFolder::Both),\n\n \"wtf\" => Ok(BackupFolder::WTF),\n\n \"addons\" => Ok(BackupFolder::AddOns),\n\n _ => Err(\"valid values are ['both','wtf','addons']\"),\n\n }\n\n}\n", "file_path": "src/cli.rs", "rank": 81, "score": 41524.86627995431 }, { "content": "/// Finds a case sensitive path from an insensitive path\n\n/// Useful if, say, a WoW addon points to a local path in a different case but you're not on Windows\n\nfn find_file<P>(path: P) -> Option<PathBuf>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut current = path.as_ref();\n\n let mut to_finds = Vec::new();\n\n\n\n // Find first parent that exists\n\n while !current.exists() {\n\n to_finds.push(current.file_name()?);\n\n current = current.parent()?;\n\n }\n\n\n\n // Match to finds\n\n let mut current = current.to_path_buf();\n\n to_finds.reverse();\n\n for to_find in to_finds {\n\n let mut children = current.read_dir().ok()?;\n\n let lower = to_find.to_str()?.to_ascii_lowercase();\n\n let found = children\n", "file_path": "crates/core/src/parse.rs", "rank": 82, "score": 41289.6055595739 }, { "content": "pub struct UnclickableColumnHeaderButton(pub ColorPalette);\n\nimpl button::StyleSheet for UnclickableColumnHeaderButton {\n\n fn active(&self) -> button::Style {\n\n ColumnHeaderButton(self.0).active()\n\n }\n\n\n\n fn disabled(&self) -> button::Style {\n\n self.active()\n\n }\n\n}\n\n\n\npub struct SelectedColumnHeaderButton(pub ColorPalette);\n\nimpl button::StyleSheet for SelectedColumnHeaderButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n background: 
Some(Background::Color(self.0.base.background)),\n\n text_color: Color {\n\n ..self.0.bright.primary\n\n },\n\n border_radius: 2.0,\n", "file_path": "src/gui/style.rs", "rank": 83, "score": 40144.466207212194 }, { "content": "use ajour_core::theme::ColorPalette;\n\nuse ajour_widgets::table_row;\n\nuse iced::{button, checkbox, container, pick_list, scrollable, text_input, Background, Color};\n\n\n\npub struct BrightForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for BrightForegroundContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: Some(Background::Color(self.0.base.foreground)),\n\n text_color: Some(self.0.bright.surface),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct NormalForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for NormalForegroundContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: Some(Background::Color(self.0.base.foreground)),\n", "file_path": "src/gui/style.rs", "rank": 84, "score": 40142.201402010745 }, { "content": "}\n\n\n\npub struct Row(pub ColorPalette);\n\nimpl container::StyleSheet for Row {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: Some(Background::Color(self.0.base.background)),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct TableRow(pub ColorPalette);\n\nimpl table_row::StyleSheet for TableRow {\n\n fn style(&self) -> table_row::Style {\n\n table_row::Style {\n\n text_color: None,\n\n background: Some(Background::Color(self.0.base.foreground)),\n\n border_radius: 0.0,\n\n border_width: 0.0,\n", "file_path": "src/gui/style.rs", "rank": 85, "score": 40141.21170059306 }, { "content": " }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(Color {\n\n a: 0.35,\n\n ..self.0.normal.error\n\n })),\n\n text_color: self.0.bright.error,\n\n ..self.active()\n\n }\n\n }\n\n}\n\n\n\npub 
struct ColumnHeaderButton(pub ColorPalette);\n\nimpl button::StyleSheet for ColumnHeaderButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(self.0.base.background)),\n", "file_path": "src/gui/style.rs", "rank": 86, "score": 40139.035382190865 }, { "content": " }\n\n}\n\n\n\npub struct SelectedBrightForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for SelectedBrightForegroundContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: None,\n\n text_color: Some(self.0.bright.primary),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct FadedBrightForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for FadedBrightForegroundContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n text_color: Some(self.0.bright.surface),\n\n ..container::Style::default()\n", "file_path": "src/gui/style.rs", "rank": 87, "score": 40138.81051505706 }, { "content": "\n\npub struct SelectedBrightTextButton(pub ColorPalette);\n\nimpl button::StyleSheet for SelectedBrightTextButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n text_color: self.0.bright.primary,\n\n border_radius: 2.0,\n\n ..button::Style::default()\n\n }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(Color::TRANSPARENT)),\n\n text_color: self.0.bright.primary,\n\n ..self.active()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gui/style.rs", "rank": 88, "score": 40136.62569593886 }, { "content": " background: Some(Background::Color(Color::TRANSPARENT)),\n\n text_color: Color {\n\n a: 0.25,\n\n ..self.0.normal.surface\n\n },\n\n ..button::Style::default()\n\n }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style { ..self.active() }\n\n }\n\n\n\n fn disabled(&self) -> button::Style {\n\n button::Style { ..self.active() }\n\n }\n\n}\n\n\n\npub struct 
SelectedDefaultButton(pub ColorPalette);\n\nimpl button::StyleSheet for SelectedDefaultButton {\n", "file_path": "src/gui/style.rs", "rank": 89, "score": 40135.23302605891 }, { "content": " text_color: Some(self.0.normal.surface),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct HoverableBrightForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for HoverableBrightForegroundContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: None,\n\n text_color: Some(self.0.bright.surface),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct HoverableForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for HoverableForegroundContainer {\n\n fn style(&self) -> container::Style {\n", "file_path": "src/gui/style.rs", "rank": 90, "score": 40135.143331639345 }, { "content": " container::Style {\n\n background: None,\n\n text_color: Some(self.0.normal.surface),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct FadedNormalForegroundContainer(pub ColorPalette);\n\nimpl container::StyleSheet for FadedNormalForegroundContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: Some(Background::Color(Color {\n\n a: 0.80,\n\n ..self.0.base.foreground\n\n })),\n\n\n\n text_color: Some(self.0.normal.surface),\n\n ..container::Style::default()\n\n }\n", "file_path": "src/gui/style.rs", "rank": 91, "score": 40134.621881314975 }, { "content": " text_color: Some(self.0.bright.surface),\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n\n\n\npub struct SegmentedContainer(pub ColorPalette);\n\nimpl container::StyleSheet for SegmentedContainer {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n border_radius: 4.0,\n\n border_width: 1.0,\n\n border_color: Color {\n\n a: 0.5,\n\n ..self.0.normal.primary\n\n },\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n", "file_path": "src/gui/style.rs", "rank": 92, "score": 
40134.291249445065 }, { "content": " text_color: Color {\n\n a: 0.25,\n\n ..self.0.normal.surface\n\n },\n\n ..self.active()\n\n }\n\n }\n\n}\n\n\n\npub struct DefaultBoxedButton(pub ColorPalette);\n\nimpl button::StyleSheet for DefaultBoxedButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n border_color: Color {\n\n a: 0.5,\n\n ..self.0.normal.primary\n\n },\n\n border_width: 1.0,\n\n border_radius: 2.0,\n\n text_color: self.0.bright.primary,\n", "file_path": "src/gui/style.rs", "rank": 93, "score": 40134.19541107243 }, { "content": " pick_list::Style {\n\n background: Background::Color(Color {\n\n a: 0.15,\n\n ..self.0.normal.primary\n\n }),\n\n text_color: self.0.bright.primary,\n\n ..active\n\n }\n\n }\n\n}\n\n\n\npub struct ChannelBadge(pub ColorPalette);\n\nimpl container::StyleSheet for ChannelBadge {\n\n fn style(&self) -> container::Style {\n\n container::Style {\n\n background: Some(Background::Color(self.0.base.foreground)),\n\n text_color: Some(self.0.bright.primary),\n\n border_color: self.0.bright.primary,\n\n border_radius: 3.0,\n\n border_width: 1.0,\n", "file_path": "src/gui/style.rs", "rank": 94, "score": 40132.981692578105 }, { "content": " }\n\n}\n\n\n\npub struct BrightTextButton(pub ColorPalette);\n\nimpl button::StyleSheet for BrightTextButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n text_color: self.0.bright.surface,\n\n border_radius: 2.0,\n\n ..button::Style::default()\n\n }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(Color::TRANSPARENT)),\n\n text_color: self.0.bright.primary,\n\n ..self.active()\n\n }\n\n }\n", "file_path": "src/gui/style.rs", "rank": 95, "score": 40132.60233174789 }, { "content": "}\n\n\n\npub struct NormalTextButton(pub ColorPalette);\n\nimpl button::StyleSheet for NormalTextButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n text_color: self.0.normal.surface,\n\n border_radius: 2.0,\n\n 
..button::Style::default()\n\n }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(Color::TRANSPARENT)),\n\n text_color: self.0.bright.primary,\n\n ..self.active()\n\n }\n\n }\n\n}\n", "file_path": "src/gui/style.rs", "rank": 96, "score": 40132.60233174789 }, { "content": " }\n\n\n\n fn disabled(&self) -> button::Style {\n\n button::Style {\n\n text_color: Color {\n\n a: 0.25,\n\n ..self.0.normal.surface\n\n },\n\n ..self.active()\n\n }\n\n }\n\n}\n\n\n\npub struct DefaultDeleteButton(pub ColorPalette);\n\nimpl button::StyleSheet for DefaultDeleteButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n border_radius: 2.0,\n\n text_color: self.0.bright.error,\n\n ..button::Style::default()\n", "file_path": "src/gui/style.rs", "rank": 97, "score": 40132.002698972705 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct SecondaryButton(pub ColorPalette);\n\nimpl button::StyleSheet for SecondaryButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n text_color: self.0.bright.secondary,\n\n border_radius: 2.0,\n\n ..button::Style::default()\n\n }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(self.0.normal.secondary)),\n\n text_color: self.0.bright.secondary,\n\n ..self.active()\n\n }\n", "file_path": "src/gui/style.rs", "rank": 98, "score": 40131.8421496302 }, { "content": "pub struct DefaultButton(pub ColorPalette);\n\nimpl button::StyleSheet for DefaultButton {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n text_color: self.0.bright.primary,\n\n border_radius: 2.0,\n\n ..button::Style::default()\n\n }\n\n }\n\n\n\n fn hovered(&self) -> button::Style {\n\n button::Style {\n\n background: Some(Background::Color(self.0.normal.primary)),\n\n text_color: self.0.bright.primary,\n\n ..self.active()\n\n }\n\n }\n\n\n\n fn disabled(&self) -> button::Style {\n\n button::Style {\n", "file_path": 
"src/gui/style.rs", "rank": 99, "score": 40131.83919768316 } ]
Rust
mm0-rs/components/mmcc/src/mir_opt/ghost.rs
RESEARCHINGETERNITYEGPHILIPPOV/mm0
a4fff5c90f5787aacef23f2b5f0c9e064379658d
use std::{collections::HashSet, mem}; #[allow(clippy::wildcard_imports)] use super::*; #[repr(u8)] #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq)] pub enum Reachability { Dead, Unreachable, Reachable, } impl Default for Reachability { fn default() -> Self { Self::Dead } } impl Reachability { #[inline] #[must_use] pub fn reach(self) -> bool { matches!(self, Self::Reachable) } #[inline] #[must_use] pub fn dead(self) -> bool { matches!(self, Self::Dead) } } impl Domain for Reachability { fn join(&mut self, &other: &Self) -> bool { *self < other && { *self = other; true } } } #[derive(Debug)] pub struct GhostAnalysisResult(BlockVec<im::HashSet<VarId>>); impl Cfg { #[must_use] pub fn reachability_analysis(&self) -> BlockVec<Reachability> { struct ReachabilityAnalysis; fn side_effecting(t: &Terminator) -> bool { matches!(t, Terminator::Return(_) | Terminator::Exit(_) | Terminator::Assert(_, _, _, _) | Terminator::Call {se: true, ..}) } impl Analysis for ReachabilityAnalysis { type Dir = Backward; type Doms = BlockVec<Reachability>; fn bottom(&mut self, cfg: &Cfg) -> Self::Doms { BlockVec::bottom(cfg.blocks.len()) } fn apply_trans_for_block(&mut self, _: &Self::Doms, _: BlockId, bl: &BasicBlock, d: &mut Reachability ) { if side_effecting(bl.terminator()) { *d = Reachability::Reachable } } } let mut queue = WorkQueue::with_capacity(self.blocks.len()); let mut reachable = ReachabilityAnalysis.bottom(self); Backward::preferred_traverse(self, |id, _| { reachable[id] = Reachability::Unreachable; queue.insert(id); }); ReachabilityAnalysis.iterate_to_fixpoint_from(self, &mut queue, &mut reachable); for (i, d) in reachable.enum_iter_mut() { if *d != Reachability::Dead && side_effecting(self[i].terminator()) { *d = Reachability::Reachable } } reachable } pub fn apply_reachability_analysis(&mut self, reachable: &BlockVec<Reachability>) { for id in (0..self.blocks.len()).map(BlockId::from_usize) { let mut bl = &mut self.blocks[id]; match reachable[id] { 
Reachability::Dead => { *bl = BasicBlock::DEAD; continue } Reachability::Unreachable => { bl.reachable = false; continue } Reachability::Reachable => {} } match bl.term.as_mut() { Some(Terminator::Assert(_, _, reach, tgt) | Terminator::Call {reach, tgt, ..}) => *reach = reachable[*tgt].reach(), Some(&mut Terminator::If(_, [(_, tgt1), (_, tgt2)])) => { let reach1 = reachable[tgt1].reach(); if reach1 == reachable[tgt2].reach() { continue } let_unchecked!(Some(Terminator::If(_, [mut vtgt1, mut vtgt2])) = bl.term.take(), { if reach1 { mem::swap(&mut vtgt1, &mut vtgt2) } let (_, ty1) = &self.ctxs.head(self[vtgt1.1].ctx).2; let (e2, ty2) = &self.ctxs.head(self[vtgt2.1].ctx).2; bl = &mut self.blocks[id]; bl.stmts.push(Statement::Let( LetKind::Let(vtgt2.0, false, e2.clone()), ty2.clone(), Constant::contra(ty1.clone(), tgt1, vtgt1.0).into() )); bl.term = Some(Terminator::Jump1(tgt2)); }); } _ => {} } } } #[must_use] pub fn can_return(&self) -> bool { struct ReturnAnalysis; impl Analysis for ReturnAnalysis { type Dir = Backward; type Doms = BitSet<BlockId>; fn bottom(&mut self, cfg: &Cfg) -> Self::Doms { BitSet::bottom(cfg.blocks.len()) } fn apply_trans_for_block(&mut self, _: &Self::Doms, _: BlockId, bl: &BasicBlock, d: &mut bool) { match *bl.terminator() { Terminator::Return(_) | Terminator::Exit(_) => *d = true, Terminator::Assert(_, _, false, _) | Terminator::Call {reach: false, ..} => *d = false, Terminator::Assert(..) | Terminator::Call {..} | Terminator::Unreachable(_) | Terminator::Dead | Terminator::Jump(..) | Terminator::Jump1(..) | Terminator::If(..) 
=> {} } } } let doms = ReturnAnalysis.iterate_to_fixpoint(self); ReturnAnalysis.get_applied(self, &doms, BlockId::ENTRY) } #[must_use] pub fn ghost_analysis(&self, reachable: &BlockVec<Reachability>, returns: &[Arg], ) -> GhostAnalysisResult { #[derive(Default)] struct GhostDom { active: OptBlockId, vars: im::HashSet<VarId>, } impl GhostDom { #[inline] fn apply_local(&mut self, v: VarId) { self.vars.insert(v); } #[inline] fn apply_place(&mut self, p: &Place) { self.apply_local(p.local) } fn apply_operand(&mut self, o: &Operand) { if let Operand::Copy(p) | Operand::Move(p) | Operand::Ref(p) = o { self.apply_place(p) } } fn apply_rvalue(&mut self, id: BlockId, rv: &RValue) { match rv { RValue::Use(o) => self.apply_operand(o), RValue::Unop(_, o) | RValue::Cast(_, o, _) => { self.active = OptBlockId::new(id); self.apply_operand(o) } RValue::Binop(_, o1, o2) | RValue::Eq(_, _, o1, o2) => { self.active = OptBlockId::new(id); self.apply_operand(o1); self.apply_operand(o2) } RValue::Pun(_, p) | RValue::Borrow(p) => self.apply_place(p), RValue::List(os) | RValue::Array(os) => { self.active = OptBlockId::new(id); for o in &**os { self.apply_operand(o) } } RValue::Ghost(_) | RValue::Mm0(..) 
| RValue::Typeof(_) => {} } } } struct GhostAnalysis<'a> { reachable: &'a BlockVec<Reachability>, returns: &'a [Arg], } struct GhostDoms { active: BlockVec<OptBlockId>, vars: BlockVec<im::HashSet<VarId>>, } impl Domains for GhostDoms { type Item = GhostDom; fn cloned(&self, id: BlockId) -> Self::Item { GhostDom { active: self.active[id], vars: self.vars.cloned(id), } } fn join(&mut self, id: BlockId, &GhostDom {active, ref vars}: &GhostDom) -> bool { let cur = &mut self.active[id]; let changed = match (cur.get(), active.get()) { (None, Some(_)) => { *cur = active; true } (Some(a), Some(b)) if a != b && a != id => { *cur = OptBlockId::new(id); true } _ => false, }; changed | self.vars.join(id, vars) } } impl<'a> Analysis for GhostAnalysis<'a> { type Dir = Backward; type Doms = GhostDoms; fn bottom(&mut self, cfg: &Cfg) -> Self::Doms { GhostDoms { active: BlockVec::bottom(cfg.blocks.len()), vars: BlockVec::bottom(cfg.blocks.len()), } } fn apply_statement(&mut self, _: &Self::Doms, loc: Location, stmt: &Statement, d: &mut GhostDom) { match stmt { Statement::Let(lk, _, rv) => { let needed = match *lk { LetKind::Let(v, vr, _) => vr && d.vars.contains(&v), LetKind::Own([(x, xr, _), (y, yr, _)]) => xr && d.vars.contains(&x) || yr && d.vars.contains(&y) }; if needed { d.apply_rvalue(loc.block, rv) } } Statement::Assign(_, _, rhs, vars) => { let mut needed = false; for v in &**vars { if v.rel && d.vars.contains(&v.to) { needed = true; d.apply_local(v.from); } } if needed { d.active = OptBlockId::new(loc.block); d.apply_operand(rhs) } } } } fn apply_terminator(&mut self, _: &Self::Doms, id: BlockId, term: &Terminator, d: &mut GhostDom) { match term { Terminator::Jump(_, args, _) => { let GhostDom {vars, ..} = mem::take(d); for &(v, vr, ref o) in args { if vr && vars.contains(&v) { d.active = OptBlockId::new(id); d.apply_operand(o) } } } Terminator::Jump1(_) | Terminator::Exit(_) => {} Terminator::Return(args) => { d.active = OptBlockId::new(id); for ((_, vr, o), ret) in 
args.iter().zip(self.returns) { if *vr && !ret.attr.contains(ArgAttr::GHOST) { d.apply_operand(o) } } } Terminator::Unreachable(_) | Terminator::Dead => unreachable!(), Terminator::If(o, _) => if d.active == OptBlockId::new(id) { d.apply_operand(o) } Terminator::Assert(o, _, _, _) => { d.active = OptBlockId::new(id); d.apply_operand(o) } &Terminator::Call {se: side_effect, ref args, reach, ref rets, ..} => { let needed = !reach || side_effect || rets.iter().any(|&(vr, v)| vr && d.vars.contains(&v)); if needed { d.active = OptBlockId::new(id); for &(r, ref o) in &**args { if r { d.apply_operand(o) } } } } } } fn apply_trans_for_block(&mut self, ds: &Self::Doms, id: BlockId, bl: &BasicBlock, d: &mut GhostDom) { if !self.reachable[id].reach() { *d = Default::default(); return } self.do_apply_trans_for_block(ds, id, bl, d) } } let mut analysis = GhostAnalysis { reachable, returns }; let result = analysis.iterate_to_fixpoint(self); GhostAnalysisResult((0..self.blocks.len()).map(BlockId::from_usize).map(|id| { analysis.get_applied(self, &result, id).vars }).collect()) } pub fn apply_ghost_analysis(&mut self, res: &GhostAnalysisResult, returns: &[Arg], ) { self.ctxs.reset_ghost(); for (id, res) in res.0.enum_iter() { let bl = &mut self.blocks[id]; if bl.is_dead() { continue } bl.relevance = Some(self.ctxs.set_ghost(bl.ctx, |v| res.contains(&v))); let get = |v| res.contains(&v); for stmt in &mut bl.stmts { match stmt { Statement::Let(LetKind::Let(v, r, _), _, _) => *r = get(*v), Statement::Let(LetKind::Own(vs), _, _) => for (v, r, _) in vs { *r = get(*v) } Statement::Assign(_, _, _, vs) => for v in &mut **vs { v.rel = get(v.to) } } } } let mut cache = BlockVec::<Option<HashSet<VarId>>>::from_default(self.blocks.len()); let Cfg {ctxs, blocks, ..} = self; for i in 0..blocks.len() { let blocks = &mut *blocks.0; if let Some(Terminator::Jump(tgt, ref mut args, _)) = blocks[i].term { let tgt_ctx = blocks[tgt.0 as usize].ctx; let s = cache[tgt].get_or_insert_with(|| { 
ctxs.rev_iter(tgt_ctx).filter(|p| p.1).map(|p| p.0).collect() }); for (v, r, _) in args { *r = s.contains(v) } } } } pub fn do_ghost_analysis(&mut self, reachable: &BlockVec<Reachability>, returns: &[Arg], ) { let ghost = self.ghost_analysis(reachable, returns); self.apply_ghost_analysis(&ghost, returns); } }
use std::{collections::HashSet, mem}; #[allow(clippy::wildcard_imports)] use super::*; #[repr(u8)] #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq)] pub enum Reachability { Dead, Unreachable, Reachable, } impl Default for Reachability { fn default() -> Self { Self::Dead } } impl Reachability { #[inline] #[must_use] pub fn reach(self) -> bool { matches!(self, Self::Reachable) } #[inline] #[must_use] pub fn dead(self) -> bool { matches!(self, Self::Dead) } } impl Domain for Reachability { fn join(&mut self, &other: &Self) -> bool { *self < other && { *self = other; true } } } #[derive(Debug)] pub struct GhostAnalysisResult(BlockVec<im::HashSet<VarId>>); impl Cfg { #[must_use] pub fn reachability_analysis(&self) -> BlockVec<Reachability> { struct ReachabilityAnalysis; fn side_effecting(t: &Terminator) -> bool { matches!(t, Terminator::Return(_) | Terminator::Exit(_) | Terminator::Assert(_, _, _, _) | Terminator::Call {se: true, ..}) } impl Analysis for ReachabilityAnalysis { type Dir = Backward; type Doms = BlockVec<Reachability>; fn bottom(&mut self, cfg: &Cfg) -> Self::Doms { BlockVec::bottom(cfg.blocks.len()) } fn apply_trans_for_block(&mut self, _: &Self::Doms, _: BlockId, bl: &BasicBlock, d: &mut Reachability ) { if side_effecting(bl.terminator()) { *d = Reachability::Reachable } } } let mut queue = WorkQueue::with_capacity(self.blocks.len()); let mut reachable = ReachabilityAnalysis.bottom(self); Backward::preferred_traverse(self, |id, _| { reachable[id] = Reachability::Unreachable; queue.insert(id); }); ReachabilityAnalysis.iterate_to_fixpoint_from(self, &mut queue, &mut reachable); for (i, d) in reachable.enum_iter_mut() { if *d != Reachability::Dead && side_effecting(self[i].terminator()) { *d = Reachability::Reachable } } reachable } pub fn apply_reachability_analysis(&mut self, reachable: &BlockVec<Reachability>) { for id in (0..self.blocks.len()).map(BlockId::from_usize) { let mut bl = &mut self.blocks[id]; match reachable[id] { 
Reachability::Dead => { *bl = BasicBlock::DEAD; continue } Reachability::Unreachable => { bl.reachable = false; continue } Reachability::Reachable => {} } match bl.term.as_mut() { Some(Terminator::Assert(_, _, reach, tgt) | Terminator::Call {reach, tgt, ..}) => *reach = reachable[*tgt].reach(), Some(&mut Terminator::If(_, [(_, tgt1), (_, tgt2)])) => { let reach1 = reachable[tgt1].reach(); if reach1 == reachable[tgt2].reach() { continue } let_unchecked!(Some(Terminator::If(_, [mut vtgt1, mut vtgt2])) = bl.term.take(), { if reach1 { mem::swap(&mut vtgt1, &mut vtgt2) } let (_, ty1) = &self.ctxs.head(self[vtgt1.1].ctx).2; let (e2, ty2) = &self.ctxs.head(self[vtgt2.1].ctx).2; bl = &mut self.blocks[id]; bl.stmts.push(Statement::Let( LetKind::Let(vtgt2.0, false, e2.clone()), ty2.clone(), Constant::contra(ty1.clone(), tgt1, vtgt1.0).into() )); bl.term = Some(Terminator::Jump1(tgt2)); }); } _ => {} } } } #[must_use] pub fn can_return(&self) -> bool { struct ReturnAnalysis; impl Analysis for ReturnAnalysis { type Dir = Backward; type Doms = BitSet<BlockId>; fn bottom(&mut self, cfg: &Cfg) -> Self::Doms { BitSet::bottom(cfg.blocks.len()) } fn apply_trans_for_block(&mut self, _: &Self::Doms, _: BlockId, bl: &BasicBlock, d: &mut bool) { match *bl.terminator() { Terminator::Return(_) | Terminator::Exit(_) => *d = true, Terminator::Assert(_, _, false, _) | Terminator::Call {reach: false, ..} => *d = false, Terminator::Assert(..) | Terminator::Call {..} | Terminator::Unreachable(_) | Terminator::Dead | Terminator::Jump(..) | Terminator::Jump1(..) | Terminator::If(..) 
=> {} } } } let doms = ReturnAnalysis.iterate_to_fixpoint(self); ReturnAnalysis.get_applied(self, &doms, BlockId::ENTRY) } #[must_use] pub fn ghost_analysis(&self, reachable: &BlockVec<Reachability>, returns: &[Arg], ) -> GhostAnalysisResult { #[derive(Default)] struct GhostDom { active: OptBlockId, vars: im::HashSet<VarId>, } impl GhostDom { #[inline] fn apply_local(&mut self, v: VarId) { self.vars.insert(v); } #[inline] fn apply_place(&mut self, p: &Place) { self.apply_local(p.local) } fn apply_operand(&mut self, o: &Operand) { if let Operand::Copy(p) | Operand::Move(p) | Operand::Ref(p) = o { self.apply_place(p) } } fn apply_rvalue(&mut self, id: BlockId, rv: &RValue) { match rv { RValue::Use(o) => self.apply_operand(o), RValue::Unop(_, o) | RValue::Cast(_, o, _) => { self.active = OptBlockId::new(id); self.apply_operand(o) } RValue::Binop(_, o1, o2) | RValue::Eq(_, _, o1, o2) => { self.active = OptBlockId::new(id); self.apply_operand(o1); self.apply_operand(o2) } RValue::Pun(_, p) | RValue::Borrow(p) => self.apply_place(p), RValue::List(os) | RValue::Array(os) => { self.active = OptBlockId::new(id); for o in &**os { self.apply_operand(o) } } RValue::Ghost(_) | RValue::Mm0(..) | RValue
.rev_iter(tgt_ctx).filter(|p| p.1).map(|p| p.0).collect() }); for (v, r, _) in args { *r = s.contains(v) } } } } pub fn do_ghost_analysis(&mut self, reachable: &BlockVec<Reachability>, returns: &[Arg], ) { let ghost = self.ghost_analysis(reachable, returns); self.apply_ghost_analysis(&ghost, returns); } }
::Typeof(_) => {} } } } struct GhostAnalysis<'a> { reachable: &'a BlockVec<Reachability>, returns: &'a [Arg], } struct GhostDoms { active: BlockVec<OptBlockId>, vars: BlockVec<im::HashSet<VarId>>, } impl Domains for GhostDoms { type Item = GhostDom; fn cloned(&self, id: BlockId) -> Self::Item { GhostDom { active: self.active[id], vars: self.vars.cloned(id), } } fn join(&mut self, id: BlockId, &GhostDom {active, ref vars}: &GhostDom) -> bool { let cur = &mut self.active[id]; let changed = match (cur.get(), active.get()) { (None, Some(_)) => { *cur = active; true } (Some(a), Some(b)) if a != b && a != id => { *cur = OptBlockId::new(id); true } _ => false, }; changed | self.vars.join(id, vars) } } impl<'a> Analysis for GhostAnalysis<'a> { type Dir = Backward; type Doms = GhostDoms; fn bottom(&mut self, cfg: &Cfg) -> Self::Doms { GhostDoms { active: BlockVec::bottom(cfg.blocks.len()), vars: BlockVec::bottom(cfg.blocks.len()), } } fn apply_statement(&mut self, _: &Self::Doms, loc: Location, stmt: &Statement, d: &mut GhostDom) { match stmt { Statement::Let(lk, _, rv) => { let needed = match *lk { LetKind::Let(v, vr, _) => vr && d.vars.contains(&v), LetKind::Own([(x, xr, _), (y, yr, _)]) => xr && d.vars.contains(&x) || yr && d.vars.contains(&y) }; if needed { d.apply_rvalue(loc.block, rv) } } Statement::Assign(_, _, rhs, vars) => { let mut needed = false; for v in &**vars { if v.rel && d.vars.contains(&v.to) { needed = true; d.apply_local(v.from); } } if needed { d.active = OptBlockId::new(loc.block); d.apply_operand(rhs) } } } } fn apply_terminator(&mut self, _: &Self::Doms, id: BlockId, term: &Terminator, d: &mut GhostDom) { match term { Terminator::Jump(_, args, _) => { let GhostDom {vars, ..} = mem::take(d); for &(v, vr, ref o) in args { if vr && vars.contains(&v) { d.active = OptBlockId::new(id); d.apply_operand(o) } } } Terminator::Jump1(_) | Terminator::Exit(_) => {} Terminator::Return(args) => { d.active = OptBlockId::new(id); for ((_, vr, o), ret) in 
args.iter().zip(self.returns) { if *vr && !ret.attr.contains(ArgAttr::GHOST) { d.apply_operand(o) } } } Terminator::Unreachable(_) | Terminator::Dead => unreachable!(), Terminator::If(o, _) => if d.active == OptBlockId::new(id) { d.apply_operand(o) } Terminator::Assert(o, _, _, _) => { d.active = OptBlockId::new(id); d.apply_operand(o) } &Terminator::Call {se: side_effect, ref args, reach, ref rets, ..} => { let needed = !reach || side_effect || rets.iter().any(|&(vr, v)| vr && d.vars.contains(&v)); if needed { d.active = OptBlockId::new(id); for &(r, ref o) in &**args { if r { d.apply_operand(o) } } } } } } fn apply_trans_for_block(&mut self, ds: &Self::Doms, id: BlockId, bl: &BasicBlock, d: &mut GhostDom) { if !self.reachable[id].reach() { *d = Default::default(); return } self.do_apply_trans_for_block(ds, id, bl, d) } } let mut analysis = GhostAnalysis { reachable, returns }; let result = analysis.iterate_to_fixpoint(self); GhostAnalysisResult((0..self.blocks.len()).map(BlockId::from_usize).map(|id| { analysis.get_applied(self, &result, id).vars }).collect()) } pub fn apply_ghost_analysis(&mut self, res: &GhostAnalysisResult, returns: &[Arg], ) { self.ctxs.reset_ghost(); for (id, res) in res.0.enum_iter() { let bl = &mut self.blocks[id]; if bl.is_dead() { continue } bl.relevance = Some(self.ctxs.set_ghost(bl.ctx, |v| res.contains(&v))); let get = |v| res.contains(&v); for stmt in &mut bl.stmts { match stmt { Statement::Let(LetKind::Let(v, r, _), _, _) => *r = get(*v), Statement::Let(LetKind::Own(vs), _, _) => for (v, r, _) in vs { *r = get(*v) } Statement::Assign(_, _, _, vs) => for v in &mut **vs { v.rel = get(v.to) } } } } let mut cache = BlockVec::<Option<HashSet<VarId>>>::from_default(self.blocks.len()); let Cfg {ctxs, blocks, ..} = self; for i in 0..blocks.len() { let blocks = &mut *blocks.0; if let Some(Terminator::Jump(tgt, ref mut args, _)) = blocks[i].term { let tgt_ctx = blocks[tgt.0 as usize].ctx; let s = cache[tgt].get_or_insert_with(|| { ctxs
random
[ { "content": "/// Performs \"curly transformation\", turning `{x op y op z}` into `(op x y z)`.\n\n///\n\n/// A curly list is valid if\n\n/// - it is a proper list, and\n\n/// - it has at most two elements (in which case it is transformed to itself), or\n\n/// - it has an odd number of elements and the elements at all odd numbered positions compare equal.\n\n/// (in which case the element at position 1 is moved to the front, and the later\n\n/// copies of it are removed).\n\n///\n\n/// Invalid curly lists like `{x op y op2 z}` are converted to `(:nfx x op y op2 z)`.\n\n///\n\n/// # Parameters\n\n///\n\n/// - `es`: The list of elements to transform, such as `[x, op, y, op, z]`\n\n/// - `no_dot`: True if this is a proper list. A dotted list like `{x op y . z}` is not a\n\n/// valid curly list, and is transformed to `(:nfx x op y . z)`.\n\n/// - `eq`: An equality comparator for elements of the list.\n\n/// - `nfx`: A constructor for the `:nfx` atom, in case this is not a valid curly list.\n\n///\n\n/// # Returns\n\n///\n\n/// Returns nothing, but modifies the input `es` to reorder the elements so that the\n\n/// operation at odd positions comes first and the elements at even positions come later,\n\n/// for example `[x, op, y, op, z]` becomes `[op, x, y, z]`.\n\npub fn curly_transform<T>(es: &mut Vec<T>, no_dot: bool, eq: impl Fn(&T, &T) -> bool, nfx: impl FnOnce() -> T) {\n\n let n = es.len();\n\n if n > 2 {\n\n let valid_curly = no_dot && n % 2 != 0 && {\n\n let e = &es[1];\n\n (3..n).step_by(2).all(|i| eq(&es[i], e))\n\n };\n\n if valid_curly {\n\n es.swap(0, 1);\n\n let mut from = 4;\n\n let mut to = 3;\n\n while from < n {\n\n es.swap(from, to);\n\n to += 1;\n\n from += 2;\n\n }\n\n es.truncate(to);\n\n } else {\n\n es.insert(0, nfx());\n\n }\n", "file_path": "mm0-rs/src/parser/ast.rs", "rank": 0, "score": 390082.37346316106 }, { "content": "/// A very simple jump threading visitor. 
Start at an unvisited basic block, then follow forward\n\n/// edges to unvisited basic blocks as long as possible. Then start over somewhere else.\n\n/// This ordering is good for code placement since a jump or branch to the immediately following\n\n/// block can be elided.\n\nfn visit_blocks<'a>(cfg: &'a Cfg, mut f: impl FnMut(BlockId, &'a BasicBlock)) {\n\n let mut visited: BitSet<BlockId> = BitSet::default();\n\n for (mut i, mut bl) in cfg.blocks() {\n\n if visited.insert(i) && !bl.is_dead() {\n\n while let Some((_, j)) = {\n\n f(i, bl);\n\n bl.successors().find(|&(_, j)| visited.insert(j))\n\n } {\n\n i = j;\n\n bl = &cfg[i];\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/build_vcode.rs", "rank": 1, "score": 374702.2724570928 }, { "content": "// Generates the actual token tree.\n\nfn mk_item(derive_input : &mut syn::DeriveInput, pub_only : bool) -> syn::ItemImpl {\n\n let format_env_path : syn::Path = parse_quote!(crate::elab::lisp::print::FormatEnv);\n\n let env_debug_path : syn::Path = parse_quote!(crate::elab::lisp::debug::EnvDebug);\n\n\n\n // For items with type parameters, add a trait bound of `EnvDebug`.\n\n // As of right now, deriving EnvDebug for some type T requires that all of its type parameters\n\n // also implement EnvDebug.\n\n for param in &mut derive_input.generics.params {\n\n match param {\n\n syn::GenericParam::Type(type_param) => {\n\n let bound : syn::TypeParamBound = parse_quote!(EnvDebug);\n\n type_param.bounds.push(bound);\n\n },\n\n _ => continue\n\n }\n\n }\n\n\n\n let (ident, data) = (&derive_input.ident, &derive_input.data);\n\n let (impl_generics, type_generics, where_clause) = &derive_input.generics.split_for_impl();\n\n\n", "file_path": "mm0-rs/components/debug_derive/src/lib.rs", "rank": 2, "score": 310333.9372935104 }, { "content": "/// Data to support the `(jump label[i])` operation.\n\ntype LabelData = (BlockId, Rc<[(VarId, bool)]>);\n\n\n", "file_path": "mm0-rs/components/mmcc/src/build_mir.rs", 
"rank": 3, "score": 304653.5163376519 }, { "content": "/// Main entry point for `mm0-rs server` subcommand.\n\n///\n\n/// This function is not intended for interactive use, but instead sets up an [LSP] connection\n\n/// using stdin and stdout. This allows for extensions such as [`vscode-mm0`] to use `mm0-rs`\n\n/// as a language server.\n\n///\n\n/// # Arguments\n\n///\n\n/// `mm0-rs server [--debug]`, where:\n\n///\n\n/// - `-d`, `--debug`: enables debugging output to `lsp.log`\n\n///\n\n/// [LSP]: https://microsoft.github.io/language-server-protocol/\n\n/// [`vscode-mm0`]: https://github.com/digama0/mm0/tree/master/vscode-mm0\n\npub fn main(args: &ArgMatches<'_>) {\n\n if args.is_present(\"debug\") {\n\n use {simplelog::{Config, LevelFilter, WriteLogger}, std::fs::File};\n\n std::env::set_var(\"RUST_BACKTRACE\", \"1\");\n\n if let Ok(f) = File::create(\"lsp.log\") {\n\n let _ = WriteLogger::init(LevelFilter::Debug, Config::default(), f);\n\n }\n\n }\n\n let server = &*SERVER; // start the server\n\n drop(log_message(\"started\".into()));\n\n if args.is_present(\"no_log_errors\") {\n\n server.options.ulock().log_errors = Some(false)\n\n }\n\n server.run();\n\n std::mem::take(&mut *server.reqs.ulock());\n\n std::mem::take(&mut *server.vfs.0.ulock());\n\n}\n", "file_path": "mm0-rs/src/server.rs", "rank": 4, "score": 293811.109080231 }, { "content": "fn high_rm(rex: &mut bool, rm: &PRegMem) {\n\n match rm {\n\n &RegMem::Reg(r) => high_reg(rex, r),\n\n RegMem::Mem(a) => high_amode(rex, a)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 5, "score": 292973.33032730554 }, { "content": "fn high_rmi(rex: &mut bool, rmi: &PRegMemImm) {\n\n match rmi {\n\n &PRegMemImm::Reg(r) => high_reg(rex, r),\n\n PRegMemImm::Mem(a) => high_amode(rex, a),\n\n PRegMemImm::Imm(_) => {}\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 6, "score": 289527.91675292864 }, { "content": "fn layout_rm(rex: &mut bool, r: 
PReg, rm: &PRegMem) -> ModRMLayout {\n\n *rex |= large_preg(r);\n\n layout_opc_rm(rex, rm)\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 7, "score": 277429.4288860691 }, { "content": "/// Main entry point for `mm0-rs join` subcommand.\n\n///\n\n/// See the [module documentation](self) for the purpose of this command.\n\n///\n\n/// # Arguments\n\n///\n\n/// `mm0-rs join <in.mm0> [out.mm0]`, where:\n\n///\n\n/// - `in.mm0` (or `in.mm1`) is the file to join, an MM0 file with `import`s\n\n/// - `out.mm0` is the output location, or stdin if omitted.\n\npub fn main(args: &ArgMatches<'_>) -> io::Result<()> {\n\n let path = args.value_of(\"INPUT\").expect(\"required arg\");\n\n let file = fs::canonicalize(path)?.into();\n\n let comments = !args.is_present(\"bare\");\n\n let header = !args.is_present(\"no_header\");\n\n match args.value_of(\"OUTPUT\") {\n\n None => join_with_header(comments, header, io::stdout(), file),\n\n Some(out) => join_with_header(comments, header, fs::File::create(out)?, file),\n\n }\n\n}", "file_path": "mm0-rs/src/joiner.rs", "rank": 8, "score": 276780.52867132705 }, { "content": "/// Main entry point for `mm0-rs compile` subcommand.\n\n///\n\n/// # Arguments\n\n///\n\n/// `mm0-rs compile <in.mm1> [out.mmb]`, where:\n\n///\n\n/// - `in.mm1` is the MM1 (or MM0) file to elaborate\n\n/// - `out.mmb` (or `out.mmu`) is the MMB file to generate, if the elaboration is\n\n/// successful. The file extension is used to determine if we are outputting\n\n/// binary. 
If this argument is omitted, the input is only elaborated.\n\npub fn main(args: &ArgMatches<'_>) -> io::Result<()> {\n\n let path = args.value_of(\"INPUT\").expect(\"required arg\");\n\n let path: FileRef = fs::canonicalize(path)?.into();\n\n let (file, env) = elab_for_result(path.clone())?;\n\n let env = env.unwrap_or_else(|| std::process::exit(1));\n\n QUIET.store(args.is_present(\"quiet\"), Ordering::Relaxed);\n\n if let Some(s) = args.value_of_os(\"output\") {\n\n if let Err((fsp, e)) =\n\n if s == \"-\" { env.run_output(io::stdout()) }\n\n else { env.run_output(fs::File::create(s)?) }\n\n {\n\n let e = ElabError::new_e(fsp.span, e);\n\n let file = VFS.get_or_insert(fsp.file.clone())?.1;\n\n e.to_snippet(&fsp.file, file.text.ascii(), &mut mk_to_range(),\n\n |s| println!(\"{}\\n\", DisplayList::from(s)));\n\n std::process::exit(1);\n\n }\n\n }\n\n if let Some(out) = args.value_of(\"OUTPUT\") {\n\n use {fs::File, io::BufWriter};\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 9, "score": 276780.31206771894 }, { "content": "fn layout_opc_rm(rex: &mut bool, rm: &PRegMem) -> ModRMLayout {\n\n match rm {\n\n PRegMem::Reg(r) => layout_opc_reg(rex, *r),\n\n PRegMem::Mem(a) => layout_opc_mem(rex, a),\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 10, "score": 275035.5502078588 }, { "content": "fn layout_rmi(rex: &mut bool, r: PReg, rm: &PRegMemImm) -> ModRMLayout {\n\n *rex |= large_preg(r);\n\n layout_opc_rmi(rex, rm)\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 11, "score": 274540.51983145636 }, { "content": "/// Main entry point for `mm0-rs doc` subcommand.\n\n///\n\n/// # Arguments\n\n///\n\n/// `mm0-rs doc <in.mm1> [doc]`, where:\n\n///\n\n/// - `in.mm1` is the initial file to elaborate.\n\n/// - `doc` is the output folder, which will be created if not present.\n\npub fn main(args: &ArgMatches<'_>) -> io::Result<()> {\n\n let path = args.value_of(\"INPUT\").expect(\"required arg\");\n\n 
let path: FileRef = fs::canonicalize(path)?.into();\n\n let (fc, old) = crate::compiler::elab_for_result(path.clone())?;\n\n let old = old.unwrap_or_else(|| std::process::exit(1));\n\n println!(\"writing docs\");\n\n let mut env = Environment::new();\n\n assert!(matches!(\n\n EnvMergeIter::new(&mut env, &old, (0..0).into()).next(&mut env, &mut vec![]), Ok(None)));\n\n let mut dir = PathBuf::from(args.value_of(\"OUTPUT\").unwrap_or(\"doc\"));\n\n fs::create_dir_all(&dir)?;\n\n macro_rules! import {($($str:expr),*) => {$({\n\n let mut file = dir.to_owned();\n\n file.push($str);\n\n if !file.exists() {\n\n File::create(file)?.write_all(include_bytes!($str))?;\n\n }\n\n })*}}\n\n import!(\"stylesheet.css\", \"proof.js\");\n\n let order = match args.value_of(\"order\") {\n", "file_path": "mm0-rs/src/doc/mod.rs", "rank": 12, "score": 273554.2179509108 }, { "content": "fn layout_opc_rmi(rex: &mut bool, rm: &PRegMemImm) -> ModRMLayout {\n\n match rm {\n\n PRegMemImm::Reg(r) => layout_opc_reg(rex, *r),\n\n PRegMemImm::Mem(a) => layout_opc_mem(rex, a),\n\n PRegMemImm::Imm(_) => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 13, "score": 272001.603653993 }, { "content": "fn layout_mem(rex: &mut bool, r: PReg, a: &PAMode) -> ModRMLayout {\n\n *rex |= large_preg(r);\n\n layout_opc_mem(rex, a)\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 14, "score": 270088.5107812995 }, { "content": "fn get_fresh_name(env: &mut Environment, mut base: Vec<u8>, mut bad: impl FnMut(Symbol, &AtomData) -> bool) -> Symbol {\n\n if !base.is_empty() {\n\n let a = env.get_atom(&base);\n\n if !bad(a, &env.data[a]) {return a}\n\n }\n\n base.push(b'_');\n\n let n = base.len();\n\n for i in 1.. 
{\n\n use std::io::Write;\n\n write!(&mut base, \"{}\", i).unwrap();\n\n let a = env.get_atom(&base);\n\n if !bad(a, &env.data[a]) {return a}\n\n base.truncate(n);\n\n }\n\n unreachable!()\n\n}\n\n\n\nimpl Compiler {\n\n fn head_keyword(&self, e: &LispVal) -> Option<(Keyword, Uncons)> {\n\n head_keyword(&self.keywords, e)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/typeck.rs", "rank": 15, "score": 269119.4105545151 }, { "content": "fn join_with_header(comments: bool, header: bool, mut w: impl Write, file: FileRef) -> io::Result<()> {\n\n let mut buf = vec![];\n\n if comments && header {\n\n let mut joiner = Joiner::new(comments, &mut buf);\n\n joiner.write(file.clone())?;\n\n writeln!(w, \"\\\n\n -- This is an autogenerated file constructed by `mm0-rs join {}`.\\n\\\n\n -- It concatenates the files:\", file)?;\n\n for p in joiner.done { writeln!(w, \"-- * {}\", p)? }\n\n writeln!(w)?;\n\n w.write_all(&buf)\n\n } else {\n\n Joiner::new(comments, w).write(file)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/joiner.rs", "rank": 16, "score": 265652.5802394129 }, { "content": "fn disambiguated_anchor(w: &mut impl Write, ad: &AtomData, sort: bool) -> io::Result<()> {\n\n match ad {\n\n AtomData {sort: Some(_), decl: Some(_), ..} if sort => write!(w, \"{}.sort\", ad.name),\n\n AtomData {sort: Some(_), decl: Some(DeclKey::Term(_)), ..} => write!(w, \"{}.term\", ad.name),\n\n AtomData {sort: Some(_), decl: Some(DeclKey::Thm(_)), ..} => write!(w, \"{}.thm\", ad.name),\n\n _ => write!(w, \"{}\", ad.name),\n\n }\n\n}\n\n\n\nimpl<'a, W: Write> BuildDoc<'a, W> {\n\n fn thm_doc(&mut self, prev: Option<ThmId>, tid: ThmId, next: Option<ThmId>) -> io::Result<()> {\n\n let mut file = self.thm_folder.clone();\n\n #[allow(clippy::useless_transmute)]\n\n let td: &Thm = unsafe { mem::transmute(&self.env.thms[tid]) };\n\n self.mangler.mangle(&self.env, tid, |_, s| file.push(&format!(\"{}.html\", s)));\n\n let mut file = BufWriter::new(File::create(file)?);\n\n let ad = 
&self.env.data[td.atom];\n\n let thmname = &ad.name;\n\n let filename = td.span.file.rel();\n\n let mut nav = String::new();\n", "file_path": "mm0-rs/src/doc/mod.rs", "rank": 17, "score": 252147.88848294277 }, { "content": "/// Encode the command `cmd` (one of the `STMT_*`, `PROOF_*` or `UNIFY_*` commands\n\n/// in this module, which are all 6 bit numbers) with the given `data` field\n\n/// according to the following scheme:\n\n///\n\n/// * `cmd | 0x00` for `data = 0`\n\n/// * `cmd | 0x40, data:u8` for 8 bit `data`\n\n/// * `cmd | 0x80, data:u16` for 16 bit `data`\n\n/// * `cmd | 0xC0, data:u32` for 32 bit `data`\n\n///\n\n/// where we select the shortest available encoding given the value of `data`.\n\npub fn write_cmd(w: &mut impl Write, cmd: u8, data: u32) -> io::Result<()> {\n\n if data == 0 {\n\n w.write_u8(cmd)\n\n } else if let Ok(data) = data.try_into() {\n\n w.write_u8(cmd | DATA_8)?;\n\n w.write_u8(data)\n\n } else if let Ok(data) = data.try_into() {\n\n w.write_u8(cmd | DATA_16)?;\n\n w.write_u16::<LE>(data)\n\n } else {\n\n w.write_u8(cmd | DATA_32)?;\n\n w.write_u32::<LE>(data)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mm0b_parser/src/write.rs", "rank": 18, "score": 242734.34688228276 }, { "content": "fn layout_reg(rex: &mut bool, r: PReg, rm: PReg) -> ModRMLayout {\n\n *rex |= large_preg(r);\n\n layout_opc_reg(rex, rm)\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 19, "score": 240633.86840811634 }, { "content": "fn layout_opc_mem(rex: &mut bool, a: &PAMode) -> ModRMLayout {\n\n if a.base.is_valid() { *rex |= large_preg(a.base) }\n\n if let Some(si) = a.si { *rex |= large_preg(si.index) }\n\n match a {\n\n _ if !a.base().is_valid() => ModRMLayout::Sib0,\n\n PAMode {off, si: None, ..} if a.base().hw_enc() & 7 != 4 =>\n\n ModRMLayout::Disp(layout_offset(off)),\n\n PAMode {off, base, ..} => match (*base, layout_offset(off)) {\n\n (RBP, DispLayout::S0) => ModRMLayout::SibReg(DispLayout::S8),\n\n (_, layout) => 
ModRMLayout::SibReg(layout)\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 20, "score": 240362.2473201513 }, { "content": "/// This is like [`write_cmd`], but it is followed by\n\n/// the byte array `buf`, and the initial `data` field is the length of the entire\n\n/// expression (the initial command byte, the `data` field, and the buffer).\n\n/// This can't be expressed with `write_cmd` directly because of the circular\n\n/// dependency where the value of `data` determines the size of the initial command,\n\n/// which affects the value of `data`.\n\npub fn write_cmd_bytes(w: &mut impl Write, cmd: u8, buf: &[u8]) -> io::Result<()> {\n\n if let Ok(data) = (buf.len() + 2).try_into() {\n\n w.write_u8(cmd | DATA_8)?;\n\n w.write_u8(data)?;\n\n w.write_all(buf)\n\n } else if let Ok(data) = (buf.len() + 3).try_into() {\n\n w.write_u8(cmd | DATA_16)?;\n\n w.write_u16::<LE>(data)?;\n\n w.write_all(buf)\n\n } else {\n\n w.write_u8(cmd | DATA_32)?;\n\n w.write_u32::<LE>((buf.len() + 5).try_into().expect(\"too large for format\"))?;\n\n w.write_all(buf)\n\n }\n\n}\n\n\n\nimpl UnifyCmd {\n\n /// Serialize a [`UnifyCmd`] to the given writer. 
Uses the `UNIFY_*` commands in\n\n /// [`mmb::export::cmd`](super::cmd).\n\n #[inline]\n", "file_path": "mm0-rs/components/mm0b_parser/src/write.rs", "rank": 21, "score": 240152.38465472677 }, { "content": "fn build_unfold_map<'a>(env: &FrozenEnv, m: &mut HashMap<AtomId, &'a ProofNode>, checked: &mut [bool],\n\n heap: &[ExprNode], node: &ExprNode, t_heap: &'a [ProofNode], mut tgt: &'a ProofNode) {\n\n match *node {\n\n ExprNode::Ref(i) => if !mem::replace(&mut checked[i], true) {\n\n build_unfold_map(env, m, checked, heap, &heap[i], t_heap, tgt)\n\n },\n\n ExprNode::Dummy(a, _) => {m.insert(a, tgt);}\n\n ExprNode::App(t, ref es) => loop {\n\n match *tgt {\n\n ProofNode::Ref(j) => tgt = &t_heap[j],\n\n ProofNode::Term {term: t2, args: ref es2} if t == t2 && es.len() == es2.len() => {\n\n for (e1, e2) in es.iter().zip(&**es2) {\n\n build_unfold_map(env, m, checked, heap, e1, t_heap, e2)\n\n }\n\n break\n\n }\n\n _ => unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/mmu/export.rs", "rank": 22, "score": 238873.05727997614 }, { "content": "fn layout_opc_reg(rex: &mut bool, rm: PReg) -> ModRMLayout {\n\n *rex |= large_preg(rm);\n\n ModRMLayout::Reg\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 23, "score": 237769.53164736996 }, { "content": "fn high_reg(rex: &mut bool, r: PReg) { *rex |= r.index() & 4 != 0 }\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 24, "score": 234266.6526891191 }, { "content": "fn header(w: &mut impl Write,\n\n rel: &str, desc: &str, title: &str,\n\n h1: &str, nav: &str, script: &[&str]\n\n) -> io::Result<()> {\n\n writeln!(w, \"\\\n\n <!DOCTYPE html>\\\n\n \\n<html lang=\\\"en\\\">\\\n\n \\n<head>\\\n\n \\n <meta charset=\\\"utf-8\\\">\\\n\n \\n <meta name=\\\"viewport\\\" content=\\\"width=device-width, initial-scale=1.0\\\">\\\n\n \\n <meta name=\\\"generator\\\" content=\\\"mm0-doc\\\">\\\n\n \\n <meta name=\\\"description\\\" content=\\\"{desc}\\\">\\\n\n \\n 
<meta name=\\\"keywords\\\" content=\\\"mm0, metamath-zero\\\">\\\n\n \\n <title>{title} - Metamath Zero</title>\\\n\n \\n <link rel=\\\"stylesheet\\\" type=\\\"text/css\\\" href=\\\"{rel}stylesheet.css\\\" />\\\n\n \\n <link rel=\\\"stylesheet\\\" href=\\\"https://fonts.googleapis.com/css?family=Neuton&amp;subset=latin\\\" type=\\\"text/css\\\" media=\\\"screen\\\">\\\n\n \\n <link rel=\\\"stylesheet\\\" href=\\\"https://fonts.googleapis.com/css?family=Nobile:regular,italic,bold,bolditalic&amp;subset=latin\\\" type=\\\"text/css\\\" media=\\\"screen\\\">\",\n\n rel = rel, desc = desc, title = title)?;\n\n for s in script {\n\n writeln!(w, r#\" <script src=\"{}\"></script>\"#, s)?\n", "file_path": "mm0-rs/src/doc/mod.rs", "rank": 25, "score": 233109.16717794808 }, { "content": "fn let_var(sp: &FileSpan, name: Symbol, v: VarId, rhs: ast::Expr) -> ast::Stmt {\n\n Spanned {span: sp.clone(), k: ast::StmtKind::Let {\n\n lhs: Spanned {span: sp.clone(), k:\n\n ast::TuplePatternKind::Name(false, name, v)},\n\n rhs\n\n }}\n\n}\n\n\n\n/// The state of the AST building pass.\n\n#[derive(Default, Debug)]\n\npub struct BuildAst {\n\n /// The mapping of user-level names to internal variable IDs. The vector represents name\n\n /// shadowing, with the active variable being the last in the list.\n\n ///\n\n /// This is a cache for `ctx`: `name_map.get(name)` is exactly the\n\n /// list of `v` such that `Var(name, v)` is in `ctx` (in order).\n\n name_map: HashMap<Symbol, Vec<VarId>>,\n\n /// The mapping of user-level labels to internal label IDs. 
The vector represents name\n\n /// shadowing, with the active label being the last in the list.\n\n ///\n", "file_path": "mm0-rs/components/mmcc/src/build_ast.rs", "rank": 26, "score": 231702.72253060862 }, { "content": "fn pop_user_local(m: &mut HashMap<Symbol, Vec<VarId>>, user: Symbol) -> Option<()> {\n\n if user != Symbol::UNDER {\n\n if let Some(vec) = m.get_mut(&user) { vec.pop()?; }\n\n }\n\n Some(())\n\n}\n\n\n\nimpl<'a> TypeChecker<'a> {\n\n /// Constructs a new [`TypeChecker`], which can be used to typecheck many `AST` items\n\n /// via [`typeck`](Self::typeck) and will reuse its internal buffers.\n\n pub fn new(mmc: &'a mut Compiler, elab: &'a mut Elaborator, fsp: FileSpan) -> Self {\n\n Self {mmc, elab, fsp,\n\n next_var: VarId::default(),\n\n user_locals: HashMap::new(),\n\n mut_globals: HashMap::new(),\n\n context: Vec::new(),\n\n }\n\n }\n\n\n\n fn new_type_state(&self) -> TypeState {\n", "file_path": "mm0-rs/components/mmcc/src/typeck.rs", "rank": 27, "score": 228966.42395014333 }, { "content": "fn render_line<'a>(fe: FormatEnv<'_>, mangler: &'a mut Mangler, w: &mut impl Write,\n\n line: u32, hyps: &[u32], kind: LineKind, e: &LispVal) -> io::Result<()> {\n\n let kind_class = match kind {\n\n LineKind::Hyp(_) => \"step-hyp\",\n\n LineKind::Thm(_) => \"step-thm\",\n\n LineKind::Conv(_) => \"step-conv\"\n\n };\n\n write!(w, \" \\\n\n <tr id=\\\"{line}\\\" class=\\\"{kind}\\\">\\\n\n \\n <td>{line}</td>\\\n\n \\n <td>\",\n\n kind = kind_class, line = line)?;\n\n let mut first = true;\n\n for hyp in hyps {\n\n if !mem::take(&mut first) { write!(w, \", \")? 
}\n\n write!(w, r##\"<a href=\"#{id}\">{id}</a>\"##, id = hyp)?\n\n }\n\n write!(w, \"</td>\\n <td>\")?;\n\n match kind {\n\n LineKind::Hyp(None) => write!(w, \"<i>hyp</i>\")?,\n", "file_path": "mm0-rs/src/doc/mod.rs", "rank": 28, "score": 227992.21599251765 }, { "content": "fn write_expr_proof(w: &mut impl Write,\n\n heap: &[ExprNode],\n\n reorder: &mut Reorder,\n\n vars: &mut Option<&mut Vec<AtomId>>,\n\n node: &ExprNode,\n\n save: bool\n\n) -> io::Result<u32> {\n\n Ok(match *node {\n\n ExprNode::Ref(i) => match reorder.map[i] {\n\n None => {\n\n let n = write_expr_proof(w, heap, reorder, vars, &heap[i], true)?;\n\n reorder.map[i] = Some(n);\n\n n\n\n }\n\n Some(n) => {ProofCmd::Ref(n).write_to(w)?; n}\n\n }\n\n ExprNode::Dummy(a, s) => {\n\n if let Some(vec) = vars {vec.push(a)}\n\n ProofCmd::Dummy(s).write_to(w)?;\n\n (reorder.idx, reorder.idx += 1).0\n", "file_path": "mm0-rs/src/mmb/export.rs", "rank": 29, "score": 227735.4893905047 }, { "content": "#[must_use]\n\npub fn ident_start(c: u8) -> bool {\n\n (b'a'..=b'z').contains(&c) || (b'A'..=b'Z').contains(&c) || c == b'_'\n\n}\n\n\n\n/// return true iff a given character is an acceptable ident character.\n", "file_path": "mm0-rs/components/mm1_parser/src/lib.rs", "rank": 30, "score": 224968.2199803817 }, { "content": "fn high_amode(rex: &mut bool, a: &PAMode) {\n\n if a.base.is_valid() { high_reg(rex, a.base) }\n\n if let Some(si) = &a.si { high_reg(rex, si.index) }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 31, "score": 212523.58847366832 }, { "content": "#[derive(Debug, Default)]\n\nstruct TypeTarget<'a>(Option<&'a Type>, Option<&'a mut PreTuplePattern>);\n\n\n\nimpl<'a> TypeTarget<'a> {\n\n const NONE: Self = TypeTarget(None, None);\n\n\n\n fn reborrow(&mut self) -> TypeTarget<'_> {\n\n TypeTarget(self.0, self.1.as_deref_mut())\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a Type> for TypeTarget<'a> {\n\n fn from(t: &'a Type) -> Self { Self(Some(t), None) }\n\n}\n\n\n\n// impl<'a> 
From<&'a TuplePattern> for TypeTarget<'a> {\n\n// fn from(t: &'a TuplePattern) -> Self {\n\n// if let TuplePattern::Typed(t) = t {\n\n// TypeTarget(Some(&t.1), Some(&t.0))\n\n// } else {\n\n// TypeTarget(None, Some(t))\n\n// }\n\n// }\n\n// }\n\n\n", "file_path": "mm0-rs/components/mmcc/src/typeck.rs", "rank": 32, "score": 211535.5543455241 }, { "content": "fn list<A, W: Write>(w: &mut W, mut es: impl Iterator<Item=A>,\n\n mut f: impl FnMut(&mut W, A) -> io::Result<()>) -> io::Result<()> {\n\n match es.next() {\n\n None => write!(w, \"()\"),\n\n Some(x) => {\n\n write!(w, \"(\")?;\n\n f(w, x)?;\n\n for e in es {write!(w, \" \")?; f(w, e)?}\n\n write!(w, \")\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/mmu/export.rs", "rank": 33, "score": 210913.33212712422 }, { "content": "#[proc_macro_derive(EnvDebugPub)]\n\npub fn derive_env_debug_pub(input : TokenStream) -> TokenStream {\n\n let mut parsed = parse_macro_input!(input as syn::DeriveInput);\n\n let tt = mk_item(&mut parsed, true);\n\n\n\n TokenStream::from(quote! {\n\n #tt\n\n })\n\n}\n", "file_path": "mm0-rs/components/debug_derive/src/lib.rs", "rank": 34, "score": 210394.4148954169 }, { "content": "fn mk_array<A, const N: usize>(mut f: impl FnMut(usize) -> A) -> [A; N] {\n\n let mut i = 0_usize;\n\n [(); N].map(|_| { let a = f(i); i += 1; a })\n\n}\n\n\n", "file_path": "mm0-rs/src/mmc/proof/predefs.rs", "rank": 35, "score": 209614.60462332825 }, { "content": "#[derive(PartialEq, Clone, Debug)]\n\nstruct VarValue<K, V> {\n\n parent: K, // if equal to self, this is a root\n\n value: V, // value assigned (only relevant to root)\n\n rank: u32, // max depth (only relevant to root)\n\n}\n\n\n\n/// Table of unification keys and their values. 
You must define a key type K\n\n/// that implements the `Idx` trait.\n\n#[derive(Clone, Debug)]\n\npub(crate) struct UnificationTable<K, V> {\n\n values: IdxVec<K, VarValue<K, V>>,\n\n}\n\n\n\nimpl<K, V> Default for UnificationTable<K, V> {\n\n fn default() -> Self { Self { values: Default::default() } }\n\n}\n\n\n\nimpl<K, V> VarValue<K, V> {\n\n fn new_var(key: K, value: V) -> VarValue<K, V> {\n\n VarValue {parent: key, value, rank: 0}\n", "file_path": "mm0-rs/components/mmcc/src/union_find.rs", "rank": 36, "score": 208314.21668578044 }, { "content": "type P<A> = (A, ProofId);\n\n\n\nimpl BuildAssemblyProc<'_> {\n\n fn bool(&mut self, b: bool) -> P<bool> {\n\n (b, if b { app!(self, (tru)) } else { app!(self, (fal)) })\n\n }\n\n\n\n fn dn(&mut self, i: u8) -> P<u8> { (i, app!(self, (dn[i]))) }\n\n fn xn(&mut self, i: u8) -> P<u8> { (i, app!(self, (h2n {self.hex[i]}))) }\n\n\n\n /// Proves `(a, |- bit x[hex] d[i] = d[a])`\n\n fn xbit(&mut self, hex: u8, i: u8) -> (P<u8>, ProofId) {\n\n let a = self.dn((hex >> i) & 1);\n\n (a, thm!(self.thm, CACHE[xbit[hex][i]]: (bit (h2n {self.hex[hex]}) (dn[i])) = {a.1}))\n\n }\n\n\n\n /// Proves `(a, |- a -ZN b = c)` given `b` and `c`.\n\n #[allow(clippy::cast_sign_loss)]\n\n fn znsub_left(&mut self, b: Num, c: i64) -> (Num, ProofId) {\n\n let a = self.hex.from_u64(&mut self.thm, b.val.wrapping_add(c as u64));\n", "file_path": "mm0-rs/src/mmc/proof/assembler.rs", "rank": 37, "score": 208224.07014120178 }, { "content": "fn list(init: &[LispVal], e: Option<&LispKind>, mut start: bool, fe: FormatEnv<'_>, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for e in init {\n\n if start {\n\n write!(f, \"({}\", fe.to(e))?;\n\n start = false\n\n } else {\n\n write!(f, \" {}\", fe.to(e))?\n\n }\n\n }\n\n match e {\n\n None => if start {write!(f, \"()\")} else {write!(f, \")\")},\n\n Some(LispKind::List(es)) => list(es, None, start, fe, f),\n\n Some(LispKind::DottedList(es, r)) => list(es, Some(r), start, fe, f),\n\n Some(e) if e.exactly(0) 
=> if start {write!(f, \"()\")} else {write!(f, \")\")},\n\n Some(e) => if start {write!(f, \"{}\", fe.to(e))} else {write!(f, \" . {})\", fe.to(e))}\n\n }\n\n}\n\n\n\nimpl EnvDisplay for AtomId {\n\n fn fmt(&self, fe: FormatEnv<'_>, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "mm0-rs/src/elab/lisp/print.rs", "rank": 38, "score": 205902.63237300026 }, { "content": "#[must_use]\n\npub fn whitespace(c: u8) -> bool { c == b' ' || c == b'\\n' }\n\n\n", "file_path": "mm0-rs/components/mm1_parser/src/lib.rs", "rank": 39, "score": 205784.29382584838 }, { "content": "/// Set the initial proof checking behavior at the start of an MM1 file\n\n/// before a `(check-proofs)` command is found.\n\npub fn set_check_proofs(b: bool) { CHECK_PROOFS.store(b, Ordering::Relaxed) }\n", "file_path": "mm0-rs/src/lib.rs", "rank": 40, "score": 205659.80649330158 }, { "content": "/// A variant on `Dest` for values that are going out of a block via `break`.\n\ntype BlockDest = Option<VarId>;\n\n\n", "file_path": "mm0-rs/components/mmcc/src/build_mir.rs", "rank": 41, "score": 205262.61335993136 }, { "content": "/// A trait for newtyped integers, that can be used as index types in vectors and sets.\n\npub trait Idx: Copy + Eq {\n\n /// Convert from `T` to `usize`\n\n fn into_usize(self) -> usize;\n\n /// Convert from `usize` to `T`\n\n fn from_usize(_: usize) -> Self;\n\n /// Generate a fresh variable from a `&mut ID` counter.\n\n fn fresh(&mut self) -> Self {\n\n let n = *self;\n\n *self = Self::from_usize(self.into_usize() + 1);\n\n n\n\n }\n\n}\n\n\n\nimpl Idx for usize {\n\n fn into_usize(self) -> usize { self }\n\n fn from_usize(n: usize) -> Self { n }\n\n}\n\n\n\n/// A vector indexed by a custom indexing type `I`, usually a newtyped integer.\n\n#[derive(Clone)]\n", "file_path": "mm0-rs/components/mmcc/src/types/mod.rs", "rank": 42, "score": 202657.31372698036 }, { "content": "fn mk_to_range() -> impl FnMut(&FileSpan) -> Option<Range> {\n\n let mut srcs = 
HashMap::new();\n\n move |fsp: &FileSpan| -> Option<Range> {\n\n srcs.entry(fsp.file.ptr())\n\n .or_insert_with(|| VFS.0.ulock().get(&fsp.file).unwrap().text.clone())\n\n .try_ascii().map(|f| f.to_range(fsp.span))\n\n }\n\n}\n\n\n\nimpl ElabErrorKind {\n\n /// Convert the payload of an elaboration error to the footer data\n\n /// of a [`Snippet`].\n\n ///\n\n /// # Parameters\n\n ///\n\n /// - `arena`: A temporary [`typed_arena::Arena`] for storing [`String`]s that are\n\n /// allocated for the snippet\n\n /// - `to_range`: a function for converting (index-based) spans to (line/col) ranges\n\n pub fn to_footer<'a>(&self, arena: &'a Arena<String>,\n\n mut to_range: impl FnMut(&FileSpan) -> Option<Range>) -> Vec<Annotation<'a>> {\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 43, "score": 202534.45347987037 }, { "content": "type TrMap<K, V> = HashMap<K, Result<V, HashMap<GenId, V>>>;\n", "file_path": "mm0-rs/components/mmcc/src/build_mir.rs", "rank": 44, "score": 199623.3220714972 }, { "content": "#[proc_macro_derive(EnvDebug)]\n\npub fn derive_env_debug(input : TokenStream) -> TokenStream {\n\n let mut parsed = parse_macro_input!(input as syn::DeriveInput);\n\n let tt = mk_item(&mut parsed, false);\n\n\n\n TokenStream::from(quote! 
{\n\n #tt\n\n })\n\n}\n\n\n\n/// Use this one if you only want to print the public fields of an item.\n", "file_path": "mm0-rs/components/debug_derive/src/lib.rs", "rank": 45, "score": 199325.01081576274 }, { "content": "#[must_use]\n\npub fn lisp_ident(c: u8) -> bool { ident_rest(c) || b\"!%&*/:<=>?^~+-.@\".contains(&c) }\n\n\n\n/// return true iff a given character is a space or newline character.\n", "file_path": "mm0-rs/components/mm1_parser/src/lib.rs", "rank": 46, "score": 196143.84561447668 }, { "content": "fn render_doc(w: &mut impl Write, doc: &Option<DocComment>) -> io::Result<()> {\n\n if let Some(doc) = doc {\n\n use pulldown_cmark::{Parser, html};\n\n write!(w, r#\" <div class=\"doc\">\"#)?;\n\n html::write_html(&mut *w, Parser::new(doc))?;\n\n writeln!(w, \"</div>\")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "mm0-rs/src/doc/mod.rs", "rank": 47, "score": 194112.88767232993 }, { "content": "fn pattern_match<'b>(stack: &mut Vec<PatternStack<'b>>, ctx: &mut [LispVal],\n\n mut active: PatternState<'b>) -> std::result::Result<bool, TestPending<'b>> {\n\n loop {\n\n // println!(\"{}\\n\", self.print(&active));\n\n active = match active {\n\n PatternState::Eval(p, e) => match p {\n\n Pattern::Skip => PatternState::Ret(true),\n\n &Pattern::Atom(i) => {ctx[i] = e; PatternState::Ret(true)}\n\n &Pattern::QuoteAtom(a) => PatternState::Ret(e.unwrapped(|e|\n\n if let LispKind::Atom(a2) = *e {a == a2} else {false})),\n\n Pattern::String(s) => PatternState::Ret(e.unwrapped(|e|\n\n if let LispKind::String(s2) = e {s == s2} else {false})),\n\n &Pattern::Bool(b) => PatternState::Ret(e.unwrapped(|e|\n\n if let LispKind::Bool(b2) = *e {b == b2} else {false})),\n\n Pattern::Undef => PatternState::Ret(e.unwrapped(|e| *e == LispKind::Undef)),\n\n Pattern::Number(i) => PatternState::Ret(e.unwrapped(|e|\n\n if let LispKind::Number(i2) = e {i == i2} else {false})),\n\n Pattern::MVar(p) => e.unwrapped(|e| match e {\n\n LispKind::MVar(_, is) => match (p, is) {\n\n 
(MVarPattern::Any, _) |\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 48, "score": 193684.5764668364 }, { "content": "type Var<'a> = (Span, AtomId, Vec<Item<'a>>);\n\n\n\nimpl<'a> LispParser<'a> {\n\n #[allow(clippy::vec_init_then_push)] // bug: rust-clippy#6615\n\n fn def_var<'c>(&mut self, mut e: &'c SExpr) -> Result<Var<'c>, ElabError> {\n\n let mut stack = vec![];\n\n loop {\n\n match &e.k {\n\n &SExprKind::Atom(a) => break Ok((e.span, self.parse_atom(e.span, a)?, stack)),\n\n SExprKind::List(xs) if !xs.is_empty() =>\n\n {stack.push(Item::List(&xs[1..])); e = &xs[0]}\n\n SExprKind::DottedList(xs, y) if !xs.is_empty() =>\n\n {stack.push(Item::DottedList(&xs[1..], y)); e = &xs[0]}\n\n _ => return Err(ElabError::new_e(e.span, \"def: invalid spec\"))\n\n }\n\n }\n\n }\n\n\n\n fn def_ir(&mut self, sp: Span, es: &[SExpr], stack: Vec<Item<'_>>) -> Result<Vec<Ir>, ElabError> {\n\n for e in stack.iter().rev() {\n", "file_path": "mm0-rs/src/elab/lisp/parser.rs", "rank": 49, "score": 189217.08070308884 }, { "content": "/// Initialize a map from symbols to values of type `T`. 
Note that this will create an array the\n\n/// same size as all symbols that have ever been interned, so it is best to use this only during\n\n/// initialization for keyword lists and the like.\n\npub fn init_dense_symbol_map<T: Clone>(kv: &[(Symbol, T)]) -> Box<[Option<T>]> {\n\n use crate::types::Idx;\n\n let mut vec = vec![None; kv.iter().map(|p| p.0).max().map_or(0, |n| n.into_usize() + 1)];\n\n for (k, v) in kv { vec[k.into_usize()] = Some(v.clone()) }\n\n vec.into()\n\n}\n", "file_path": "mm0-rs/components/mmcc/src/symbol.rs", "rank": 50, "score": 187950.7618497579 }, { "content": "#[must_use]\n\npub fn ident_rest(c: u8) -> bool { ident_start(c) || (b'0'..=b'9').contains(&c) }\n\n\n\n/// return true iff a given character is an acceptable lisp ident.\n", "file_path": "mm0-rs/components/mm1_parser/src/lib.rs", "rank": 51, "score": 187652.67623840325 }, { "content": "fn indent(i: usize, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n (0..i).try_for_each(|_| write!(f, \" \"))\n\n}\n\n\n\n/// A field accessor.\n\n#[derive(Copy, Clone, Debug)]\n\npub enum FieldName {\n\n /// A numbered field access like `x.1`.\n\n Number(u32),\n\n /// A named field access like `x.foo`.\n\n Named(Symbol),\n\n}\n\n#[cfg(feature = \"memory\")] mm0_deepsize::deep_size_0!(FieldName);\n\n\n\nimpl std::fmt::Display for FieldName {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match *self {\n\n FieldName::Number(n) => n.fmt(f),\n\n FieldName::Named(a) => a.fmt(f),\n\n }\n", "file_path": "mm0-rs/components/mmcc/src/types/mod.rs", "rank": 52, "score": 187055.42331858265 }, { "content": "fn set_report_mode(fe: FormatEnv<'_>, mode: &mut ReportMode, args: &[LispVal]) -> SResult<()> {\n\n if args.len() == 1 {\n\n if let Some(b) = args[0].as_bool() {\n\n mode.error = b;\n\n mode.warn = b;\n\n mode.info = b;\n\n Ok(())\n\n } else {Err(\"invalid arguments\".into())}\n\n } else if let Some(b) = args[1].as_bool() {\n\n match args[0].as_atom().ok_or(\"expected 
an atom\")? {\n\n AtomId::ERROR => mode.error = b,\n\n AtomId::WARN => mode.warn = b,\n\n AtomId::INFO => mode.info = b,\n\n s => return Err(format!(\"unknown error level '{}'\", fe.to(&s)))\n\n }\n\n Ok(())\n\n } else {Err(\"invalid arguments\".into())}\n\n}\n\n\n\n/// The lisp evaluation context, representing a lisp evaluation in progress.\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 53, "score": 184105.8405625623 }, { "content": "fn parse_request(Request {id, method, params}: Request) -> Result<Option<(RequestId, RequestType)>> {\n\n Ok(match method.as_str() {\n\n \"textDocument/completion\" => Some((id, RequestType::Completion(from_value(params)?))),\n\n \"completionItem/resolve\" => Some((id, RequestType::CompletionResolve(from_value(params)?))),\n\n \"textDocument/hover\" => Some((id, RequestType::Hover(from_value(params)?))),\n\n \"textDocument/definition\" => Some((id, RequestType::Definition(from_value(params)?))),\n\n \"textDocument/documentSymbol\" => Some((id, RequestType::DocumentSymbol(from_value(params)?))),\n\n \"textDocument/references\" => Some((id, RequestType::References(from_value(params)?))),\n\n \"textDocument/documentHighlight\" => Some((id, RequestType::DocumentHighlight(from_value(params)?))),\n\n _ => None\n\n })\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 54, "score": 182228.0593634521 }, { "content": "#[allow(unused)]\n\nfn mk_arm_pub(\n\n item_ident : &Ident,\n\n fields : &Fields,\n\n variant_ident : Option<&Ident>\n\n) -> syn::Arm {\n\n let (item_path, namestring) : (syn::Path, String) = match variant_ident {\n\n // if struct\n\n None => (\n\n parse_quote!(#item_ident),\n\n format!(\"{}\", item_ident)\n\n ),\n\n // if enum\n\n Some(variant) => (\n\n parse_quote!(#item_ident::#variant),\n\n format!(\"{}::{}\", item_ident, variant)\n\n )\n\n };\n\n\n\n match fields {\n\n Fields::Named(named) => {\n", "file_path": "mm0-rs/components/debug_derive/src/lib.rs", "rank": 55, "score": 182075.74340559557 }, { 
"content": "fn new_mvar(mvars: &mut Vec<LispVal>, tgt: InferTarget, sp: Option<FileSpan>) -> LispVal {\n\n let n = mvars.len();\n\n let e = LispVal::new(LispKind::MVar(n, tgt));\n\n let e = LispVal::new_ref(if let Some(sp) = sp {e.span(sp)} else {e});\n\n mvars.push(e.clone());\n\n e\n\n}\n\n\n\nimpl LocalContext {\n\n /// Create a new local context.\n\n #[must_use] pub fn new() -> LocalContext { Default::default() }\n\n\n\n /// Reset the local context. This is the same as assigning to `new()` except it\n\n /// is a bit more efficient because it reuses allocations.\n\n pub fn clear(&mut self) {\n\n self.vars.clear();\n\n self.var_order.clear();\n\n self.mvars.clear();\n\n self.goals.clear();\n\n self.proofs.clear();\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 56, "score": 179290.76627498306 }, { "content": "struct Backward;\n\n\n\nimpl Direction for Backward {\n\n const FORWARD: bool = false;\n\n\n\n #[inline] fn map_block<'a, D>(\n\n bl: &'a BasicBlock,\n\n d: &mut D,\n\n mut f: impl FnMut(usize, &'a Statement, &mut D),\n\n g: impl FnOnce(&'a Terminator, &mut D)\n\n ) {\n\n g(bl.terminator(), d);\n\n bl.stmts.iter().enumerate().rev().for_each(|(bl, s)| f(bl, s, d))\n\n }\n\n\n\n fn preferred_traverse<'a>(cfg: &'a Cfg, mut f: impl FnMut(BlockId, &'a BasicBlock)) {\n\n cfg.postorder(BlockId::ENTRY).for_each(|(id, bl)| f(id, bl))\n\n }\n\n\n\n fn join_state_into_successors<'a, D>(\n\n cfg: &Cfg, id: BlockId, exit_state: &'a D,\n\n mut propagate: impl FnMut(Edge, BlockId, &'a D)\n\n ) {\n\n cfg.predecessors()[id].iter().for_each(|&(e, pred)| propagate(e, pred, exit_state))\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/mir_opt/mod.rs", "rank": 57, "score": 177184.923661081 }, { "content": "struct OnVars<F>(F);\n\n\n\nimpl<'a, F: FnMut(VarId)> TyVisit<'a> for OnVars<F> {\n\n fn visit_expr(&mut self, e: Expr<'a>) { e.visit(self) }\n\n}\n\nimpl<'a, F: FnMut(VarId)> ExprVisit<'a> for OnVars<F> {\n\n fn visit_ty(&mut self, ty: Ty<'a>) { 
ty.visit(self) }\n\n fn visit_var(&mut self, v: VarId) { self.0(v) }\n\n}\n\n\n\n/// A place expression.\n\npub type Place<'a> = &'a PlaceS<'a>;\n\n/// A place expression.\n\npub type PlaceS<'a> = WithMeta<PlaceKind<'a>>;\n\n/// A place expression, or a \"place to blame\" for why it's not pure.\n\npub type RPlace<'a> = Result<Place<'a>, &'a FileSpan>;\n\n/// A pair of an optional place expression and a type, used to classify the result\n\n/// of expressions that may or may not be pure.\n\npub type RPlaceTy<'a> = (RPlace<'a>, Ty<'a>);\n\n\n", "file_path": "mm0-rs/components/mmcc/src/types/ty.rs", "rank": 58, "score": 175586.12750897388 }, { "content": "/// A `JoinBlock` represents a potential jump location, together with the information needed to\n\n/// correctly pass all the updated values of mutable variables from the current context.\n\n/// * `gen`: The generation on entry to the target\n\n/// * `muts`: The variables that could potentially have been mutated between when this `JoinBlock`\n\n/// was created and the context we are jumping from. These lists are calculated during type\n\n/// inference and are mostly syntax directed.\n\ntype JoinPoint = (GenId, Rc<[HVarId]>);\n\n\n\n/// A `JoinBlock` represents a potential jump location, together with the information needed to\n\n/// correctly pass all the updated values of mutable variables from the current context.\n", "file_path": "mm0-rs/components/mmcc/src/build_mir.rs", "rank": 59, "score": 174607.10105053213 }, { "content": "/// A visitor that calls the function `F` on every computationally relevant use of a variable.\n\nstruct UseVisitor<F>(F);\n\n\n\nimpl<F: FnMut(VarId)> Visitor for UseVisitor<F> {\n\n fn visit_var(&mut self, v: VarId) { (self.0)(v) }\n\n}\n\n\n\n/// A basic block, which consists of an initial context (containing the logical parameters to the\n\n/// block), followed by a list of statements, and ending with a terminator. 
The terminator is\n\n/// optional only during MIR construction, and represents an \"unfinished\" block.\n\n#[derive(Clone)]\n\n#[cfg_attr(feature = \"memory\", derive(DeepSizeOf))]\n\npub struct BasicBlock {\n\n /// The initial context on entry to the block.\n\n pub ctx: CtxId,\n\n /// The computational relevance of all the variables on entry to the block\n\n /// (filled by ghost propagation pass).\n\n pub relevance: Option<BitVec>,\n\n /// If false, then the current context is able to prove false,\n\n /// and all control paths end in `unreachable`.\n\n pub reachable: bool,\n", "file_path": "mm0-rs/components/mmcc/src/types/mir.rs", "rank": 60, "score": 173025.18271690217 }, { "content": "#[derive(Debug)]\n\nstruct Counter<V> {\n\n vars: HashMap<V, usize>,\n\n max: usize\n\n}\n\n\n\nimpl<V> Default for Counter<V> {\n\n fn default() -> Self { Self { vars: Default::default(), max: 0 } }\n\n}\n\n\n\nimpl<V: Hash + Eq> Counter<V> {\n\n fn get(&mut self, v: V) -> usize {\n\n let Counter {vars, max} = self;\n\n *vars.entry(v).or_insert_with(|| { let n = *max; *max += 1; n })\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/infer.rs", "rank": 61, "score": 172377.03636240528 }, { "content": "fn parse_u8(p: &mut &[u8]) -> u8 { parse_arr::<1>(p)[0] }\n", "file_path": "mm0-rs/src/mmc/proof/assembler.rs", "rank": 62, "score": 169907.33113922394 }, { "content": "trait Domain: Clone {\n\n /// Compute the least upper bound of `self` and `other`,\n\n /// storing into `self` and returning `true` if `self` changed as a result.\n\n fn join(&mut self, other: &Self) -> bool;\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/mir_opt/mod.rs", "rank": 63, "score": 169819.62588066648 }, { "content": "struct AxiomUse(HashMap<ThmId, BitSet>);\n\n\n\nimpl AxiomUse {\n\n fn new(env: &Environment) -> (Vec<ThmId>, Self) {\n\n let mut axuse = HashMap::new();\n\n let mut to_tid = vec![ThmId(u32::MAX)];\n\n for (tid, td) in env.thms.enum_iter() {\n\n if let ThmKind::Axiom = td.kind 
{\n\n let axid = to_tid.len();\n\n to_tid.push(tid);\n\n let mut bs = BitSet::new();\n\n bs.insert(axid);\n\n axuse.insert(tid, bs);\n\n }\n\n }\n\n (to_tid, AxiomUse(axuse))\n\n }\n\n\n\n fn accumulate(&mut self, env: &Environment, bs: &mut BitSet, node: &ProofNode) {\n\n match node {\n", "file_path": "mm0-rs/src/doc/mod.rs", "rank": 64, "score": 166846.29380194427 }, { "content": "fn parse_u8(p: &mut &[u8]) -> u8 { parse_arr::<1>(p)[0] }\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/proof.rs", "rank": 65, "score": 166810.0877815446 }, { "content": "fn parse_u32(p: &mut &[u8]) -> u32 { u32::from_le_bytes(parse_arr(p)) }\n\n\n", "file_path": "mm0-rs/src/mmc/proof/assembler.rs", "rank": 66, "score": 165339.64207202435 }, { "content": "fn parse_u64(p: &mut &[u8]) -> u64 { u64::from_le_bytes(parse_arr(p)) }\n\n\n\n/// The layout for an instruction.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct InstLayout {\n\n /// The REX byte, if present (`Some(0..16)` or `None`)\n\n pub rex: Option<u8>,\n\n /// The main layout,\n\n pub opc: OpcodeLayout,\n\n}\n\n\n\nimpl super::InstLayout {\n\n fn parse(self, p: &mut &[u8]) -> InstLayout {\n\n InstLayout {\n\n rex: if self.rex { Some(parse_u8(p) & 15) } else { None },\n\n opc: self.opc.parse(p),\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/proof.rs", "rank": 67, "score": 162542.37621501222 }, { "content": "fn parse_u32(p: &mut &[u8]) -> u32 { u32::from_le_bytes(parse_arr(p)) }\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/proof.rs", "rank": 68, "score": 162542.37621501222 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main() -> Result<(), JsValue> {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n Ok(())\n\n}\n", "file_path": "m0e/src/lib.rs", "rank": 69, "score": 160745.35234267783 }, { "content": "type OpenRequests = Mutex<HashMap<RequestId, Arc<AtomicBool>>>;\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 70, "score": 160036.1161524398 }, { "content": 
"/// Performs \"curly transformation\", turning `{x op y op z}` into `(op x y z)`.\n\n///\n\n/// A curly list is valid if\n\n/// - it is a proper list, and\n\n/// - it has at most two elements (in which case it is transformed to itself), or\n\n/// - it has an odd number of elements and the elements at all odd numbered positions compare equal.\n\n/// (in which case the element at position 1 is moved to the front, and the later\n\n/// copies of it are removed).\n\n///\n\n/// Invalid curly lists like `{x op y op2 z}` are converted to `(:nfx x op y op2 z)`.\n\n///\n\n/// # Parameters\n\n///\n\n/// - `es`: The list of elements to transform, such as `[x, op, y, op, z]`\n\n/// - `no_dot`: True if this is a proper list. A dotted list like `{x op y . z}` is not a\n\n/// valid curly list, and is transformed to `(:nfx x op y . z)`.\n\n/// - `eq`: An equality comparator for elements of the list.\n\n/// - `nfx`: A constructor for the `:nfx` atom, in case this is not a valid curly list.\n\n///\n\n/// # Returns\n\n///\n\n/// Returns nothing, but modifies the input `es` to reorder the elements so that the\n\n/// operation at odd positions comes first and the elements at even positions come later,\n\n/// for example `[x, op, y, op, z]` becomes `[op, x, y, z]`.\n\npub fn curly_transform<T>(\n\n es: &mut Vec<T>, no_dot: bool, eq: impl Fn(&T, &T) -> bool, nfx: impl FnOnce() -> T,\n\n) {\n\n let n = es.len();\n\n if n > 2 {\n\n let valid_curly = no_dot && n % 2 != 0 && {\n\n let e = &es[1];\n\n (3..n).step_by(2).all(|i| eq(&es[i], e))\n\n };\n\n if valid_curly {\n\n es.swap(0, 1);\n\n let mut from = 4;\n\n let mut to = 3;\n\n while from < n {\n\n es.swap(from, to);\n\n to += 1;\n\n from += 2;\n\n }\n\n es.truncate(to);\n\n } else {\n", "file_path": "mm0-rs/components/mm1_parser/src/ast.rs", "rank": 71, "score": 160034.5347979187 }, { "content": "#[allow(clippy::cast_possible_wrap)]\n\nfn parse_i8_64(p: &mut &[u8]) -> i64 { i64::from(parse_u8(p) as i8) }\n\n\n", "file_path": 
"mm0-rs/src/mmc/proof/assembler.rs", "rank": 72, "score": 160033.22328390618 }, { "content": "#[allow(clippy::cast_possible_wrap)]\n\nfn parse_i32_64(p: &mut &[u8]) -> i64 { i64::from(parse_u32(p) as i32) }\n\n\n", "file_path": "mm0-rs/src/mmc/proof/assembler.rs", "rank": 73, "score": 160033.22328390618 }, { "content": "fn log_msg(#[allow(unused_mut)] mut s: String) {\n\n #[cfg(feature = \"memory\")]\n\n match crate::get_memory_usage() {\n\n 0 => {}\n\n n => {\n\n use std::fmt::Write;\n\n write!(s, \", memory = {}M\", n >> 20).expect(\"writing to a string\");\n\n }\n\n }\n\n println!(\"{}\", s)\n\n}\n\n\n\n/// Elaborate a file for an [`Environment`](crate::elab::Environment) result.\n\n///\n\n/// This is the main elaboration function, as an `async fn`. Given a `path`,\n\n/// it gets it from the [`VFS`] (which will probably load it from the filesystem),\n\n/// and checks if it has already been elaborated, returning it if finished and\n\n/// awaiting if it is in progress in another task.\n\n///\n\n/// If the file has not yet been elaborated, it parses it into an [`Ast`], reports\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 74, "score": 159947.23786124983 }, { "content": "#[cfg(all(feature = \"memory\", target_os = \"linux\"))]\n\n#[must_use]\n\npub fn get_memory_usage() -> usize {\n\n procinfo::pid::statm_self().map_or_else(|_| get_memory_rusage(), |stat| stat.data * 4096)\n\n}\n\n\n\n/// Try to get total memory usage (stack + data) in bytes using the `/proc` filesystem.\n\n/// Falls back on [`getrusage()`](libc::getrusage) if procfs doesn't exist.\n", "file_path": "mm0-rs/components/mm0_util/src/lib.rs", "rank": 75, "score": 158346.95891121865 }, { "content": "#[derive(Debug)]\n\nstruct Dedup(Vec<(ProofHash, bool)>);\n\n\n\nimpl Dedup {\n\n fn new(nargs: usize) -> Dedup {\n\n Self((0..nargs).map(|i| (ProofHash::Ref(ProofKind::Expr, i), true)).collect())\n\n }\n\n\n\n fn push(&mut self, v: ProofHash) -> usize {\n\n (self.0.len(), self.0.push((v, false))).0\n\n 
}\n\n}\n\n\n\nimpl std::ops::Index<usize> for Dedup {\n\n type Output = ProofHash;\n\n fn index(&self, n: usize) -> &ProofHash { &self.0[n].0 }\n\n}\n\n\n\nimpl IDedup<ProofHash> for Dedup {\n\n fn add_direct(&mut self, v: ProofHash) -> usize { self.push(v) }\n\n\n\n fn reuse(&mut self, n: usize) -> usize {\n\n self.0[n].1 = true;\n\n n\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/mmb/import.rs", "rank": 76, "score": 157941.90796131658 }, { "content": "#[derive(Clone, Debug)]\n\nstruct JoinBlock(BlockId, JoinPoint);\n\n\n", "file_path": "mm0-rs/components/mmcc/src/build_mir.rs", "rank": 77, "score": 157901.06124449696 }, { "content": "fn is_nonatomic_proof(e: &ProofNode) -> bool {\n\n matches!(e, ProofNode::Thm {..} | ProofNode::Conv(_))\n\n}\n\n\n", "file_path": "mm0-rs/src/mmu/export.rs", "rank": 78, "score": 157347.89204552743 }, { "content": "fn match_enum_fields(fields: &syn::Fields) -> TokenStream {\n\n match fields {\n\n Fields::Named(ref fields) => {\n\n let recurse = fields.named.iter().map(|f| {\n\n let name = &f.ident;\n\n quote_spanned! {f.span()=>\n\n ::mm0_deepsize::DeepSizeOf::deep_size_of_children(#name, context)\n\n }\n\n });\n\n quote! {\n\n 0 #(+ #recurse)*\n\n }\n\n }\n\n Fields::Unnamed(ref fields) => {\n\n let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {\n\n let i = syn::Ident::new(&format!(\"_{}\", i), proc_macro2::Span::call_site());\n\n quote_spanned! 
{f.span()=>\n\n ::mm0_deepsize::DeepSizeOf::deep_size_of_children(#i, context)\n\n }\n\n });\n", "file_path": "mm0-rs/components/mm0_deepsize_derive/src/lib.rs", "rank": 79, "score": 157330.46115345712 }, { "content": "fn parse_slice<'a>(p: &mut &'a [u8], n: usize) -> &'a [u8] {\n\n let (start, rest) = p.split_at(n);\n\n *p = rest;\n\n start\n\n}\n", "file_path": "mm0-rs/src/mmc/proof/assembler.rs", "rank": 80, "score": 157248.8252823147 }, { "content": "fn parse_arr<const N: usize>(p: &mut &[u8]) -> [u8; N] {\n\n parse_slice(p, N).try_into().expect(\"parse error\")\n\n}\n", "file_path": "mm0-rs/src/mmc/proof/assembler.rs", "rank": 81, "score": 155956.09441319463 }, { "content": "/// A trait for populating the `data` field on the index `X` of an [`MmbFile`] given a table entry.\n\npub trait MmbIndexBuilder<'a>: Default {\n\n /// Implementors are expected to match on the [`TableEntry::id`] field, and use the data if it\n\n /// matches a particular name.\n\n fn build<X>(&mut self, f: &mut MmbFile<'a, X>, e: &'a TableEntry) -> Result<(), ParseError>;\n\n}\n\n\n\nimpl<'a> MmbIndexBuilder<'a> for () {\n\n #[inline]\n\n fn build<X>(&mut self, _: &mut MmbFile<'a, X>, _: &'a TableEntry) -> Result<(), ParseError> {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<'a, A: MmbIndexBuilder<'a>, B: MmbIndexBuilder<'a>> MmbIndexBuilder<'a> for (A, B) {\n\n #[inline]\n\n fn build<X>(&mut self, f: &mut MmbFile<'a, X>, e: &'a TableEntry) -> Result<(), ParseError> {\n\n self.0.build(f, e)?;\n\n self.1.build(f, e)\n\n }\n\n}\n", "file_path": "mm0-rs/components/mm0b_parser/src/parser.rs", "rank": 82, "score": 155174.41434350435 }, { "content": "/// A trait to factor the commonalities of [`VReg`] and [`PReg`].\n\npub trait IsReg: Sized + Eq {\n\n /// A special value of the type representing the invalid value.\n\n fn invalid() -> Self;\n\n /// Is this value not the invalid values?\n\n fn is_valid(&self) -> bool { *self != Self::invalid() }\n\n}\n\nimpl IsReg for VReg {\n\n fn invalid() -> Self { 
VReg::invalid() }\n\n}\n\nimpl IsReg for PReg {\n\n fn invalid() -> Self { PReg::invalid() }\n\n}\n\n\n\n/// A memory address. This has the form `off+base+si`, where `off` is a base memory location\n\n/// (a 32 bit address, or an offset from a stack slot, named global or named constant),\n\n/// `base` is a register or 0, and `si` is a shifted register or 0.\n\n/// Note that `base` must be 0 if `off` is `Spill(..)` because spill slots are RSP-relative,\n\n/// so there is no space for a second register in the encoding.\n\n#[derive(Clone, Copy)]\n\npub struct AMode<Reg = VReg> {\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 83, "score": 155143.79854400048 }, { "content": "fn get_clobbers(vcode: &VCode, out: &regalloc2::Output) -> PRegSet {\n\n let mut result = PRegSet::default();\n\n for (_, edit) in &out.edits {\n\n if let Edit::Move { to, .. } = *edit {\n\n if let Some(r) = to.as_reg() { result.insert(r) }\n\n }\n\n }\n\n for (i, _) in vcode.insts.enum_iter() {\n\n for &r in vcode.inst_clobbers(i) { result.insert(r) }\n\n for (op, alloc) in vcode.inst_operands(i).iter().zip(out.inst_allocs(i)) {\n\n if op.kind() != regalloc2::OperandKind::Use {\n\n if let Some(r) = alloc.as_reg() { result.insert(r) }\n\n }\n\n }\n\n }\n\n if let Some(rets) = &vcode.abi.rets {\n\n for abi in &**rets {\n\n if let vcode::ArgAbi::Reg(r, _) = *abi { result.remove(r) }\n\n }\n\n }\n", "file_path": "mm0-rs/components/mmcc/src/regalloc.rs", "rank": 84, "score": 154065.71536213788 }, { "content": "#[cfg(not(feature = \"memory\"))]\n\n#[must_use]\n\npub fn get_memory_usage() -> usize { 0 }\n", "file_path": "mm0-rs/components/mm0_util/src/lib.rs", "rank": 85, "score": 153593.74029321002 }, { "content": "/// Extension trait for [`HashMap`]`<K, V>`.\n\npub trait HashMapExt<K, V> {\n\n /// Like `insert`, but if the insertion fails then it returns the value\n\n /// that it attempted to insert, as well as an [`OccupiedEntry`] containing\n\n /// the other value that was 
found.\n\n // TODO: Change this to try_insert when #82766 lands\n\n fn try_insert_ext(&mut self, k: K, v: V) -> Option<(V, OccupiedEntry<'_, K, V>)>;\n\n}\n\n\n\nimpl<K: Hash + Eq, V, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S> {\n\n fn try_insert_ext(&mut self, k: K, v: V) -> Option<(V, OccupiedEntry<'_, K, V>)> {\n\n match self.entry(k) {\n\n Entry::Vacant(e) => {\n\n e.insert(v);\n\n None\n\n }\n\n Entry::Occupied(e) => Some((v, e)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/components/mm0_util/src/lib.rs", "rank": 86, "score": 153499.38970683215 }, { "content": "fn parse_arr<const N: usize>(p: &mut &[u8]) -> [u8; N] {\n\n let (start, rest) = p.split_at(N);\n\n *p = rest;\n\n start.try_into().expect(\"parse error\")\n\n}\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/proof.rs", "rank": 87, "score": 153063.8704458257 }, { "content": "/// A \"hashable\" type. We use this to abstract the difference between\n\n/// [`ExprHash`] and [`ProofHash`]. The definition of [`NodeHash`] is mutually recursive\n\n/// with the [`Dedup`] struct. A [`NodeHash`] type represents a nonrecursive shadow\n\n/// of a recursive type (namely [`ExprNode`] and [`ProofNode`], respectively),\n\n/// where recursive occurrences are replaced with indices tracked by the [`Dedup`] type.\n\n/// Effectively, [`Dedup`] is acting as an arena allocator where the pointers are\n\n/// replaced by integers.\n\npub trait NodeHash: Hash + Eq + Sized {\n\n /// The variant that constructs a variable from an index.\n\n const REF: fn(ProofKind, usize) -> Self;\n\n\n\n /// Given a lisp expression `r` representing an element of the type,\n\n /// parse it into a [`NodeHash`] object. 
If the object has already been constructed,\n\n /// it may also return an index to the element in the [`Dedup`].\n\n fn from<'a>(nh: &NodeHasher<'a>, fsp: Option<&FileSpan>, kind: ProofKind, r: &LispVal,\n\n de: &mut Dedup<Self>) -> Result<Result<Self, usize>>;\n\n\n\n /// Calculate the variable dependence of a [`NodeHash`] object, given a function\n\n /// `deps` that will provide the dependencies of elements. Bump `bv` if this object\n\n /// is a dummy variable.\n\n fn vars(&self, bv: &mut u64, deps: impl Fn(usize) -> u64) -> u64;\n\n}\n\n\n\n/// The main hash-consing state object. This tracks previously hash-consed elements\n\n/// and uses the [`Hash`] implementation required by [`NodeHash`] to hash elements of\n\n/// the hash type `H`. (Since these objects may be somewhat large, we store them\n\n/// behind an [`Rc`] so that they can go in both the map and the vec.)\n", "file_path": "mm0-rs/src/elab/proof.rs", "rank": 88, "score": 152646.58394200224 }, { "content": "#[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]\n\nfn layout_binop_lo(sz: Size, dst: PReg, src: &PRegMemImm) -> InstLayout {\n\n let mut rex = sz == Size::S64;\n\n if sz == Size::S8 { high_reg(&mut rex, dst); high_rmi(&mut rex, src) }\n\n let mut opc = match *src {\n\n PRegMemImm::Imm(i) => match sz {\n\n Size::S8 => OpcodeLayout::BinopImm(false, layout_opc_reg(&mut rex, dst)),\n\n Size::S16 => unimplemented!(),\n\n _ if i as i8 as u32 == i => OpcodeLayout::BinopImm8(layout_opc_reg(&mut rex, dst)),\n\n _ => OpcodeLayout::BinopImm(true, layout_opc_reg(&mut rex, dst)),\n\n }\n\n _ => OpcodeLayout::BinopReg(layout_rmi(&mut rex, dst, src))\n\n };\n\n if dst == RAX && matches!(src, PRegMemImm::Imm(..)) {\n\n let rax = OpcodeLayout::BinopRAX(sz != Size::S8);\n\n if rax.len() <= opc.len() { opc = rax }\n\n }\n\n InstLayout { rex, opc }\n\n}\n\n\n\nimpl PInst {\n", "file_path": "mm0-rs/components/mmcc/src/arch/x86/mod.rs", "rank": 89, "score": 152624.288709488 }, { "content": 
"#[must_use]\n\npub fn alphanumber(n: usize) -> String {\n\n let mut out = Vec::with_capacity(2);\n\n let mut n = n + 1;\n\n while n != 0 {\n\n #[allow(clippy::cast_possible_truncation)]\n\n {\n\n out.push(b'a' + ((n - 1) % 26) as u8);\n\n }\n\n #[allow(clippy::integer_division)]\n\n {\n\n n = (n - 1) / 26;\n\n }\n\n }\n\n out.reverse();\n\n unsafe { String::from_utf8_unchecked(out) }\n\n}\n\n\n", "file_path": "mm0-rs/components/mm0_util/src/lib.rs", "rank": 90, "score": 151167.57102421665 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn u32_as_usize(n: u32) -> usize {\n\n n.try_into().expect(\"here's a nickel, get a better computer\")\n\n}\n\n\n\n/// Translate a number into an alphabetic numbering system, indexing into the following infinite\n\n/// sequence:\n\n/// ```ignore\n\n/// a, b, c, ... z, aa, ab, ... az, ba, ... bz, ... zz, aaa, ...\n\n/// ```\n", "file_path": "mm0-rs/components/mm0_util/src/lib.rs", "rank": 91, "score": 149481.42990792324 }, { "content": "#[cfg(all(feature = \"memory\", not(target_os = \"linux\")))]\n\n#[must_use]\n\npub fn get_memory_usage() -> usize { get_memory_rusage() }\n\n\n\n/// Try to get total memory usage (stack + data) in bytes using the `/proc` filesystem.\n\n/// Falls back on [`getrusage()`](libc::getrusage) if procfs doesn't exist.\n", "file_path": "mm0-rs/components/mm0_util/src/lib.rs", "rank": 92, "score": 148870.64609711984 }, { "content": "#[wasm_bindgen_test(async)]\n\nfn async_test() -> impl Future<Item = (), Error = JsValue> {\n\n // Creates a JavaScript Promise which will asynchronously resolve with the value 42.\n\n let promise = js_sys::Promise::resolve(&JsValue::from(42));\n\n\n\n // Converts that Promise into a Future.\n\n // The unit test will wait for the Future to resolve.\n\n JsFuture::from(promise)\n\n .map(|x| {\n\n assert_eq!(x, 42);\n\n })\n\n}\n", "file_path": "m0e/tests/app.rs", "rank": 93, "score": 147973.34756328503 }, { "content": " u32 e2;\n", "file_path": "mm0-c/verifier_types.c", "rank": 94, 
"score": 146671.20025795337 }, { "content": " u16 var;\n", "file_path": "mm0-c/verifier_types.c", "rank": 95, "score": 146561.94963247178 }, { "content": "/// Companion to [`EnvDisplay`](super::print::EnvDisplay)\n\npub trait EnvDebug {\n\n /// Get the actual debug representation. It's highly unlikely you'll\n\n /// need to call this outside of another [`EnvDebug`] implementation.\n\n fn env_dbg<'a>(&self, fe: FormatEnv<'a>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result;\n\n}\n\n\n\n\n\n// For types external to mm0-rs, generate an instance of EnvDebug that just returns its default\n\n// std::fmt::Debug representation using the {:#?} formatter.\n\nmacro_rules! env_debug {\n\n ( $($xs:ty),+ ) => {\n\n $(\n\n impl EnvDebug for $xs {\n\n fn env_dbg<'a>(&self, _: FormatEnv<'a>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{:#?}\", self)\n\n }\n\n }\n\n )+\n\n };\n\n}\n", "file_path": "mm0-rs/src/elab/lisp/debug.rs", "rank": 96, "score": 145838.74622843234 } ]
Rust
src/main.rs
magicgoose/oxipng
a857cfcc1e4379434c1848a58578e94ed231845a
#![cfg_attr(feature = "clippy", feature(plugin))] #![cfg_attr(feature = "clippy", plugin(clippy))] #![cfg_attr(feature = "clippy", warn(enum_glob_use))] #![cfg_attr(feature = "clippy", warn(if_not_else))] #![cfg_attr(feature = "clippy", warn(string_add))] #![cfg_attr(feature = "clippy", warn(string_add_assign))] #![warn(trivial_casts, trivial_numeric_casts, unused_import_braces)] #![deny(missing_debug_implementations, missing_copy_implementations)] extern crate clap; extern crate glob; extern crate oxipng; extern crate regex; use clap::{App, Arg, ArgMatches}; use glob::glob; use oxipng::AlphaOptim; use oxipng::Deflaters; use oxipng::Headers; use oxipng::{Options, PngError}; use regex::Regex; use std::collections::HashSet; use std::fs::DirBuilder; use std::path::PathBuf; use std::process::exit; fn main() { let matches = App::new("oxipng") .version(env!("CARGO_PKG_VERSION")) .author("Joshua Holmer <[email protected]>") .about("Losslessly improves compression of PNG files") .arg(Arg::with_name("files") .help("File(s) to compress") .index(1) .multiple(true) .use_delimiter(false) .required(true)) .arg(Arg::with_name("optimization") .help("Optimization level - Default: 2") .short("o") .long("opt") .takes_value(true) .possible_value("0") .possible_value("1") .possible_value("2") .possible_value("3") .possible_value("4") .possible_value("5") .possible_value("6")) .arg(Arg::with_name("backup") .help("Back up modified files") .short("b") .long("backup")) .arg(Arg::with_name("force") .help("Write output even if larger than the original") .short("F") .long("force")) .arg(Arg::with_name("recursive") .help("Recurse into subdirectories") .short("r") .long("recursive")) .arg(Arg::with_name("output_dir") .help("Write output file(s) to <directory>") .long("dir") .takes_value(true) .conflicts_with("output_file") .conflicts_with("stdout")) .arg(Arg::with_name("output_file") .help("Write output file to <file>") .long("out") .takes_value(true) .conflicts_with("output_dir") 
.conflicts_with("stdout")) .arg(Arg::with_name("stdout") .help("Write output to stdout") .long("stdout") .conflicts_with("output_dir") .conflicts_with("output_file")) .arg(Arg::with_name("fix") .help("Enable error recovery") .long("fix")) .arg(Arg::with_name("no-clobber") .help("Do not overwrite existing files") .long("no-clobber")) .arg(Arg::with_name("pretend") .help("Do not write any files, only calculate compression gains") .short("P") .long("pretend")) .arg(Arg::with_name("preserve") .help("Preserve file attributes if possible") .short("p") .long("preserve")) .arg(Arg::with_name("quiet") .help("Run in quiet mode") .short("q") .long("quiet") .conflicts_with("verbose")) .arg(Arg::with_name("verbose") .help("Run in verbose mode") .short("v") .long("verbose") .conflicts_with("quiet")) .arg(Arg::with_name("filters") .help("PNG delta filters (0-5) - Default: 0,5") .short("f") .long("filters") .takes_value(true) .validator(|x| { match parse_numeric_range_opts(&x, 0, 5) { Ok(_) => Ok(()), Err(_) => Err("Invalid option for filters".to_owned()), } })) .arg(Arg::with_name("interlace") .help("PNG interlace type") .short("i") .long("interlace") .takes_value(true) .possible_value("0") .possible_value("1")) .arg(Arg::with_name("compression") .help("zlib compression levels (1-9) - Default: 9") .long("zc") .takes_value(true) .validator(|x| { match parse_numeric_range_opts(&x, 1, 9) { Ok(_) => Ok(()), Err(_) => Err("Invalid option for compression".to_owned()), } })) .arg(Arg::with_name("strategies") .help("zlib compression strategies (0-3) - Default: 0-3") .long("zs") .takes_value(true) .validator(|x| { match parse_numeric_range_opts(&x, 0, 3) { Ok(_) => Ok(()), Err(_) => Err("Invalid option for strategies".to_owned()), } })) .arg(Arg::with_name("window") .help("zlib window size - Default: 32k") .long("zw") .takes_value(true) .possible_value("256") .possible_value("512") .possible_value("1k") .possible_value("2k") .possible_value("4k") .possible_value("8k") 
.possible_value("16k") .possible_value("32k")) .arg(Arg::with_name("alpha") .help("Perform additional alpha optimizations") .short("a") .long("alpha")) .arg(Arg::with_name("no-bit-reduction") .help("No bit depth reduction") .long("nb")) .arg(Arg::with_name("no-color-reduction") .help("No color type reduction") .long("nc")) .arg(Arg::with_name("no-palette-reduction") .help("No palette reduction") .long("np")) .arg(Arg::with_name("no-reductions") .help("No reductions") .long("nx")) .arg(Arg::with_name("no-recoding") .help("No IDAT recoding unless necessary") .long("nz")) .arg(Arg::with_name("strip") .help("Strip metadata objects ['safe', 'all', or comma-separated list]") .long("strip") .takes_value(true) .conflicts_with("strip-safe")) .arg(Arg::with_name("strip-safe") .help("Strip safely-removable metadata objects") .short("s") .conflicts_with("strip")) .arg(Arg::with_name("zopfli") .help("Use the slower but better compressing Zopfli algorithm, overrides zlib-specific options") .short("Z") .long("zopfli")) .arg(Arg::with_name("threads") .help("Set number of threads to use - default 1.5x CPU cores") .long("threads") .short("t") .takes_value(true) .validator(|x| { match x.parse::<usize>() { Ok(val) => { if val > 0 { Ok(()) } else { Err("Thread count must be >= 1".to_owned()) } } Err(_) => Err("Thread count must be >= 1".to_owned()), } })) .after_help("Optimization levels: -o 0 => --zc 3 --nz (0 or 1 trials) -o 1 => --zc 9 (1 trial, determined heuristically) -o 2 => --zc 9 --zs 0-3 -f 0,5 (8 trials) -o 3 => --zc 9 --zs 0-3 -f 0-5 (24 trials) -o 4 => --zc 9 --zs 0-3 -f 0-5 -a (24 trials + 6 alpha trials) -o 5 => --zc 3-9 --zs 0-3 -f 0-5 -a (96 trials + 6 alpha trials) -o 6 => --zc 1-9 --zs 0-3 -f 0-5 -a (180 trials + 6 alpha trials) Manually specifying a compression option (zc, zs, etc.) 
will override the optimization preset, regardless of the order you write the arguments.") .get_matches(); let opts = match parse_opts_into_struct(&matches) { Ok(x) => x, Err(x) => { eprintln!("{}", x); exit(1) } }; if let Err(e) = handle_optimization( matches .values_of("files") .unwrap() .map(|pattern| glob(pattern).expect("Failed to parse input file path")) .flat_map(|paths| { paths .into_iter() .map(|path| path.expect("Failed to parse input file path")) }) .collect(), &opts, ) { eprintln!("{}", e); exit(1); } } fn handle_optimization(inputs: Vec<PathBuf>, opts: &Options) -> Result<(), PngError> { inputs.into_iter().fold(Ok(()), |res, input| { let mut current_opts = opts.clone(); if input.is_dir() { if current_opts.recursive { let cur_result = handle_optimization( input .read_dir() .unwrap() .map(|x| x.unwrap().path()) .collect(), &current_opts, ); return res.and(cur_result); } else { eprintln!("{} is a directory, skipping", input.display()); } return res; } if let Some(ref out_dir) = current_opts.out_dir { current_opts.out_file = Some(out_dir.join(input.file_name().unwrap())); } let cur_result = oxipng::optimize(&input, &current_opts); res.and(cur_result) }) } #[cfg_attr(feature = "clippy", allow(cyclomatic_complexity))] fn parse_opts_into_struct(matches: &ArgMatches) -> Result<Options, String> { let mut opts = if let Some(x) = matches.value_of("optimization") { if let Ok(opt) = x.parse::<u8>() { Options::from_preset(opt) } else { unreachable!() } } else { Options::default() }; if let Some(x) = matches.value_of("interlace") { opts.interlace = x.parse::<u8>().ok(); } if let Some(x) = matches.value_of("filters") { opts.filter = parse_numeric_range_opts(x, 0, 5).unwrap(); } if let Some(x) = matches.value_of("compression") { opts.compression = parse_numeric_range_opts(x, 1, 9).unwrap(); } if let Some(x) = matches.value_of("strategies") { opts.strategies = parse_numeric_range_opts(x, 0, 3).unwrap(); } match matches.value_of("window") { Some("256") => opts.window = 8, 
Some("512") => opts.window = 9, Some("1k") => opts.window = 10, Some("2k") => opts.window = 11, Some("4k") => opts.window = 12, Some("8k") => opts.window = 13, Some("16k") => opts.window = 14, _ => (), } if let Some(x) = matches.value_of("output_dir") { let path = PathBuf::from(x); if !path.exists() { match DirBuilder::new().recursive(true).create(&path) { Ok(_) => (), Err(x) => return Err(format!("Could not create output directory {}", x)), }; } else if !path.is_dir() { return Err(format!( "{} is an existing file (not a directory), cannot create directory", x )); } opts.out_dir = Some(path); } if let Some(x) = matches.value_of("output_file") { opts.out_file = Some(PathBuf::from(x)); } if matches.is_present("stdout") { opts.stdout = true; } if matches.is_present("alpha") { opts.alphas.insert(AlphaOptim::White); opts.alphas.insert(AlphaOptim::Up); opts.alphas.insert(AlphaOptim::Down); opts.alphas.insert(AlphaOptim::Left); opts.alphas.insert(AlphaOptim::Right); } if matches.is_present("backup") { opts.backup = true; } if matches.is_present("force") { opts.force = true; } if matches.is_present("recursive") { opts.recursive = true; } if matches.is_present("fix") { opts.fix_errors = true; } if matches.is_present("clobber") { opts.clobber = false; } if matches.is_present("pretend") { opts.pretend = true; } if matches.is_present("preserve") { opts.preserve_attrs = true; } if matches.is_present("quiet") { opts.verbosity = None; } if matches.is_present("verbose") { opts.verbosity = Some(1); } if matches.is_present("no-bit-reduction") { opts.bit_depth_reduction = false; } if matches.is_present("no-color-reduction") { opts.color_type_reduction = false; } if matches.is_present("no-palette-reduction") { opts.palette_reduction = false; } if matches.is_present("no-reductions") { opts.bit_depth_reduction = false; opts.color_type_reduction = false; opts.palette_reduction = false; } if matches.is_present("no-recoding") { opts.idat_recoding = false; } if let Some(hdrs) = 
matches.value_of("strip") { let hdrs = hdrs.split(',') .map(|x| x.trim().to_owned()) .collect::<Vec<String>>(); if hdrs.contains(&"safe".to_owned()) || hdrs.contains(&"all".to_owned()) { if hdrs.len() > 1 { return Err( "'safe' or 'all' presets for --strip should be used by themselves".to_owned(), ); } if hdrs[0] == "safe" { opts.strip = Headers::Safe; } else { opts.strip = Headers::All; } } else { const FORBIDDEN_CHUNKS: [&str; 5] = ["IHDR", "IDAT", "tRNS", "PLTE", "IEND"]; for i in &hdrs { if FORBIDDEN_CHUNKS.contains(&i.as_ref()) { return Err(format!("{} chunk is not allowed to be stripped", i)); } } opts.strip = Headers::Some(hdrs); } } if matches.is_present("strip-safe") { opts.strip = Headers::Safe; } if matches.is_present("zopfli") { opts.deflate = Deflaters::Zopfli; } if let Some(x) = matches.value_of("threads") { opts.threads = x.parse::<usize>().unwrap(); } Ok(opts) } fn parse_numeric_range_opts( input: &str, min_value: u8, max_value: u8, ) -> Result<HashSet<u8>, String> { let one_item = Regex::new(format!(r"^[{}-{}]$", min_value, max_value).as_ref()).unwrap(); let multiple_items = Regex::new( format!( r"^([{}-{}])(,|-)([{}-{}])$", min_value, max_value, min_value, max_value ).as_ref(), ).unwrap(); let mut items = HashSet::new(); if one_item.is_match(input) { items.insert(input.parse::<u8>().unwrap()); return Ok(items); } if let Some(captures) = multiple_items.captures(input) { let first = captures[1].parse::<u8>().unwrap(); let second = captures[3].parse::<u8>().unwrap(); if first >= second { return Err("Not a valid input".to_owned()); } match &captures[2] { "," => { items.insert(first); items.insert(second); } "-" => for i in first..second + 1 { items.insert(i); }, _ => unreachable!(), }; return Ok(items); } Err("Not a valid input".to_owned()) }
#![cfg_attr(feature = "clippy", feature(plugin))] #![cfg_attr(feature = "clippy", plugin(clippy))] #![cfg_attr(feature = "clippy", warn(enum_glob_use))] #![cfg_attr(feature = "clippy", warn(if_not_else))] #![cfg_attr(feature = "clippy", warn(string_add))] #![cfg_attr(feature = "clippy", warn(string_add_assign))] #![warn(trivial_casts, trivial_numeric_casts, unused_import_braces)] #![deny(missing_debug_implementations, missing_copy_implementations)] extern crate clap; extern crate glob; extern crate oxipng; extern crate regex; use clap::{App, Arg, ArgMatches}; use glob::glob; use oxipng::AlphaOptim; use oxipng::Deflaters; use oxipng::Headers; use oxipng::{Options, PngError}; use regex::Regex; use std::collections::HashSet; use std::fs::DirBuilder; use std::path::PathBuf; use std::process::exit; fn main() { let matches = App::new("oxipng") .version(env!("CARGO_PKG_VERSION")) .author("Joshua Holmer <[email protected]>") .about("Losslessly improves compression of PNG files") .arg(Arg::with_name("files") .help("File(s) to compress") .index(1) .multiple(true) .use_delimiter(false) .required(true)) .arg(Arg::with_name("optimization") .help("Optimization level - Default: 2") .short("o") .long("opt") .takes_value(true) .possible_value("0") .possible_value("1") .possible_value("2") .possible_value("3") .possible_value("4") .possible_value("5") .possible_value("6")) .arg(Arg::with_name("backup") .help("Back up modified files") .short("b") .long("backup")) .arg(Arg::with_name("force") .help("Write output even if larger than the original") .short("F") .long("force")) .arg(Arg::with_name("recursive") .help("Recurse into subdirectories") .short("r") .long("recursive")) .arg(Arg::with_name("output_dir") .help("Write output file(s) to <directory>") .long("dir") .takes_value(true) .conflicts_with("output_file") .conflicts_with("stdout")) .arg(Arg::with_name("output_file") .help("Write output file to <file>") .long("out") .takes_value(true) .conflicts_with("output_dir") 
.conflicts_with("stdout")) .arg(Arg::with_name("stdout") .help("Write output to stdout") .long("stdout") .conflicts_with("output_dir") .conflicts_with("output_file")) .arg(Arg::with_name("fix") .help("Enable error recovery") .long("fix")) .arg(Arg::with_name("no-clobber") .help("Do not overwrite existing files") .long("no-clobber")) .arg(Arg::with_name("pretend") .help("Do not write any files, only calculate compression gains") .short("P") .long("pretend")) .arg(Arg::with_name("preserve") .help("Preserve file attributes if possible") .short("p") .long("preserve")) .arg(Arg::with_name("quiet") .help("Run in quiet mode") .short("q") .long("quiet") .conflicts_with("verbose")) .arg(Arg::with_name("verbose") .help("Run in verbose mode") .short("v") .long("verbose") .conflicts_with("quiet")) .arg(Arg::with_name("filters") .help("PNG delta filters (0-5) - Default: 0,5") .short("f") .long("filters") .takes_value(true) .validator(|x| { match parse_numeric_range_opts(&x, 0, 5) { Ok(_) => Ok(()), Err(_) => Err("Invalid option for filters".to_owned()), } })) .arg(Arg::with_name("interlace") .help("PNG interlace type") .short("i") .long("interlace") .takes_value(true) .possible_value("0") .possible_value("1")) .arg(Arg::with_name("compression") .help("zlib compression levels (1-9) - Default: 9") .long("zc") .takes_value(true) .validator(|x| { match parse_numeric_range_opts(&x, 1, 9) { Ok(_) => Ok(()), Err(_) => Err("Invalid option for compression".to_owned()), } })) .arg(Arg::with_name("strategies") .help("zlib compression strategies (0-3) - Default: 0-3") .long("zs") .takes_value(true) .validator(|x| { match parse_numeric_range_opts(&x, 0, 3) { Ok(_) => Ok(()), Err(_) => Err("Invalid option for strategies".to_owned()), } })) .arg(Arg::with_name("window") .help("zlib window size - Default: 32k") .long("zw") .takes_value(true) .possible_value("256") .possible_value("512") .possible_value("1k") .possible_value("2k") .possible_value("4k") .possible_value("8k") 
.possible_value("16k") .possible_value("32k")) .arg(Arg::with_name("alpha") .help("Perform additional alpha optimizations") .short("a") .long("alpha")) .arg(Arg::with_name("no-bit-reduction") .help("No bit depth reduction") .long("nb")) .arg(Arg::with_name("no-color-reduction") .help("No color type reduction") .long("nc")) .arg(Arg::with_name("no-palette-reduction") .help("No palette reduction") .long("np")) .arg(Arg::with_name("no-reductions") .help("No reductions") .long("nx")) .arg(Arg::with_name("no-recoding") .help("No IDAT recoding unless necessary") .long("nz")) .arg(Arg::with_name("strip") .help("Strip metadata objects ['safe', 'all', or comma-separated list]") .long("strip") .takes_value(true) .conflicts_with("strip-safe")) .arg(Arg::with_name("strip-safe") .help("Strip safely-removable metadata objects") .short("s") .conflicts_with("strip")) .arg(Arg::with_name("zopfli") .help("Use the slower but better compressing Zopfli algorithm, overrides zlib-specific options") .short("Z") .long("zopfli")) .arg(Arg::with_name("threads") .help("Set number of threads to use - default 1.5x CPU cores") .long("threads") .short("t") .takes_value(true) .validator(|x| { match x.parse::<usize>() { Ok(val) => { if val > 0 { Ok(()) } else { Err("Thread count must be >= 1".to_owned()) } } Err(_) => Err("Thread count must be >= 1".to_owned()), } })) .after_help("Optimization levels: -o 0 => --zc 3 --nz (0 or 1 trials) -o 1 => --zc 9 (1 trial, determined heuristically) -o 2 => --zc 9 --zs 0-3 -f 0,5 (8 trials) -o 3 => --zc 9 --zs 0-3 -f 0-5 (24 trials) -o 4 => --zc 9 --zs 0-3 -f 0-5 -a (24 trials + 6 alpha trials) -o 5 => --zc 3-9 --zs 0-3 -f 0-5 -a (96 trials + 6 alpha trials) -o 6 => --zc 1-9 --zs 0-3 -f 0-5 -a (180 trials + 6 alpha trials) Manually specifying a compression option (zc, zs, etc.) 
will override the optimization preset, regardless of the order you write the arguments.") .get_matches(); let opts = match parse_opts_into_struct(&matches) { Ok(x) => x, Err(x) => { eprintln!("{}", x); exit(1) } }; if let Err(e) = handle_optimization( matches .values_of("files") .unwrap() .map(|pattern| glob(pattern).expect("Failed to parse input file path")) .flat_map(|paths| { paths .into_iter() .map(|path| path.expect("Failed to parse input file path")) }) .collect(), &opts, ) { eprintln!("{}", e); exit(1); } } fn handle_optimization(inputs: Vec<PathBuf>, opts: &Options) -> Result<(), PngError> { inputs.into_iter().fold(Ok(()), |res, input| { let mut current_opts = opts.clone(); if input.is_dir() { if current_opts.recursive { let cur_result = handle_optimization( input .read_dir() .unwrap() .map(|x| x.unwrap().path()) .collect(), &current_opts, ); return res.and(cur_result); } else { eprintln!("{} is a directory, skipping", input.display()); } return res; } if let Some(ref out_dir) = current_opts.out_dir { current_opts.out_file = Some(out_dir.join(input.file_name().unwrap())); } let cur_result = oxipng::optimize(&input, &current_opts); res.and(cur_result) }) } #[cfg_attr(feature = "clippy", allow(cyclomatic_complexity))] fn parse_opts_into_struct(matches: &ArgMatches) -> Result<Options, String> { let mut opts = if let Some(x) = matches.value_of("optimization") { if let Ok(opt) = x.parse::<u8>() { Options::from_preset(opt) } else { unreachable!() } } else { Options::default() }; if let Some(x) = matches.value_of("interlace") { opts.interlace = x.parse::<u8>().ok(); } if let Some(x) = matches.value_of("filters") { opts.filter = parse_numeric_range_opts(x, 0, 5).unwrap(); } if let Some(x) = matches.value_of("compression") { opts.compression = parse_numeric_range_opts(x, 1, 9).unwrap(); } if let Some(x) = matches.value_of("strategies") { opts.strategies = parse_numeric_range_opts(x, 0, 3).unwrap(); } match matches.value_of("window") { Some("256") => opts.window = 8, 
Some("512") => opts.window = 9, Some("1k") => opts.window = 10, Some("2k") => opts.window = 11, Some("4k") => opts.window = 12, Some("8k") => opts.window = 13, Some("16k") => opts.window = 14, _ => (), } if let Some(x) = matches.value_of("output_dir") { let path = PathBuf::from(x); if !path.exists() { match DirBuilder::new().recursive(true).create(&path) { Ok(_) => (), Err(x) => return Err(format!("Could not create output directory {}", x)), }; } else if !path.is_dir() { return Err(format!( "{} is an existing file (not a directory), cannot create directory", x )); } opts.out_dir = Some(path); } if let Some(x) = matches.value_of("output_file") { opts.out_file = Some(PathBuf::from(x)); } if matches.is_present("stdout") { opts.stdout = true; } if matches.is_present("alpha") { opts.alphas.insert(AlphaOptim::White); opts.alphas.insert(AlphaOptim::Up); opts.alphas.insert(AlphaOptim::Down); opts.alphas.insert(AlphaOptim::Left); opts.alphas.insert(AlphaOptim::Right); } if matches.is_present("backup") { opts.backup = true; } if matches.is_present("force") { opts.force = true; } if matches.is_present("recursive") { opts.recursive = true; } if matches.is_present("fix") { opts.fix_errors = true; } if matches.is_present("clobber") { opts.clobber = false; } if matches.is_present("pretend") { opts.pretend = true; } if matches.is_present("preserve") { opts.preserve_attrs = true; } if matches.is_present("quiet") { opts.verbosity = None; } if matches.is_present("verbose") { opts.verbosity = Some(1); } if matches.is_present("no-bit-reduction") { opts.bit_depth_reduction = false; } if matches.is_present("no-color-reduction") { opts.color_type_reduction = false; } if matches.is_present("no-palette-reduction") { opts.palette_reduction = false; } if matches.is_present("no-reductions") { opts.bit_depth_reduction = false; opts.color_type_reduction = false; opts.palette_reduction = false; } if matches.is_present("no-recoding") { opts.idat_recoding = false; } if let Some(hdrs) = 
matches.value_of("strip") { let hdrs = hdrs.split(',') .map(|x| x.trim().to_owned()) .collect::<Vec<String>>(); if hdrs.contains(&"safe".to_owned()) || hdrs.contains(&"all".to_owned()) { if hdrs.len() > 1 { return
; } if hdrs[0] == "safe" { opts.strip = Headers::Safe; } else { opts.strip = Headers::All; } } else { const FORBIDDEN_CHUNKS: [&str; 5] = ["IHDR", "IDAT", "tRNS", "PLTE", "IEND"]; for i in &hdrs { if FORBIDDEN_CHUNKS.contains(&i.as_ref()) { return Err(format!("{} chunk is not allowed to be stripped", i)); } } opts.strip = Headers::Some(hdrs); } } if matches.is_present("strip-safe") { opts.strip = Headers::Safe; } if matches.is_present("zopfli") { opts.deflate = Deflaters::Zopfli; } if let Some(x) = matches.value_of("threads") { opts.threads = x.parse::<usize>().unwrap(); } Ok(opts) } fn parse_numeric_range_opts( input: &str, min_value: u8, max_value: u8, ) -> Result<HashSet<u8>, String> { let one_item = Regex::new(format!(r"^[{}-{}]$", min_value, max_value).as_ref()).unwrap(); let multiple_items = Regex::new( format!( r"^([{}-{}])(,|-)([{}-{}])$", min_value, max_value, min_value, max_value ).as_ref(), ).unwrap(); let mut items = HashSet::new(); if one_item.is_match(input) { items.insert(input.parse::<u8>().unwrap()); return Ok(items); } if let Some(captures) = multiple_items.captures(input) { let first = captures[1].parse::<u8>().unwrap(); let second = captures[3].parse::<u8>().unwrap(); if first >= second { return Err("Not a valid input".to_owned()); } match &captures[2] { "," => { items.insert(first); items.insert(second); } "-" => for i in first..second + 1 { items.insert(i); }, _ => unreachable!(), }; return Ok(items); } Err("Not a valid input".to_owned()) }
Err( "'safe' or 'all' presets for --strip should be used by themselves".to_owned(), )
call_expression
[ { "content": "/// Perform optimization on the input file using the options provided\n\npub fn optimize(input_path: &Path, opts: &Options) -> Result<(), PngError> {\n\n // Initialize the thread pool with correct number of threads\n\n let thread_count = opts.threads;\n\n let _ = rayon::ThreadPoolBuilder::new()\n\n .num_threads(thread_count)\n\n .build_global();\n\n\n\n // Read in the file and try to decode as PNG.\n\n if opts.verbosity.is_some() {\n\n eprintln!(\"Processing: {}\", input_path.to_str().unwrap());\n\n }\n\n\n\n let in_data = PngData::read_file(input_path)?;\n\n let mut png = PngData::from_slice(&in_data, opts.fix_errors)?;\n\n let output_path = opts.out_file\n\n .clone()\n\n .unwrap_or_else(|| input_path.to_path_buf());\n\n\n\n // Run the optimizer on the decoded PNG.\n\n let mut optimized_output = optimize_png(&mut png, &in_data, opts)?;\n", "file_path": "src/lib.rs", "rank": 0, "score": 338791.77395685803 }, { "content": "fn get_opts(input: &Path) -> oxipng::Options {\n\n let mut options = oxipng::Options::default();\n\n options.out_file = Some(input.with_extension(\"out.png\").to_owned());\n\n options.verbosity = None;\n\n options.force = true;\n\n let mut filter = HashSet::new();\n\n filter.insert(0);\n\n options.filter = filter;\n\n\n\n options\n\n}\n\n\n", "file_path": "tests/interlaced.rs", "rank": 2, "score": 315896.01241805457 }, { "content": "fn get_opts(input: &Path) -> oxipng::Options {\n\n let mut options = oxipng::Options::default();\n\n options.out_file = Some(input.with_extension(\"out.png\").to_owned());\n\n options.verbosity = None;\n\n options.force = true;\n\n let mut filter = HashSet::new();\n\n filter.insert(0);\n\n options.filter = filter;\n\n\n\n options\n\n}\n\n\n", "file_path": "tests/filters.rs", "rank": 3, "score": 315615.00812489656 }, { "content": "fn get_opts(input: &Path) -> oxipng::Options {\n\n let mut options = oxipng::Options::default();\n\n options.out_file = Some(input.with_extension(\"out.png\").to_owned());\n\n 
options.verbosity = None;\n\n options.force = true;\n\n let mut filter = HashSet::new();\n\n filter.insert(0);\n\n options.filter = filter;\n\n\n\n options\n\n}\n\n\n", "file_path": "tests/reduction.rs", "rank": 4, "score": 315305.42732684175 }, { "content": "fn get_opts(input: &Path) -> oxipng::Options {\n\n let mut options = oxipng::Options::default();\n\n options.out_file = Some(input.with_extension(\"out.png\").to_owned());\n\n options.verbosity = None;\n\n options.force = true;\n\n let mut filter = HashSet::new();\n\n filter.insert(0);\n\n options.filter = filter;\n\n\n\n options\n\n}\n\n\n", "file_path": "tests/flags.rs", "rank": 6, "score": 285130.4674192951 }, { "content": "fn get_opts(input: &Path) -> oxipng::Options {\n\n let mut options = oxipng::Options::default();\n\n options.out_file = Some(input.with_extension(\"out.png\").to_owned());\n\n options.verbosity = None;\n\n options.force = true;\n\n let mut filter = HashSet::new();\n\n filter.insert(0);\n\n options.filter = filter;\n\n\n\n options\n\n}\n\n\n", "file_path": "tests/regression.rs", "rank": 7, "score": 285130.4674192951 }, { "content": "fn perform_backup(input_path: &Path) -> Result<(), PngError> {\n\n let backup_file = input_path.with_extension(format!(\n\n \"bak.{}\",\n\n input_path.extension().unwrap().to_str().unwrap()\n\n ));\n\n copy(input_path, &backup_file).map(|_| ()).map_err(|_| {\n\n PngError::new(&format!(\n\n \"Unable to write to backup file at {}\",\n\n backup_file.display()\n\n ))\n\n })\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 283399.0344264924 }, { "content": "pub fn reduce_bit_depth_8_or_less(png: &mut PngData) -> bool {\n\n let mut reduced = BitVec::with_capacity(png.raw_data.len() * 8);\n\n let bit_depth: usize = png.ihdr_data.bit_depth.as_u8() as usize;\n\n let mut allowed_bits = 1;\n\n for line in png.scan_lines() {\n\n let bit_vec = BitVec::from_bytes(&line.data);\n\n for (i, bit) in bit_vec.iter().enumerate() {\n\n let bit_index = if 
png.ihdr_data.color_type == ColorType::Indexed {\n\n bit_depth - (i % bit_depth)\n\n } else {\n\n i % bit_depth\n\n };\n\n if bit && bit_index > allowed_bits {\n\n allowed_bits = bit_index.next_power_of_two();\n\n if allowed_bits == bit_depth {\n\n // Not reducable\n\n return false;\n\n }\n\n }\n\n }\n", "file_path": "src/reduction/bit_depth.rs", "rank": 9, "score": 248938.52237357007 }, { "content": "/// Attempt all reduction operations requested by the given `Options` struct\n\n/// and apply them directly to the `PngData` passed in\n\nfn perform_reductions(png: &mut png::PngData, opts: &Options) -> bool {\n\n let mut reduction_occurred = false;\n\n\n\n if opts.palette_reduction && png.reduce_palette() {\n\n reduction_occurred = true;\n\n if opts.verbosity == Some(1) {\n\n report_reduction(png);\n\n }\n\n }\n\n\n\n if opts.bit_depth_reduction && png.reduce_bit_depth() {\n\n reduction_occurred = true;\n\n if opts.verbosity == Some(1) {\n\n report_reduction(png);\n\n }\n\n }\n\n\n\n if opts.color_type_reduction && png.reduce_color_type() {\n\n reduction_occurred = true;\n\n if opts.verbosity == Some(1) {\n", "file_path": "src/lib.rs", "rank": 10, "score": 245437.54826973914 }, { "content": "#[cfg(unix)]\n\nfn copy_permissions(input_path: &Path, out_file: &File, verbosity: Option<u8>) {\n\n use std::os::unix::fs::PermissionsExt;\n\n\n\n if let Ok(f) = File::open(input_path) {\n\n if let Ok(metadata) = f.metadata() {\n\n if let Ok(out_meta) = out_file.metadata() {\n\n let permissions = metadata.permissions().mode();\n\n out_meta.permissions().set_mode(permissions);\n\n return;\n\n }\n\n }\n\n };\n\n if verbosity.is_some() {\n\n eprintln!(\"Failed to set permissions on output file\");\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 239715.96459350613 }, { "content": "pub fn reduce_rgba_to_palette(png: &mut PngData) -> bool {\n\n if png.ihdr_data.bit_depth != BitDepth::Eight {\n\n return false;\n\n }\n\n let mut reduced = 
Vec::with_capacity(png.raw_data.len());\n\n let mut palette = Vec::with_capacity(256);\n\n let bpp: usize = (4 * png.ihdr_data.bit_depth.as_u8() as usize) >> 3;\n\n for line in png.scan_lines() {\n\n reduced.push(line.filter);\n\n let mut cur_pixel = Vec::with_capacity(bpp);\n\n for (i, byte) in line.data.iter().enumerate() {\n\n cur_pixel.push(*byte);\n\n if i % bpp == bpp - 1 {\n\n if let Some(idx) = palette.iter().position(|x| x == &cur_pixel) {\n\n reduced.push(idx as u8);\n\n } else {\n\n let len = palette.len();\n\n if len == 256 {\n\n return false;\n\n }\n", "file_path": "src/reduction/color.rs", "rank": 12, "score": 236417.84649371455 }, { "content": "pub fn reduce_rgb_to_palette(png: &mut PngData) -> bool {\n\n if png.ihdr_data.bit_depth != BitDepth::Eight {\n\n return false;\n\n }\n\n let mut reduced = Vec::with_capacity(png.raw_data.len());\n\n let mut palette = Vec::with_capacity(256);\n\n let bpp: usize = (3 * png.ihdr_data.bit_depth.as_u8() as usize) >> 3;\n\n for line in png.scan_lines() {\n\n reduced.push(line.filter);\n\n let mut cur_pixel = Vec::with_capacity(bpp);\n\n for (i, byte) in line.data.iter().enumerate() {\n\n cur_pixel.push(*byte);\n\n if i % bpp == bpp - 1 {\n\n if let Some(idx) = palette.iter().position(|x| x == &cur_pixel) {\n\n reduced.push(idx as u8);\n\n } else {\n\n let len = palette.len();\n\n if len == 256 {\n\n return false;\n\n }\n", "file_path": "src/reduction/color.rs", "rank": 13, "score": 236417.84649371455 }, { "content": "pub fn reduce_alpha_channel(png: &mut PngData, bpp_factor: usize) -> Option<Vec<u8>> {\n\n let mut reduced = Vec::with_capacity(png.raw_data.len());\n\n let byte_depth: u8 = png.ihdr_data.bit_depth.as_u8() >> 3;\n\n let bpp: usize = bpp_factor * byte_depth as usize;\n\n let colored_bytes = bpp - byte_depth as usize;\n\n for line in png.scan_lines() {\n\n reduced.push(line.filter);\n\n for (i, byte) in line.data.iter().enumerate() {\n\n if i % bpp >= colored_bytes {\n\n if *byte != 255 {\n\n return 
None;\n\n }\n\n } else {\n\n reduced.push(*byte);\n\n }\n\n }\n\n }\n\n if let Some(sbit_header) = png.aux_headers.get_mut(&\"sBIT\".to_string()) {\n\n assert_eq!(sbit_header.len(), bpp_factor);\n\n sbit_header.pop();\n\n }\n\n\n\n Some(reduced)\n\n}\n", "file_path": "src/reduction/alpha.rs", "rank": 14, "score": 233349.11227193952 }, { "content": "/// Perform optimization on the input file using the options provided, where the file is already\n\n/// loaded in-memory\n\npub fn optimize_from_memory(data: &[u8], opts: &Options) -> Result<Vec<u8>, PngError> {\n\n // Initialize the thread pool with correct number of threads\n\n let thread_count = opts.threads;\n\n let _ = rayon::ThreadPoolBuilder::new()\n\n .num_threads(thread_count)\n\n .build_global();\n\n\n\n // Read in the file and try to decode as PNG.\n\n if opts.verbosity.is_some() {\n\n eprintln!(\"Processing from memory\");\n\n }\n\n let original_size = data.len() as usize;\n\n let mut png = PngData::from_slice(data, opts.fix_errors)?;\n\n\n\n // Run the optimizer on the decoded PNG.\n\n let optimized_output = optimize_png(&mut png, data, opts)?;\n\n\n\n if is_fully_optimized(original_size, optimized_output.len(), opts) {\n\n eprintln!(\"Image already optimized\");\n\n Ok(data.to_vec())\n\n } else {\n\n Ok(optimized_output)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n", "file_path": "src/lib.rs", "rank": 15, "score": 232495.3039058646 }, { "content": "pub fn reduce_rgba_to_grayscale_alpha(png: &mut PngData) -> bool {\n\n let mut reduced = Vec::with_capacity(png.raw_data.len());\n\n let byte_depth: u8 = png.ihdr_data.bit_depth.as_u8() >> 3;\n\n let bpp: usize = 4 * byte_depth as usize;\n\n let colored_bytes = bpp - byte_depth as usize;\n\n for line in png.scan_lines() {\n\n reduced.push(line.filter);\n\n let mut low_bytes = Vec::with_capacity(4);\n\n let mut high_bytes = Vec::with_capacity(4);\n\n let mut trans_bytes = Vec::with_capacity(byte_depth as usize);\n\n for (i, byte) in 
line.data.iter().enumerate() {\n\n if i % bpp < colored_bytes {\n\n if byte_depth == 1 || i % 2 == 1 {\n\n low_bytes.push(*byte);\n\n } else {\n\n high_bytes.push(*byte);\n\n }\n\n } else {\n\n trans_bytes.push(*byte);\n\n }\n", "file_path": "src/reduction/color.rs", "rank": 16, "score": 231847.1962020956 }, { "content": "pub fn reduce_grayscale_alpha_to_grayscale(png: &mut PngData) -> bool {\n\n if let Some(reduced) = reduce_alpha_channel(png, 2) {\n\n png.raw_data = reduced;\n\n png.ihdr_data.color_type = ColorType::Grayscale;\n\n true\n\n } else {\n\n false\n\n }\n\n}\n", "file_path": "src/reduction/color.rs", "rank": 17, "score": 231847.1962020956 }, { "content": "pub fn compress_to_vec_oxipng(input: &[u8], level: u8, window_bits: i32, strategy: i32) -> Vec<u8> {\n\n // The comp flags function sets the zlib flag if the window_bits parameter is > 0.\n\n let flags = create_comp_flags_from_zip_params(level.into(), window_bits, strategy);\n\n let mut compressor = CompressorOxide::new(flags);\n\n let mut output = Vec::with_capacity(input.len() / 2);\n\n // # Unsafe\n\n // We trust compress to not read the uninitialized bytes.\n\n unsafe {\n\n let cap = output.capacity();\n\n output.set_len(cap);\n\n }\n\n let mut in_pos = 0;\n\n let mut out_pos = 0;\n\n loop {\n\n let (status, bytes_in, bytes_out) = compress(\n\n &mut compressor,\n\n &input[in_pos..],\n\n &mut output[out_pos..],\n\n TDEFLFlush::Finish,\n\n );\n", "file_path": "src/deflate/miniz_stream.rs", "rank": 18, "score": 227811.5728658111 }, { "content": "/// Strip headers from the `PngData` object, as requested by the passed `Options`\n\nfn perform_strip(png: &mut png::PngData, opts: &Options) {\n\n match opts.strip {\n\n // Strip headers\n\n Headers::None => (),\n\n Headers::Some(ref hdrs) => for hdr in hdrs {\n\n png.aux_headers.remove(hdr);\n\n },\n\n Headers::Safe => {\n\n const PRESERVED_HEADERS: [&str; 9] = [\n\n \"cHRM\", \"gAMA\", \"iCCP\", \"sBIT\", \"sRGB\", \"bKGD\", \"hIST\", \"pHYs\", 
\"sPLT\"\n\n ];\n\n let hdrs = png.aux_headers.keys().cloned().collect::<Vec<String>>();\n\n for hdr in hdrs {\n\n if !PRESERVED_HEADERS.contains(&hdr.as_ref()) {\n\n png.aux_headers.remove(&hdr);\n\n }\n\n }\n\n }\n\n Headers::All => {\n\n png.aux_headers = HashMap::new();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 226637.20687658753 }, { "content": "#[bench]\n\nfn zopfli_4_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::zopfli_deflate(png.raw_data.as_ref()).ok();\n\n });\n\n}\n\n\n", "file_path": "benches/zopfli.rs", "rank": 20, "score": 214247.00840121802 }, { "content": "#[bench]\n\nfn zopfli_8_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::zopfli_deflate(png.raw_data.as_ref()).ok();\n\n });\n\n}\n\n\n", "file_path": "benches/zopfli.rs", "rank": 21, "score": 214247.00840121802 }, { "content": "#[bench]\n\nfn zopfli_16_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::zopfli_deflate(png.raw_data.as_ref()).ok();\n\n });\n\n}\n\n\n", "file_path": "benches/zopfli.rs", "rank": 22, "score": 214247.00840121802 }, { "content": "#[bench]\n\nfn zopfli_2_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::zopfli_deflate(png.raw_data.as_ref()).ok();\n\n });\n\n}\n\n\n", "file_path": "benches/zopfli.rs", "rank": 23, "score": 214247.00840121802 }, { 
"content": "#[bench]\n\nfn zopfli_1_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::zopfli_deflate(png.raw_data.as_ref()).ok();\n\n });\n\n}\n", "file_path": "benches/zopfli.rs", "rank": 24, "score": 214247.00840121802 }, { "content": "pub fn reduce_rgba_to_rgb(png: &mut PngData) -> bool {\n\n if let Some(reduced) = reduce_alpha_channel(png, 4) {\n\n png.raw_data = reduced;\n\n png.ihdr_data.color_type = ColorType::RGB;\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/reduction/color.rs", "rank": 25, "score": 209385.78075143666 }, { "content": "pub fn reduce_rgb_to_grayscale(png: &mut PngData) -> bool {\n\n let mut reduced = Vec::with_capacity(png.raw_data.len());\n\n let byte_depth: u8 = png.ihdr_data.bit_depth.as_u8() >> 3;\n\n let bpp: usize = 3 * byte_depth as usize;\n\n let mut cur_pixel = Vec::with_capacity(bpp);\n\n for line in png.scan_lines() {\n\n reduced.push(line.filter);\n\n for (i, byte) in line.data.iter().enumerate() {\n\n cur_pixel.push(*byte);\n\n if i % bpp == bpp - 1 {\n\n if bpp == 3 {\n\n if cur_pixel.iter().unique().count() > 1 {\n\n return false;\n\n }\n\n reduced.push(cur_pixel[0]);\n\n } else {\n\n let pixel_bytes = cur_pixel\n\n .iter()\n\n .step(2)\n\n .cloned()\n", "file_path": "src/reduction/color.rs", "rank": 26, "score": 209385.78075143666 }, { "content": "pub fn interlace_image(png: &mut PngData) {\n\n let mut passes: Vec<BitVec> = vec![BitVec::new(); 7];\n\n let bits_per_pixel = png.ihdr_data.bit_depth.as_u8() * png.channels_per_pixel();\n\n for (index, line) in png.scan_lines().enumerate() {\n\n match index % 8 {\n\n // Add filter bytes to passes that will be in the output image\n\n 0 => {\n\n passes[0].extend(BitVec::from_elem(8, false));\n\n if png.ihdr_data.width >= 5 {\n\n passes[1].extend(BitVec::from_elem(8, false));\n\n 
}\n\n if png.ihdr_data.width >= 3 {\n\n passes[3].extend(BitVec::from_elem(8, false));\n\n }\n\n if png.ihdr_data.width >= 2 {\n\n passes[5].extend(BitVec::from_elem(8, false));\n\n }\n\n }\n\n 4 => {\n\n passes[2].extend(BitVec::from_elem(8, false));\n", "file_path": "src/interlace.rs", "rank": 27, "score": 207698.51760187067 }, { "content": "pub fn deinterlace_image(png: &mut PngData) {\n\n let bits_per_pixel = png.ihdr_data.bit_depth.as_u8() * png.channels_per_pixel();\n\n let bits_per_line = 8 + bits_per_pixel as usize * png.ihdr_data.width as usize;\n\n // Initialize each output line with a starting filter byte of 0\n\n // as well as some blank data\n\n let mut lines: Vec<BitVec> =\n\n vec![BitVec::from_elem(bits_per_line, false); png.ihdr_data.height as usize];\n\n let mut current_pass = 1;\n\n let mut pass_constants = interlaced_constants(current_pass);\n\n let mut current_y: usize = pass_constants.y_shift as usize;\n\n for line in png.scan_lines() {\n\n let bit_vec = BitVec::from_bytes(&line.data);\n\n let bits_in_line = ((png.ihdr_data.width - u32::from(pass_constants.x_shift)) as f32\n\n / f32::from(pass_constants.x_step))\n\n .ceil() as usize * bits_per_pixel as usize;\n\n for (i, bit) in bit_vec.iter().enumerate() {\n\n // Avoid moving padded 0's into new image\n\n if i >= bits_in_line {\n\n break;\n\n }\n", "file_path": "src/interlace.rs", "rank": 28, "score": 197316.3448938327 }, { "content": "/// Check if an image was already optimized prior to oxipng's operations\n\nfn is_fully_optimized(original_size: usize, optimized_size: usize, opts: &Options) -> bool {\n\n original_size <= optimized_size && !opts.force && opts.interlace.is_none()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 193058.79907618664 }, { "content": "fn write_png_block(key: &[u8], header: &[u8], output: &mut Vec<u8>) {\n\n let mut header_data = Vec::with_capacity(header.len() + 4);\n\n header_data.extend_from_slice(key);\n\n header_data.extend_from_slice(header);\n\n 
output.reserve(header_data.len() + 8);\n\n let _ = output.write_u32::<BigEndian>(header_data.len() as u32 - 4);\n\n let crc = crc32::checksum_ieee(&header_data);\n\n output.append(&mut header_data);\n\n let _ = output.write_u32::<BigEndian>(crc);\n\n}\n", "file_path": "src/png/mod.rs", "rank": 30, "score": 192498.69480603398 }, { "content": "pub fn zopfli_deflate(data: &[u8]) -> Result<Vec<u8>, PngError> {\n\n let mut output = Vec::with_capacity(max(1024, data.len() / 20));\n\n let options = zopfli::Options::default();\n\n match zopfli::compress(&options, &zopfli::Format::Zlib, data, &mut output) {\n\n Ok(_) => (),\n\n Err(_) => return Err(PngError::new(\"Failed to compress in zopfli\")),\n\n };\n\n output.shrink_to_fit();\n\n Ok(output)\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n/// DEFLATE algorithms supported by oxipng\n\npub enum Deflaters {\n\n /// Use the Zlib/Miniz DEFLATE implementation\n\n Zlib,\n\n /// Use the better but slower Zopfli implementation\n\n Zopfli,\n\n}\n", "file_path": "src/deflate/mod.rs", "rank": 31, "score": 190422.93486326063 }, { "content": "#[bench]\n\nfn filters_16_bits_filter_2(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(2);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 32, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_4_bits_filter_2(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(2);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 33, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_4_bits_filter_5(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n 
\"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(5);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 34, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_16_bits_filter_5(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(5);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 35, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_1_bits_filter_4(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(4);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 36, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_4_bits_filter_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(0);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 37, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_2_bits_filter_3(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(3);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 38, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_8_bits_filter_4(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, 
false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(4);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 39, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_2_bits_filter_4(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(4);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 40, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_8_bits_filter_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(0);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 41, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_1_bits_filter_3(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(3);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 42, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_16_bits_filter_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(1);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 43, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_1_bits_filter_2(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(2);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", 
"rank": 44, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_4_bits_filter_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(1);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 45, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_4_bits_filter_3(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(3);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 46, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_1_bits_filter_5(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(5);\n\n });\n\n}\n", "file_path": "benches/filters.rs", "rank": 47, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_1_bits_filter_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(0);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 48, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_8_bits_filter_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(1);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 49, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_16_bits_filter_4(b: &mut 
Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(4);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 50, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_2_bits_filter_2(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(2);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 51, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_16_bits_filter_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(0);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 52, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_8_bits_filter_5(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(5);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 53, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_2_bits_filter_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(0);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 54, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_2_bits_filter_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = 
png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(1);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 55, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_1_bits_filter_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(1);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 56, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_8_bits_filter_2(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(2);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 57, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_2_bits_filter_5(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(5);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 58, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_16_bits_filter_3(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(3);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 59, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_8_bits_filter_3(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(3);\n\n });\n\n}\n\n\n", "file_path": 
"benches/filters.rs", "rank": 60, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn filters_4_bits_filter_4(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n png.filter_image(4);\n\n });\n\n}\n\n\n", "file_path": "benches/filters.rs", "rank": 61, "score": 185264.65868699158 }, { "content": "#[bench]\n\nfn interlacing_2_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(1);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 62, "score": 185164.3763781731 }, { "content": "#[bench]\n\nfn interlacing_8_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(1);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 63, "score": 185164.3763781731 }, { "content": "#[bench]\n\nfn interlacing_16_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(1);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 64, "score": 185164.3763781731 }, { "content": "#[bench]\n\nfn interlacing_4_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n 
safe_png.change_interlacing(1);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 65, "score": 185164.3763781731 }, { "content": "#[bench]\n\nfn interlacing_1_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(1);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 66, "score": 185164.3763781731 }, { "content": "#[bench]\n\nfn reductions_alpha_up(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_reduce_alpha_up.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_alpha_channel(AlphaOptim::Up);\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 67, "score": 184447.3573032759 }, { "content": "#[bench]\n\nfn reductions_alpha_down(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_reduce_alpha_down.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_alpha_channel(AlphaOptim::Down);\n\n });\n\n}\n", "file_path": "benches/reductions.rs", "rank": 68, "score": 184447.3573032759 }, { "content": "#[bench]\n\nfn reductions_8_to_4_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_8_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 69, "score": 184337.7795065614 }, { "content": "#[bench]\n\nfn reductions_8_to_2_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n 
\"tests/files/palette_8_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 70, "score": 184337.7795065614 }, { "content": "#[bench]\n\nfn reductions_8_to_1_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_8_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 71, "score": 184337.7795065614 }, { "content": "#[bench]\n\nfn reductions_4_to_2_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 72, "score": 184337.7795065614 }, { "content": "#[bench]\n\nfn reductions_2_to_1_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 73, "score": 184337.7795065614 }, { "content": "#[bench]\n\nfn reductions_16_to_8_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 74, "score": 184337.7795065614 
}, { "content": "#[bench]\n\nfn reductions_4_to_1_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_bit_depth();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 75, "score": 184337.7795065614 }, { "content": "#[bench]\n\nfn reductions_palette_unused_reduction(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_should_be_reduced_with_unused.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_palette();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 76, "score": 180757.81845755922 }, { "content": "#[bench]\n\nfn reductions_palette_full_reduction(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_should_be_reduced_with_both.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_palette();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 77, "score": 180757.81845755922 }, { "content": "#[bench]\n\nfn reductions_palette_duplicate_reduction(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_should_be_reduced_with_dupes.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_palette();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 78, "score": 180757.81845755922 }, { "content": "#[bench]\n\nfn reductions_alpha_left(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_reduce_alpha_left.png\"));\n\n let png = png::PngData::new(&input, 
false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_alpha_channel(AlphaOptim::Left);\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 79, "score": 180007.94959766843 }, { "content": "#[bench]\n\nfn reductions_alpha_right(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_reduce_alpha_right.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_alpha_channel(AlphaOptim::Right);\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 80, "score": 180007.94959766843 }, { "content": "#[bench]\n\nfn reductions_alpha_black(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_reduce_alpha_black.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_alpha_channel(AlphaOptim::Black);\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 81, "score": 180007.94959766843 }, { "content": "#[bench]\n\nfn reductions_alpha_white(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_reduce_alpha_white.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_alpha_channel(AlphaOptim::White);\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 82, "score": 180007.94959766843 }, { "content": "#[bench]\n\nfn reductions_rgb_to_palette_8(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_palette_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_color_type();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 83, "score": 179859.51229589156 }, { "content": "#[bench]\n\nfn 
reductions_rgba_to_palette_8(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgba_8_should_be_palette_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_color_type();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 84, "score": 179859.51229589156 }, { "content": "#[bench]\n\nfn reductions_rgba_to_grayscale_alpha_16(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/rgba_16_should_be_grayscale_alpha_16.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_color_type();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 85, "score": 175856.57990092743 }, { "content": "#[bench]\n\nfn reductions_rgba_to_grayscale_alpha_8(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/rgba_8_should_be_grayscale_alpha_8.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.reduce_color_type();\n\n });\n\n}\n\n\n", "file_path": "benches/reductions.rs", "rank": 86, "score": 175856.57990092743 }, { "content": "#[bench]\n\nfn deinterlacing_16_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/interlaced_rgb_16_should_be_rgb_16.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(0);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 87, "score": 175193.09080110022 }, { "content": "#[bench]\n\nfn deinterlacing_1_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/interlaced_palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| 
{\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(0);\n\n });\n\n}\n", "file_path": "benches/interlacing.rs", "rank": 88, "score": 175193.09080110022 }, { "content": "#[bench]\n\nfn deinterlacing_8_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/interlaced_rgb_8_should_be_rgb_8.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(0);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 89, "score": 175193.09080110022 }, { "content": "#[bench]\n\nfn deinterlacing_4_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/interlaced_palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(0);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 90, "score": 175193.09080110022 }, { "content": "#[bench]\n\nfn deinterlacing_2_bits(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/interlaced_palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n let mut safe_png = png.clone();\n\n safe_png.change_interlacing(0);\n\n });\n\n}\n\n\n", "file_path": "benches/interlacing.rs", "rank": 91, "score": 175193.09080110022 }, { "content": "#[bench]\n\nfn deflate_1_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_1_should_be_palette_1.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 0, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 92, "score": 171181.9705212262 }, { "content": "#[bench]\n\nfn deflate_8_bits_strategy_0(b: &mut Bencher) {\n\n let input = 
test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 0, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 93, "score": 171181.9705212262 }, { "content": "#[bench]\n\nfn deflate_2_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_2_should_be_palette_2.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 0, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 94, "score": 171181.9705212262 }, { "content": "#[bench]\n\nfn deflate_8_bits_strategy_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_8_should_be_rgb_8.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 1, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 95, "score": 171181.9705212262 }, { "content": "#[bench]\n\nfn deflate_4_bits_strategy_0(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\n\n \"tests/files/palette_4_should_be_palette_4.png\",\n\n ));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 0, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 96, "score": 171181.9705212262 }, { "content": "#[bench]\n\nfn deflate_16_bits_strategy_1(b: &mut Bencher) {\n\n let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 1, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 97, "score": 171181.9705212262 }, { "content": "#[bench]\n\nfn deflate_16_bits_strategy_0(b: &mut Bencher) {\n\n 
let input = test::black_box(PathBuf::from(\"tests/files/rgb_16_should_be_rgb_16.png\"));\n\n let png = png::PngData::new(&input, false).unwrap();\n\n\n\n b.iter(|| {\n\n deflate::deflate(png.raw_data.as_ref(), 9, 0, 15)\n\n });\n\n}\n\n\n", "file_path": "benches/deflate.rs", "rank": 98, "score": 171181.9705212262 } ]
Rust
quicksilver-utils-async/src/std_web/websocket.rs
johnpmayer/quicksilver-utils
0ddd30d02d4dcf152a142ec79b29495069999d4c
use futures_util::future::poll_fn; use std::cell::RefCell; use std::collections::VecDeque; use std::sync::Arc; use std::task::{Poll, Waker}; use url::Url; use std_web::web::{ event::{ IMessageEvent, SocketCloseEvent, SocketErrorEvent, SocketMessageData, SocketMessageEvent, SocketOpenEvent, }, IEventTarget, SocketBinaryType, TypedArray, WebSocket, }; use crate::websocket::{WebSocketError, WebSocketMessage}; use log::{debug, trace}; enum SocketState { Init, Open, Error(String), Closed, } struct AsyncWebSocketInner { ws: WebSocket, state: SocketState, waker: Option<Waker>, buffer: VecDeque<SocketMessageEvent>, } pub struct AsyncWebSocket { inner: Arc<RefCell<AsyncWebSocketInner>>, } impl Clone for AsyncWebSocket { fn clone(&self) -> Self { AsyncWebSocket { inner: self.inner.clone(), } } } impl AsyncWebSocket { pub async fn connect(url: &Url) -> Result<Self, WebSocketError> { let ws = WebSocket::new(url.as_str()) .map_err(|_| WebSocketError::NativeError("Creation".to_string()))?; ws.set_binary_type(SocketBinaryType::ArrayBuffer); let async_ws: AsyncWebSocket = { let ws = ws.clone(); let state = SocketState::Init; let waker = None; let buffer = VecDeque::new(); let inner = Arc::new(RefCell::new(AsyncWebSocketInner { ws, state, waker, buffer, })); AsyncWebSocket { inner } }; ws.add_event_listener({ let async_ws = async_ws.clone(); move |_: SocketOpenEvent| { trace!("Websocket onopen callback!"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); inner.state = SocketState::Open; if let Some(waker) = inner.waker.take() { waker.wake() } } }); ws.add_event_listener({ let async_ws = async_ws.clone(); move |_: SocketCloseEvent| { trace!("Websocket onclose callback!"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); inner.state = SocketState::Closed; if let Some(waker) = inner.waker.take() { waker.wake() } } }); ws.add_event_listener({ let async_ws = async_ws.clone(); move |error_event: SocketErrorEvent| { trace!("Websocket onerror 
callback!"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); let error_message = format!("{:?}", error_event); inner.state = SocketState::Error(error_message); if let Some(waker) = inner.waker.take() { waker.wake() } } }); ws.add_event_listener({ let async_ws = async_ws.clone(); move |message_event: SocketMessageEvent| { let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); inner.buffer.push_back(message_event); if let Some(waker) = inner.waker.take() { waker.wake() } } }); poll_fn({ let async_ws = async_ws.clone(); move |cx| { trace!("Polling"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); match &inner.state { SocketState::Init => { inner.waker.replace(cx.waker().clone()); Poll::Pending } SocketState::Open => Poll::Ready(Ok(())), SocketState::Error(val) => { Poll::Ready(Err(WebSocketError::StateError(val.clone()))) } SocketState::Closed => Poll::Ready(Err(WebSocketError::StateClosed)), } } }) .await?; Ok(async_ws) } pub async fn send(&self, msg: &str) -> Result<(), WebSocketError> { trace!("Send"); let inner: &AsyncWebSocketInner = &self.inner.borrow(); inner .ws .send_text(msg) .map_err(|_| WebSocketError::NativeError("Send".to_string()))?; Ok(()) } pub async fn receive(&self) -> Result<WebSocketMessage, WebSocketError> { let message_event = poll_fn({ move |cx| { trace!("Polling"); let inner: &mut AsyncWebSocketInner = &mut *self.inner.borrow_mut(); match &inner.state { SocketState::Init => Poll::Ready(Err(WebSocketError::StateInit)), SocketState::Open => { if let Some(ev) = inner.buffer.pop_front() { Poll::Ready(Ok(ev)) } else { inner.waker.replace(cx.waker().clone()); Poll::Pending } } SocketState::Error(val) => { Poll::Ready(Err(WebSocketError::StateError(val.clone()))) } SocketState::Closed => Poll::Ready(Err(WebSocketError::StateClosed)), } } }) .await?; let data = message_event.data(); debug!("{:?}", &data); let message = match data { SocketMessageData::Text(s) => 
WebSocketMessage::String(s), SocketMessageData::ArrayBuffer(buf) => { let t_buffer: TypedArray<u8> = TypedArray::from(buf); WebSocketMessage::Binary(t_buffer.to_vec()) } SocketMessageData::Blob(_) => { panic!("binary should have been set to array buffer above...") } }; Ok(message) } }
use futures_util::future::poll_fn; use std::cell::RefCell; use std::collections::VecDeque; use std::sync::Arc; use std::task::{Poll, Waker}; use url::Url; use std_web::web::{ event::{ IMessageEvent, SocketCloseEvent, SocketErrorEvent, SocketMessageData, SocketMessageEvent, SocketOpenEvent, }, IEventTarget, SocketBinaryType, TypedArray, WebSocket, }; use crate::websocket::{WebSocketError, WebSocketMessage}; use log::{debug, trace}; enum SocketState { Init, Open, Error(String), Closed, } struct AsyncWebSocketInner { ws: WebSocket, state: SocketState, waker: Option<Waker>, buffer: VecDeque<SocketMessageEvent>, } pub struct AsyncWebSocket { inner: Arc<RefCell<AsyncWebSocketInner>>, } impl Clone for AsyncWebSocket { fn clone(&self) -> Self { AsyncWebSocket { inner: self.inner.clone(), } } } impl AsyncWebSocket { pub async fn connect(url: &Url) -> Result<Self, WebSocketError> { let ws = WebSocket::new(url.as_str()) .map_err(|_| WebSocketError::NativeError("Creation".to_string()))?; ws.set_binary_type(SocketBinaryType::ArrayBuffer); let async_ws: AsyncWebSocket = { let ws = ws.clone(); let state = SocketState::Init; let waker = None; let buffer = VecDeque::new(); let inner = Arc::new(RefCell::new(AsyncWebSocketInner { ws, state, waker, buffer, })); AsyncWebSocket { inner } }; ws.add_event_listener({ let async_ws = async_ws.clone(); move |_: SocketOpenEvent| { trace!("Websocket onopen callback!"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); inner.state = SocketState::Open; if let Some(waker) = inner.waker.take() { waker.wake() } } }); ws.add_event_listener({ let async_ws = async_ws.clone(); move |_: SocketCloseEvent| { trace!("Websocket onclose callback!"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); inner.state = SocketState::Closed; if let Some(waker) = inner.waker.take() { waker.wake() } } }); ws.add_event_listener({ let async_ws = async_ws.clone(); move |error_event: SocketErrorEvent| { trace!("Websocket onerror 
callback!"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); let error_message = format!("{:?}", error_event); inner.state = SocketState::Error(error_message); if let Some(waker) = inner.waker.take() { waker.wake() } } }); ws.add_event_listener({ let async_ws = async_ws.clone(); move |message_event: SocketMessageEvent| { let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); inner.buffer.push_back(message_event); if let Some(waker) = inner.waker.take() { waker.wake() } } });
.await?; Ok(async_ws) } pub async fn send(&self, msg: &str) -> Result<(), WebSocketError> { trace!("Send"); let inner: &AsyncWebSocketInner = &self.inner.borrow(); inner .ws .send_text(msg) .map_err(|_| WebSocketError::NativeError("Send".to_string()))?; Ok(()) } pub async fn receive(&self) -> Result<WebSocketMessage, WebSocketError> { let message_event = poll_fn({ move |cx| { trace!("Polling"); let inner: &mut AsyncWebSocketInner = &mut *self.inner.borrow_mut(); match &inner.state { SocketState::Init => Poll::Ready(Err(WebSocketError::StateInit)), SocketState::Open => { if let Some(ev) = inner.buffer.pop_front() { Poll::Ready(Ok(ev)) } else { inner.waker.replace(cx.waker().clone()); Poll::Pending } } SocketState::Error(val) => { Poll::Ready(Err(WebSocketError::StateError(val.clone()))) } SocketState::Closed => Poll::Ready(Err(WebSocketError::StateClosed)), } } }) .await?; let data = message_event.data(); debug!("{:?}", &data); let message = match data { SocketMessageData::Text(s) => WebSocketMessage::String(s), SocketMessageData::ArrayBuffer(buf) => { let t_buffer: TypedArray<u8> = TypedArray::from(buf); WebSocketMessage::Binary(t_buffer.to_vec()) } SocketMessageData::Blob(_) => { panic!("binary should have been set to array buffer above...") } }; Ok(message) } }
poll_fn({ let async_ws = async_ws.clone(); move |cx| { trace!("Polling"); let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut(); match &inner.state { SocketState::Init => { inner.waker.replace(cx.waker().clone()); Poll::Pending } SocketState::Open => Poll::Ready(Ok(())), SocketState::Error(val) => { Poll::Ready(Err(WebSocketError::StateError(val.clone()))) } SocketState::Closed => Poll::Ready(Err(WebSocketError::StateClosed)), } } })
call_expression
[ { "content": "struct AsyncWebSocketInner {\n\n ws: WebSocket,\n\n state: SocketState,\n\n waker: Option<Waker>,\n\n buffer: VecDeque<MessageEvent>,\n\n}\n\n\n\npub struct AsyncWebSocket {\n\n inner: Arc<RefCell<AsyncWebSocketInner>>,\n\n}\n\n\n\nimpl Clone for AsyncWebSocket {\n\n fn clone(&self) -> Self {\n\n AsyncWebSocket {\n\n inner: self.inner.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl AsyncWebSocket {\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 0, "score": 122217.0670927532 }, { "content": "enum SocketState {\n\n Init,\n\n Open,\n\n Error(String),\n\n Closed,\n\n}\n\n\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 2, "score": 120482.64343270307 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype WebSocketInner = crate::desktop::websocket::AsyncWebSocket;\n\n\n\n#[derive(Clone)]\n\npub struct WebSocket {\n\n inner: WebSocketInner,\n\n}\n\n\n\n// TODO: switch to http::Uri\n\n// TODO: switch to async_trait..\n\nimpl WebSocket {\n\n pub async fn connect(url: &Url) -> Result<Self, WebSocketError> {\n\n let inner = WebSocketInner::connect(url).await?;\n\n Ok(WebSocket { inner })\n\n }\n\n\n\n pub async fn send(&self, msg: &WebSocketMessage) -> Result<(), WebSocketError> {\n\n self.inner.send(msg).await\n\n }\n\n\n\n pub async fn receive(&self) -> Result<WebSocketMessage, WebSocketError> {\n\n self.inner.receive().await\n\n }\n\n\n\n pub async fn close(&self) -> Result<(), WebSocketError> {\n\n self.inner.close().await\n\n }\n\n}\n", "file_path": "quicksilver-utils-async/src/websocket.rs", "rank": 4, "score": 103520.88319808595 }, { "content": "#[cfg(all(target_arch = \"wasm32\", feature = \"stdweb\"))]\n\ntype WebSocketInner = crate::std_web::websocket::AsyncWebSocket;\n\n\n", "file_path": "quicksilver-utils-async/src/websocket.rs", "rank": 5, "score": 101927.95597323515 }, { "content": "#[cfg(all(target_arch = \"wasm32\", feature = \"web-sys\"))]\n\ntype WebSocketInner = 
crate::web_sys::websocket::AsyncWebSocket;\n\n\n", "file_path": "quicksilver-utils-async/src/websocket.rs", "rank": 6, "score": 101927.95597323515 }, { "content": "pub fn client_config() -> ClientConfig {\n\n let mut config = ClientConfig::new();\n\n let native_certs = load_native_certs().expect(\"Could not load platform certs\");\n\n config.root_store = native_certs;\n\n config\n\n}\n", "file_path": "quicksilver-utils-async/src/desktop/tls.rs", "rank": 7, "score": 98763.91408716515 }, { "content": "struct ReadyWaker {\n\n ready: bool,\n\n waker: Option<Waker>,\n\n}\n\n\n\npub async fn sleep_ms(ms: u32) {\n\n let window = window().expect(\"Get the window\");\n\n\n\n let ready_waker = Arc::new(RefCell::new(ReadyWaker {\n\n ready: false,\n\n waker: None,\n\n }));\n\n\n\n let callback = {\n\n let ready_waker = ready_waker.clone();\n\n Closure::wrap(Box::new(move |_| {\n\n trace!(\"set_timeout callback!\");\n\n let inner: &mut ReadyWaker = &mut *ready_waker.borrow_mut();\n\n inner.ready = true;\n\n if let Some(waker) = inner.waker.take() {\n", "file_path": "quicksilver-utils-async/src/web_sys/time.rs", "rank": 8, "score": 94770.19542313294 }, { "content": "struct XhrClosureInner {\n\n xhr: XmlHttpRequest,\n\n have_set_handlers: bool,\n\n}\n\n\n", "file_path": "quicksilver-utils-async/src/std_web/request.rs", "rank": 9, "score": 92420.98241769944 }, { "content": "#[cfg(feature = \"web-sys\")]\n\n#[wasm_bindgen(start)]\n\npub fn main_js() {\n\n #[cfg(debug_assertions)]\n\n console_error_panic_hook::set_once();\n\n\n\n run(Settings::default(), app::app)\n\n}\n", "file_path": "examples/project/src/lib.rs", "rank": 10, "score": 86194.89201877582 }, { "content": "#[derive(Debug)]\n\nenum CustomEvent {\n\n OnePingOnly,\n\n Ticked,\n\n EchoResponse(WebSocketMessage),\n\n // Resource(String),\n\n}\n\n\n\nasync fn tick_loop(task_context: TaskContext<'_, CustomEvent>) {\n\n loop {\n\n task_context.dispatch(CustomEvent::Ticked);\n\n sleep_ms(500).await\n\n }\n\n}\n\n\n\nasync fn 
read_websocket_loop(task_context: TaskContext<'_, CustomEvent>, ws: WebSocket) {\n\n loop {\n\n let message: WebSocketMessage = ws.receive().await.unwrap();\n\n task_context.dispatch(CustomEvent::EchoResponse(message))\n\n }\n\n}\n", "file_path": "examples/project/src/app.rs", "rank": 11, "score": 76005.2517561467 }, { "content": "struct XhrClosure {\n\n inner: Arc<RefCell<XhrClosureInner>>,\n\n}\n\n\n\npub async fn get_resource(url: &str) -> Result<String, RequestError> {\n\n debug!(\"stdweb get request {}\", url);\n\n let xhr = XmlHttpRequest::new();\n\n xhr.open(\"GET\", url)\n\n .map_err(|e| RequestError::NativeError(format!(\"Open: {}\", e)))?;\n\n xhr.set_response_type(XhrResponseType::ArrayBuffer)\n\n .map_err(|e| RequestError::NativeError(format!(\"Set Response Type: {}\", e)))?;\n\n xhr.send()\n\n .map_err(|e| RequestError::NativeError(format!(\"Send: {}\", e)))?;\n\n\n\n let xhr_closure = XhrClosure {\n\n inner: Arc::new(RefCell::new(XhrClosureInner {\n\n xhr,\n\n have_set_handlers: false,\n\n })),\n\n };\n", "file_path": "quicksilver-utils-async/src/std_web/request.rs", "rank": 12, "score": 68089.66509497908 }, { "content": "#[async_trait]\n\npub trait ServiceClient {\n\n fn new() -> Self;\n\n\n\n fn set_auth_token(&mut self, auth_token: &str);\n\n\n\n async fn post_raw(&self, uri: Uri, request_body: Bytes) -> Result<Bytes>;\n\n\n\n async fn post_proto<RequestT, ResponseT>(\n\n &self,\n\n uri: Uri,\n\n request_payload: &RequestT,\n\n ) -> Result<ResponseT>\n\n where\n\n RequestT: Message,\n\n ResponseT: Message,\n\n {\n\n let request_body: Bytes = From::from(request_payload.write_to_bytes().unwrap());\n\n trace!(\"Request bytes: {:?}\", request_body);\n\n let response_body: Bytes = self.post_raw(uri, request_body).await?;\n\n trace!(\"Response bytes: {:?}\", response_body);\n\n let response_payload =\n\n protobuf::parse_from_carllerche_bytes::<ResponseT>(&response_body).unwrap();\n\n Ok(response_payload)\n\n }\n\n}\n\n\n\npub use 
platform::ServiceClientImpl;\n", "file_path": "quicksilver-utils-async/src/request.rs", "rank": 13, "score": 67392.62849423102 }, { "content": "fn main() -> io::Result<()> {\n\n // let url = Url::parse(\"https://echo.websocket.org\").expect(\"parse a url\");\n\n let url = Url::parse(\"https://www.google.com\").expect(\"parse a url\");\n\n // let url = Url::parse(\"https://www.websocket.org/\").expect(\"parse a url\");\n\n let port = url.port_or_known_default();\n\n let addr = url.socket_addrs(|| port).expect(\"url lookup via dns\")[0]; // THIS IS THE BUG! ipv6/ipv4, need to try all addresses\n\n let domain = url.host_str().expect(\"url host\");\n\n // let domain = \"websocket.org\";\n\n\n\n // Create a bare bones HTTP GET request\n\n let http_request = format!(\"GET / HTTP/1.0\\r\\nHost: {}\\r\\n\\r\\n\", domain);\n\n\n\n // let cafile = &options.cafile;\n\n\n\n task::block_on(async move {\n\n // Create default connector comes preconfigured with all you need to safely connect\n\n // to remote servers!\n\n let connector = TlsConnector::default();\n\n\n\n // Open a normal TCP connection, just as you are used to\n", "file_path": "quicksilver-utils-async/examples/wss.rs", "rank": 14, "score": 64687.66100235041 }, { "content": "trait AsyncStream: AsyncRead + AsyncWrite + Unpin {}\n\n\n\nimpl<T: AsyncRead + AsyncWrite + Unpin> AsyncStream for T {}\n\n\n\nasync fn client(url: &Url) -> Result<Client<'_, Box<dyn AsyncStream>>, WebSocketError> {\n\n debug!(\"Creating client to url {}\", url);\n\n let port = url.port_or_known_default();\n\n let host = url.host_str().expect(\"url host\");\n\n let path = url.path();\n\n let scheme = url.scheme();\n\n let addresses = url.socket_addrs(|| port).expect(\"url lookup via dns\");\n\n\n\n trace!(\"Possible addresses {:?}\", addresses);\n\n let address = addresses[0];\n\n\n\n trace!(\"Connecting to address {}\", address);\n\n let transport_stream = {\n\n let mut connected_stream: Option<TcpStream> = None;\n\n for address in addresses 
{\n\n let attempted_stream = TcpStream::connect(address).await;\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 15, "score": 58704.06026286455 }, { "content": "//! # websocket\n\n//!\n\n//! An async websocket client that can send and recieve. The\n\n//! `WebSocket` is cloneable, so reading and writing can happen\n\n//! on separate futures.\n\nuse bytes::Bytes;\n\nuse url::Url;\n\n\n\n#[derive(Debug)]\n\npub enum WebSocketError {\n\n NativeError(String),\n\n StateInit,\n\n StateError(String),\n\n StateClosed,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum WebSocketMessage {\n\n String(String),\n\n Binary(Bytes),\n\n}\n\n\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"web-sys\"))]\n", "file_path": "quicksilver-utils-async/src/websocket.rs", "rank": 16, "score": 56161.23895964928 }, { "content": "use futures_io::{AsyncRead, AsyncWrite};\n\nuse url::Url;\n\n\n\nuse async_std::net::TcpStream;\n\nuse async_tls::TlsConnector;\n\nuse bytes::Bytes;\n\nuse soketto::{\n\n connection::{Error as ConnectionError, Receiver, Sender},\n\n handshake::{Client, Error as HandshakeError, ServerResponse},\n\n};\n\nuse std::cell::RefCell;\n\nuse std::io::Error as IoError;\n\nuse std::sync::Arc;\n\n\n\nuse log::{debug, trace, warn};\n\n\n\nuse super::tls::client_config;\n\nuse crate::websocket::{WebSocketError, WebSocketMessage};\n\n\n\n#[derive(Clone)]\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 17, "score": 54365.636163822666 }, { "content": "}\n\n\n\nimpl AsyncWebSocket {\n\n pub async fn connect(url: &Url) -> Result<Self, WebSocketError> {\n\n let mut client = client(url).await?;\n\n\n\n let (sender, receiver) = match client.handshake().await? {\n\n ServerResponse::Accepted { .. } => client.into_builder().finish(),\n\n ServerResponse::Redirect { .. } => unimplemented!(\"follow location URL\"),\n\n ServerResponse::Rejected { .. 
} => unimplemented!(\"handle failure\"),\n\n };\n\n\n\n let sender = Arc::new(RefCell::new(sender));\n\n let receiver = Arc::new(RefCell::new(receiver));\n\n\n\n Ok(AsyncWebSocket { sender, receiver })\n\n }\n\n\n\n pub async fn send(&self, msg: &WebSocketMessage) -> Result<(), WebSocketError> {\n\n let mut sender = self.sender.borrow_mut();\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 18, "score": 54365.625382601276 }, { "content": "pub struct AsyncWebSocket {\n\n sender: Arc<RefCell<Sender<Box<dyn AsyncStream>>>>,\n\n receiver: Arc<RefCell<Receiver<Box<dyn AsyncStream>>>>,\n\n}\n\n\n\nimpl From<HandshakeError> for WebSocketError {\n\n fn from(err: HandshakeError) -> Self {\n\n WebSocketError::NativeError(format!(\"Handshake error: {}\", err))\n\n }\n\n}\n\n\n\nimpl From<ConnectionError> for WebSocketError {\n\n fn from(err: ConnectionError) -> Self {\n\n WebSocketError::NativeError(format!(\"Connection error: {}\", err))\n\n }\n\n}\n\n\n\nimpl From<IoError> for WebSocketError {\n\n fn from(err: IoError) -> Self {\n\n WebSocketError::NativeError(format!(\"IO Error: {}\", err))\n\n }\n\n}\n\n\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 19, "score": 54361.26486753203 }, { "content": " match msg {\n\n WebSocketMessage::String(s) => sender.send_text(s).await?,\n\n WebSocketMessage::Binary(b) => sender.send_binary(b).await?,\n\n }\n\n sender.flush().await?; // otherwise it just sits there, which is just surprising for casual users\n\n Ok(())\n\n }\n\n\n\n pub async fn receive(&self) -> Result<WebSocketMessage, WebSocketError> {\n\n let data = self.receiver.borrow_mut().receive_data().await?;\n\n let message = if data.is_binary() {\n\n let data_slice: &[u8] = data.as_ref();\n\n WebSocketMessage::Binary(Bytes::copy_from_slice(data_slice))\n\n } else {\n\n let data_slice: &[u8] = data.as_ref();\n\n let s = String::from_utf8(Vec::from(data_slice))\n\n .map_err(|_| WebSocketError::NativeError(\"invalid 
ut8\".to_string()))?;\n\n WebSocketMessage::String(s)\n\n };\n\n Ok(message)\n\n }\n\n\n\n pub async fn close(&self) -> Result<(), WebSocketError> {\n\n self.sender.borrow_mut().close().await?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 20, "score": 54359.41646265981 }, { "content": " let boxed_stream: Box<dyn AsyncStream> = if scheme == \"wss\" {\n\n debug!(\n\n \"Starting TLS handshake for secure websocket with domain {}\",\n\n host\n\n );\n\n\n\n let config = client_config();\n\n\n\n let connector: TlsConnector = TlsConnector::from(Arc::new(config));\n\n trace!(\"Created connector\");\n\n\n\n let handshake = connector.connect(host, transport_stream);\n\n let tls_stream = handshake.await?;\n\n debug!(\"Completed TLS handshake\");\n\n Box::new(tls_stream)\n\n } else {\n\n Box::new(transport_stream)\n\n };\n\n\n\n Ok(Client::new(boxed_stream, host, path))\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 21, "score": 54356.7271689486 }, { "content": " match attempted_stream {\n\n Ok(stream) => {\n\n connected_stream = Some(stream);\n\n trace!(\"Successfully connected to address {}\", address);\n\n break;\n\n }\n\n Err(e) => warn!(\"Couldn't connect to address {}, {}\", address, e),\n\n }\n\n }\n\n match connected_stream {\n\n Some(stream) => stream,\n\n None => {\n\n return Err(WebSocketError::NativeError(\n\n \"All addresses failed to connect\".to_string(),\n\n ))\n\n }\n\n }\n\n };\n\n\n\n trace!(\"Scheme: {}\", scheme);\n", "file_path": "quicksilver-utils-async/src/desktop/websocket.rs", "rank": 22, "score": 54356.50271598166 }, { "content": " Closure::wrap(Box::new(move |_| {\n\n trace!(\"Websocket onopen callback!\");\n\n let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut();\n\n inner.state = SocketState::Open;\n\n if let Some(waker) = inner.waker.take() {\n\n waker.wake()\n\n }\n\n }) as Box<dyn FnMut(JsValue)>)\n\n };\n\n 
ws.set_onopen(Some(onopen_callback.as_ref().unchecked_ref()));\n\n onopen_callback.forget();\n\n\n\n let onclose_callback = {\n\n let async_ws = async_ws.clone();\n\n Closure::wrap(Box::new(move |_| {\n\n trace!(\"Websocket onclose callback!\");\n\n let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut();\n\n inner.state = SocketState::Closed;\n\n if let Some(waker) = inner.waker.take() {\n\n waker.wake()\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 24, "score": 52711.38558134651 }, { "content": " let onmessage_callback = {\n\n let async_ws = async_ws.clone();\n\n Closure::wrap(Box::new(move |ev: MessageEvent| {\n\n trace!(\"Websocket onmessage callback!\");\n\n let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut();\n\n inner.buffer.push_back(ev);\n\n if let Some(waker) = inner.waker.take() {\n\n waker.wake()\n\n }\n\n }) as Box<dyn FnMut(MessageEvent)>)\n\n };\n\n ws.set_onmessage(Some(onmessage_callback.as_ref().unchecked_ref()));\n\n onmessage_callback.forget();\n\n\n\n poll_fn({\n\n let async_ws = async_ws.clone();\n\n move |cx| {\n\n trace!(\"Polling\");\n\n let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut();\n\n match &inner.state {\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 25, "score": 52708.08680248312 }, { "content": " pub async fn connect(url: &Url) -> Result<Self, WebSocketError> {\n\n let ws = WebSocket::new(url.as_str())?;\n\n ws.set_binary_type(BinaryType::Arraybuffer);\n\n let async_ws: AsyncWebSocket = {\n\n let ws = ws.clone();\n\n let state = SocketState::Init;\n\n let waker = None;\n\n let buffer = VecDeque::new();\n\n\n\n let inner = Arc::new(RefCell::new(AsyncWebSocketInner {\n\n ws,\n\n state,\n\n waker,\n\n buffer,\n\n }));\n\n AsyncWebSocket { inner }\n\n };\n\n\n\n let onopen_callback = {\n\n let async_ws = async_ws.clone();\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 28, "score": 
52706.33465009639 }, { "content": " }\n\n }) as Box<dyn FnMut(JsValue)>)\n\n };\n\n ws.set_onclose(Some(onclose_callback.as_ref().unchecked_ref()));\n\n onclose_callback.forget();\n\n\n\n let onerror_callback = {\n\n let async_ws = async_ws.clone();\n\n Closure::wrap(Box::new(move |err: JsValue| {\n\n trace!(\"Websocket onerror callback!\");\n\n let inner: &mut AsyncWebSocketInner = &mut *async_ws.inner.borrow_mut();\n\n inner.state = SocketState::Error(err.as_string().unwrap());\n\n if let Some(waker) = inner.waker.take() {\n\n waker.wake()\n\n }\n\n }) as Box<dyn FnMut(JsValue)>)\n\n };\n\n ws.set_onerror(Some(onerror_callback.as_ref().unchecked_ref()));\n\n onerror_callback.forget();\n\n\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 29, "score": 52705.683712597405 }, { "content": " match msg {\n\n WebSocketMessage::String(s) => inner.ws.send_with_str(s)?,\n\n WebSocketMessage::Binary(b) => inner.ws.send_with_u8_array(b)?,\n\n }\n\n Ok(())\n\n }\n\n\n\n pub async fn close(&self) -> Result<(), WebSocketError> {\n\n let inner: &mut AsyncWebSocketInner = &mut *self.inner.borrow_mut();\n\n inner.ws.close()?;\n\n Ok(())\n\n }\n\n\n\n pub async fn receive(&self) -> Result<WebSocketMessage, WebSocketError> {\n\n let message_event = poll_fn({\n\n move |cx| {\n\n trace!(\"Polling\");\n\n let inner: &mut AsyncWebSocketInner = &mut *self.inner.borrow_mut();\n\n match &inner.state {\n\n SocketState::Init => Poll::Ready(Err(WebSocketError::StateInit)),\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 32, "score": 52700.50896114969 }, { "content": " SocketState::Init => {\n\n inner.waker.replace(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n SocketState::Open => Poll::Ready(Ok(())),\n\n SocketState::Error(val) => {\n\n Poll::Ready(Err(WebSocketError::StateError(val.clone())))\n\n }\n\n SocketState::Closed => Poll::Ready(Err(WebSocketError::StateClosed)),\n\n }\n\n }\n\n })\n\n .await?;\n\n\n\n Ok(async_ws)\n\n 
}\n\n\n\n pub async fn send(&self, msg: &WebSocketMessage) -> Result<(), WebSocketError> {\n\n trace!(\"Send\");\n\n let inner: &mut AsyncWebSocketInner = &mut *self.inner.borrow_mut();\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 33, "score": 52700.09523792412 }, { "content": " SocketState::Open => {\n\n if let Some(ev) = inner.buffer.pop_front() {\n\n Poll::Ready(Ok(ev))\n\n } else {\n\n inner.waker.replace(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n SocketState::Error(val) => {\n\n Poll::Ready(Err(WebSocketError::StateError(val.clone())))\n\n }\n\n SocketState::Closed => Poll::Ready(Err(WebSocketError::StateClosed)),\n\n }\n\n }\n\n })\n\n .await?;\n\n\n\n let data: JsValue = message_event.data();\n\n trace!(\"{:?}\", &data);\n\n\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 34, "score": 52693.820690533714 }, { "content": "use js_sys::{ArrayBuffer, Uint8Array};\n\nuse web_sys::{BinaryType, MessageEvent, WebSocket};\n\n\n\nuse std::cell::RefCell;\n\nuse std::sync::Arc;\n\n\n\nuse std::collections::VecDeque;\n\n\n\nuse std::task::{Poll, Waker};\n\n\n\nuse futures_util::future::poll_fn;\n\nuse url::Url;\n\nuse wasm_bindgen::prelude::{Closure, JsValue};\n\nuse wasm_bindgen::JsCast;\n\n\n\nuse bytes::Bytes;\n\n\n\nuse crate::websocket::{WebSocketError, WebSocketMessage};\n\n\n\nuse log::trace;\n\n\n\nimpl From<JsValue> for WebSocketError {\n\n fn from(js_value: JsValue) -> Self {\n\n WebSocketError::NativeError(js_value.as_string().unwrap())\n\n }\n\n}\n\n\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 37, "score": 52689.256829293765 }, { "content": " let message = match data.as_string() {\n\n Some(s) => WebSocketMessage::String(s),\n\n None => {\n\n let buf: &ArrayBuffer = data.as_ref().unchecked_ref(); // consider using JsCast::dyn_into for safety?\n\n let vec: Vec<u8> = Uint8Array::new(buf).to_vec();\n\n let bytes = Bytes::from(vec);\n\n 
WebSocketMessage::Binary(bytes)\n\n }\n\n };\n\n\n\n Ok(message)\n\n }\n\n}\n", "file_path": "quicksilver-utils-async/src/web_sys/websocket.rs", "rank": 38, "score": 52678.11370344212 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum Monestary {\n\n Northern,\n\n Southern,\n\n Eastern,\n\n King,\n\n}\n\n\n", "file_path": "examples/monk/src/dialog.rs", "rank": 40, "score": 47733.54823343645 }, { "content": "fn main() {\n\n let mut settings = Settings::default();\n\n settings.log_level = Level::Debug;\n\n run(settings, app);\n\n}\n", "file_path": "examples/project/src/main.rs", "rank": 41, "score": 47184.1742254079 }, { "content": "#[derive(Eq, Hash, PartialEq)]\n\nenum Animation {\n\n Idle,\n\n Run,\n\n SlashUp,\n\n SlashDown,\n\n SlashForward,\n\n Jump,\n\n Hit,\n\n Faint,\n\n}\n\n\n", "file_path": "quicksilver-utils-ecs/examples/adventurer.rs", "rank": 42, "score": 46772.18948837946 }, { "content": "fn main() {\n\n let mut settings = Settings::default();\n\n settings.log_level = Level::Debug;\n\n run(settings, app)\n\n}\n\n\n\nasync fn app(window: Window, gfx: Graphics, input: Input) -> Result<()> {\n\n let sprite_sheet_data = load_file(\"sprite_sheet.png\").await?;\n\n // let sprite_image = Image::from_raw(&gfx, Some(&sprite_sheet), 416, 512, PixelFormat::RGBA)?;\n\n let sprite_image: Image = Image::from_encoded_bytes(&gfx, &sprite_sheet_data)?;\n\n\n\n debug!(\"Got the image\");\n\n\n\n let mut world = World::new();\n\n\n\n world.insert(RenderContext {\n\n gfx: SendWrapper::new(gfx),\n\n window: SendWrapper::new(window),\n\n });\n\n\n", "file_path": "quicksilver-utils-ecs/examples/adventurer.rs", "rank": 43, "score": 46229.76685241895 }, { "content": "struct BoundingBox<'a> {\n\n pub position: &'a Position,\n\n pub width: f32,\n\n pub height: f32,\n\n}\n\n\n", "file_path": "examples/monk/src/interact.rs", "rank": 44, "score": 43926.25010306028 }, { "content": "fn letter_options(progress: &GameProgression) -> Vec<Monestary> {\n\n let mut letters = 
Vec::new();\n\n if progress.know_northern_monestary && !progress.sent_northern_monestary {\n\n letters.push(Monestary::Northern)\n\n }\n\n if progress.know_southern_monestary && !progress.sent_southern_monestary {\n\n letters.push(Monestary::Southern)\n\n }\n\n if progress.know_eastern_monestary && !progress.sent_eastern_monestary {\n\n letters.push(Monestary::Eastern)\n\n }\n\n if progress.know_invite && !progress.sent_invite {\n\n letters.push(Monestary::King)\n\n }\n\n letters\n\n}\n\n\n\nimpl Dialog {\n\n fn text(&self, progress: &GameProgression) -> String {\n\n match self {\n", "file_path": "examples/monk/src/dialog.rs", "rank": 45, "score": 35036.4162807906 }, { "content": "// Number of milliseconds to display the frame\n\nfn frames() -> HashMap<Animation, Vec<u32>> {\n\n let mut dat = HashMap::new();\n\n dat.insert(\n\n Animation::Idle,\n\n vec![\n\n 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100,\n\n ],\n\n );\n\n dat.insert(Animation::Run, vec![1, 1, 1, 1, 1, 1, 1, 1]);\n\n dat.insert(Animation::SlashUp, vec![1, 1, 1, 1, 1, 1, 1, 1, 1, 1]);\n\n dat.insert(Animation::SlashDown, vec![1, 1, 1, 1, 1, 1, 1, 1, 1, 1]);\n\n dat.insert(Animation::SlashForward, vec![1, 1, 1, 1, 1, 1, 1, 1, 1, 1]);\n\n dat.insert(Animation::Jump, vec![1, 1, 1, 1, 1, 1]);\n\n dat.insert(Animation::Hit, vec![1, 1, 1, 1]);\n\n dat.insert(Animation::Faint, vec![1, 1, 1, 1, 1, 1, 1]);\n\n dat\n\n}\n\n\n", "file_path": "quicksilver-utils-ecs/examples/adventurer.rs", "rank": 46, "score": 35036.4162807906 }, { "content": "fn overlaps(a: &BoundingBox, b: &BoundingBox) -> bool {\n\n let out_left = a.position.x + a.width < b.position.x;\n\n let out_right = a.position.x > b.position.x + b.width;\n\n let out_up = a.position.y + a.height < b.position.y;\n\n let out_down = a.position.y > b.position.y + b.height;\n\n !(out_left || out_right || out_up || out_down)\n\n}\n\n\n\nimpl<'a> System<'a> for InteractionSystem {\n\n type SystemData = (\n\n Write<'a, Global>,\n\n Read<'a, 
InputContext>,\n\n ReadStorage<'a, Position>,\n\n ReadStorage<'a, PlayerInteract>,\n\n ReadStorage<'a, ObjectInteract>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (\n", "file_path": "examples/monk/src/interact.rs", "rank": 47, "score": 34793.06779961072 }, { "content": "//! This file only exists to debug the issues with connecting to wss on desktop\n\n\n\nuse async_std::io;\n\nuse async_std::net::TcpStream;\n\nuse async_std::prelude::*;\n\nuse async_std::task;\n\nuse async_tls::TlsConnector;\n\nuse url::Url;\n\n\n\n// use rustls::ClientConfig;\n\n\n", "file_path": "quicksilver-utils-async/examples/wss.rs", "rank": 48, "score": 27208.045030827587 }, { "content": "use async_trait::async_trait;\n\nuse bytes::Bytes;\n\nuse http::Uri;\n\nuse log::trace;\n\nuse protobuf::Message;\n\n\n\n#[derive(Debug)]\n\npub enum RequestError {\n\n NativeError(String),\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, RequestError>;\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nuse crate::desktop::request as platform;\n\n\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"stdweb\"))]\n\nuse crate::std_web::request as platform;\n\n\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"web-sys\"))]\n\nuse crate::web_sys::request as platform;\n\n\n\n#[async_trait]\n", "file_path": "quicksilver-utils-async/src/request.rs", "rank": 49, "score": 27207.71914461703 }, { "content": "\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"stdweb\"))]\n\nmod std_web;\n\n\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"web-sys\"))]\n\nmod web_sys;\n\n\n\npub mod request;\n\npub mod task_context;\n\npub mod time;\n\npub mod websocket;\n", "file_path": "quicksilver-utils-async/src/lib.rs", "rank": 50, "score": 27203.114602474758 }, { "content": "/// ```\n\n/// async fn tick_loop() {\n\n/// loop {\n\n/// sleep_ms(500).await;\n\n/// do_something_periodically()\n\n/// }\n\n/// }\n\n/// ```\n\npub async fn sleep_ms(ms: u32) {\n\n time::sleep_ms(ms).await\n\n}\n", "file_path": 
"quicksilver-utils-async/src/time.rs", "rank": 51, "score": 27203.090035788053 }, { "content": " let tcp_stream = TcpStream::connect(&addr).await?;\n\n\n\n // Use the connector to start the handshake process.\n\n // This consumes the TCP stream to ensure you are not reusing it.\n\n // Awaiting the handshake gives you an encrypted\n\n // stream back which you can use like any other.\n\n let mut tls_stream = connector.connect(&domain, tcp_stream).await?;\n\n\n\n // We write our crafted HTTP request to it\n\n tls_stream.write_all(http_request.as_bytes()).await?;\n\n\n\n // And read it all to stdout\n\n let mut stdout = io::stdout();\n\n io::copy(&mut tls_stream, &mut stdout).await?;\n\n\n\n // Voila, we're done here!\n\n Ok(())\n\n })\n\n}\n", "file_path": "quicksilver-utils-async/examples/wss.rs", "rank": 52, "score": 27202.763222896 }, { "content": "//! # time\n\n//!\n\n//! `time` just contains `sleep_ms` right not, but is the place\n\n//! where I'd put something like a timer or something more\n\n//! sophistocated like periodic scheduler with a \"maximum fps\"\n\n//! 
governed periodic invocation\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nuse crate::desktop::time;\n\n\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"stdweb\"))]\n\nuse crate::std_web::time;\n\n\n\n#[cfg(all(target_arch = \"wasm32\", feature = \"web-sys\"))]\n\nuse crate::web_sys::time;\n\n\n\n/// Block the async task until woken by the system after <ms> milliseconds\n\n///\n\n/// # Examples\n\n///\n", "file_path": "quicksilver-utils-async/src/time.rs", "rank": 53, "score": 27201.793041109493 }, { "content": "extern crate futures_util;\n\n\n\n#[cfg(all(not(target_arch = \"wasm32\"), feature = \"stdweb\"))]\n\ncompile_error!(\"stdweb can only be enabled for wasm32 targets\");\n\n\n\n#[cfg(all(not(target_arch = \"wasm32\"), feature = \"web-sys\"))]\n\ncompile_error!(\"websys can only be enabled for wasm32 targets\");\n\n\n\n#[cfg(all(feature = \"stdweb\", feature = \"web-sys\"))]\n\ncompile_error!(\"stdweb and web_sys may not both be enabled at once, you must pick one\");\n\n\n\n#[cfg(all(\n\n target_arch = \"wasm32\",\n\n not(feature = \"stdweb\"),\n\n not(feature = \"web-sys\")\n\n))]\n\ncompile_error!(\"either stdweb or web-sys must be enabled for wasm32 targets\");\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nmod desktop;\n", "file_path": "quicksilver-utils-async/src/lib.rs", "rank": 54, "score": 27197.061345142385 }, { "content": " }\n\n\n\n pub async fn run_until_stalled(&mut self) {\n\n poll_fn(move |cx| {\n\n let mut x = self.futures.borrow_mut();\n\n loop {\n\n let pinned_pool = Pin::new(&mut *x);\n\n let pool_state = pinned_pool.poll_next(cx);\n\n // trace!(\"Task context run pool_state: {:?}\", pool_state);\n\n match pool_state {\n\n Poll::Pending => break Poll::Ready(()),\n\n Poll::Ready(Some(_)) => {\n\n // debug!(\"Task finished\");\n\n continue;\n\n }\n\n Poll::Ready(None) => {\n\n self.task_waker.replace(Some(cx.waker().clone()));\n\n break Poll::Ready(());\n\n }\n\n }\n", "file_path": "quicksilver-utils-async/src/task_context.rs", "rank": 
55, "score": 26351.466764168545 }, { "content": " events: self.events.clone(),\n\n futures: self.futures.clone(),\n\n task_waker: self.task_waker.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, E> Default for TaskContext<'a, E> {\n\n fn default() -> Self {\n\n TaskContext {\n\n events: Arc::new(RefCell::new(Vec::new())),\n\n futures: Arc::new(RefCell::new(FuturesUnordered::new())),\n\n task_waker: Arc::new(RefCell::new(None)),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, E> TaskContext<'a, E> {\n\n pub fn new() -> Self {\n\n TaskContext::default()\n", "file_path": "quicksilver-utils-async/src/task_context.rs", "rank": 56, "score": 26348.028337158245 }, { "content": "use crate::request::{RequestError, Result, ServiceClient};\n\nuse async_trait::async_trait;\n\nuse bytes::Bytes;\n\nuse http::Uri;\n\n\n\npub struct ServiceClientImpl {\n\n auth_token: Option<String>,\n\n}\n\n\n\n#[async_trait]\n\nimpl ServiceClient for ServiceClientImpl {\n\n // TODO: build a surf client once and re-use?\n\n fn new() -> Self {\n\n let auth_token = None;\n\n ServiceClientImpl { auth_token }\n\n }\n\n\n\n fn set_auth_token(&mut self, auth_token: &str) {\n\n self.auth_token = Some(auth_token.to_string())\n\n }\n", "file_path": "quicksilver-utils-async/src/desktop/request.rs", "rank": 57, "score": 26346.445091729336 }, { "content": "use futures_util::{\n\n future::{poll_fn, LocalFutureObj},\n\n stream::{FuturesUnordered, Stream},\n\n task::LocalSpawnExt,\n\n};\n\nuse std::cell::RefCell;\n\nuse std::future::Future;\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n\nuse std::task::{Poll, Waker};\n\n\n\npub struct TaskContext<'a, E> {\n\n events: Arc<RefCell<Vec<E>>>,\n\n futures: Arc<RefCell<FuturesUnordered<LocalFutureObj<'a, ()>>>>,\n\n task_waker: Arc<RefCell<Option<Waker>>>,\n\n}\n\n\n\nimpl<'a, E> Clone for TaskContext<'a, E> {\n\n fn clone(&self) -> Self {\n\n TaskContext {\n", "file_path": "quicksilver-utils-async/src/task_context.rs", "rank": 58, "score": 26346.384138260022 }, { "content": " }\n\n })\n\n 
.await\n\n }\n\n\n\n pub fn spawn<Fut>(&mut self, task: Fut)\n\n where\n\n Fut: 'static + Future<Output = ()>,\n\n {\n\n // debug!(\"Spawning new task\");\n\n self.futures.borrow().spawn_local(task).expect(\"\");\n\n if let Some(waker) = self.task_waker.replace(None) {\n\n waker.wake();\n\n }\n\n }\n\n\n\n pub fn dispatch(&self, event: E) {\n\n self.events.borrow_mut().push(event)\n\n }\n\n\n\n pub fn drain(&self) -> Vec<E> {\n\n self.events.replace(Vec::new())\n\n }\n\n}\n", "file_path": "quicksilver-utils-async/src/task_context.rs", "rank": 59, "score": 26344.72799550704 }, { "content": "use async_std::task::sleep;\n\nuse std::time::Duration;\n\n\n\npub(crate) async fn sleep_ms(ms: u32) {\n\n sleep(Duration::from_millis(ms as u64)).await\n\n}\n", "file_path": "quicksilver-utils-async/src/desktop/time.rs", "rank": 60, "score": 26338.99807935976 }, { "content": "extern crate async_std;\n\nextern crate surf;\n\n\n\npub(crate) mod request;\n\npub(crate) mod time;\n\nmod tls;\n\npub(crate) mod websocket;\n", "file_path": "quicksilver-utils-async/src/desktop/mod.rs", "rank": 61, "score": 26338.771141201294 }, { "content": "use rustls::ClientConfig;\n\nuse rustls_native_certs::load_native_certs;\n\n\n", "file_path": "quicksilver-utils-async/src/desktop/tls.rs", "rank": 62, "score": 26333.42570727503 }, { "content": "\n\n async fn post_raw(&self, uri: Uri, request_body: Bytes) -> Result<Bytes> {\n\n let raw_uri = format!(\"{}\", uri);\n\n let mut request = surf::post(raw_uri)\n\n .set_header(\"Accept\", \"application/octet-stream\")\n\n .set_header(\"Content-Type\", \"application/octet-stream\");\n\n\n\n if let Some(auth_token) = &self.auth_token {\n\n request = request.set_header(\"Authorization\", auth_token);\n\n }\n\n\n\n request = request.body_bytes(request_body);\n\n\n\n let response_bytes_vec = request\n\n .recv_bytes()\n\n .await\n\n .map_err(|e| RequestError::NativeError(format!(\"Failed making hyper request {}\", e)))?;\n\n\n\n 
Ok(Bytes::from(response_bytes_vec))\n\n }\n\n}\n", "file_path": "quicksilver-utils-async/src/desktop/request.rs", "rank": 63, "score": 26332.87618915686 }, { "content": "\n\n let result = poll_fn(move |ctx| {\n\n debug!(\"stdweb get request Polling\");\n\n let inner: &mut XhrClosureInner = &mut *xhr_closure.inner.borrow_mut();\n\n\n\n if !inner.have_set_handlers {\n\n inner.have_set_handlers = true;\n\n let waker = ctx.waker().clone();\n\n inner\n\n .xhr\n\n .add_event_listener(move |_: ProgressLoadEvent| waker.wake_by_ref());\n\n let waker = ctx.waker().clone();\n\n inner\n\n .xhr\n\n .add_event_listener(move |_: ProgressAbortEvent| waker.wake_by_ref());\n\n }\n\n\n\n let status = inner.xhr.status();\n\n let ready_state = inner.xhr.ready_state();\n\n match (status / 100, ready_state) {\n", "file_path": "quicksilver-utils-async/src/std_web/request.rs", "rank": 64, "score": 25539.415773229965 }, { "content": " waker.wake()\n\n }\n\n }) as Box<dyn FnMut(JsValue)>)\n\n };\n\n\n\n window\n\n .set_timeout_with_callback_and_timeout_and_arguments(\n\n callback.as_ref().unchecked_ref(),\n\n ms as i32,\n\n &Array::new(),\n\n )\n\n .expect(\"Invoke set_timeout\");\n\n callback.forget();\n\n\n\n poll_fn({\n\n let ready_waker = ready_waker.clone();\n\n move |cx| {\n\n trace!(\"Polling\");\n\n let inner: &mut ReadyWaker = &mut *ready_waker.borrow_mut();\n\n if inner.ready {\n", "file_path": "quicksilver-utils-async/src/web_sys/time.rs", "rank": 65, "score": 25538.005615709204 }, { "content": "use crate::request::{RequestError, Result, ServiceClient};\n\nuse async_trait::async_trait;\n\nuse bytes::Bytes;\n\nuse http::Uri;\n\n\n\n// TODO: this file is identical to desktop; clean up the package switching?\n\n\n\npub struct ServiceClientImpl {\n\n auth_token: Option<String>,\n\n}\n\n\n\n#[async_trait]\n\nimpl ServiceClient for ServiceClientImpl {\n\n // TODO: build a surf client once and re-use?\n\n fn new() -> Self {\n\n let auth_token = None;\n\n ServiceClientImpl { auth_token 
}\n\n }\n\n\n\n fn set_auth_token(&mut self, auth_token: &str) {\n", "file_path": "quicksilver-utils-async/src/web_sys/request.rs", "rank": 66, "score": 25532.640004500423 }, { "content": "use crate::request::RequestError;\n\nuse futures_util::future::poll_fn;\n\nuse log::debug;\n\nuse std::cell::RefCell;\n\nuse std::sync::Arc;\n\nuse std::task::Poll;\n\nuse std_web::{\n\n traits::*,\n\n unstable::TryInto,\n\n web::{\n\n event::{ProgressAbortEvent, ProgressLoadEvent},\n\n ArrayBuffer, TypedArray, XhrReadyState, XhrResponseType, XmlHttpRequest,\n\n },\n\n Reference,\n\n};\n\n\n", "file_path": "quicksilver-utils-async/src/std_web/request.rs", "rank": 67, "score": 25528.747540877168 }, { "content": "use std::cell::RefCell;\n\nuse std::sync::Arc;\n\nuse std::task::{Poll, Waker};\n\n\n\nuse wasm_bindgen::prelude::{Closure, JsValue};\n\nuse wasm_bindgen::JsCast;\n\n\n\nuse futures_util::future::poll_fn;\n\nuse js_sys::Array;\n\nuse web_sys::window;\n\n\n\nuse log::trace;\n\n\n", "file_path": "quicksilver-utils-async/src/web_sys/time.rs", "rank": 68, "score": 25526.47684299912 }, { "content": " Poll::Ready(())\n\n } else {\n\n inner.waker.replace(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n })\n\n .await\n\n}\n", "file_path": "quicksilver-utils-async/src/web_sys/time.rs", "rank": 69, "score": 25525.96116239559 }, { "content": "use std_web::web::wait;\n\n\n\npub async fn sleep_ms(ms: u32) {\n\n wait(ms).await\n\n}\n", "file_path": "quicksilver-utils-async/src/std_web/time.rs", "rank": 70, "score": 25524.675232926023 }, { "content": "extern crate std_web;\n\n\n\npub(crate) mod request;\n\npub(crate) mod time;\n\npub(crate) mod websocket;\n", "file_path": "quicksilver-utils-async/src/std_web/mod.rs", "rank": 71, "score": 25522.72936856046 }, { "content": "extern crate js_sys;\n\nextern crate surf;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\n\n\npub(crate) mod request;\n\npub(crate) mod time;\n\npub(crate) mod websocket;\n", "file_path": 
"quicksilver-utils-async/src/web_sys/mod.rs", "rank": 72, "score": 25522.099042316782 }, { "content": " (2, XhrReadyState::Done) => {\n\n let reference: Reference = inner\n\n .xhr\n\n .raw_response()\n\n .try_into()\n\n .expect(\"The response will always be a JS object\");\n\n Poll::Ready(\n\n reference\n\n .downcast::<ArrayBuffer>()\n\n .map(|arr| TypedArray::<u8>::from(arr).to_vec())\n\n .ok_or_else(|| {\n\n RequestError::NativeError(\"Failed to cast file into bytes\".to_string())\n\n }),\n\n )\n\n }\n\n (2, _) => Poll::Pending,\n\n (0, _) => Poll::Pending,\n\n _ => Poll::Ready(Err(RequestError::NativeError(\n\n \"Non-200 status code returned\".to_string(),\n\n ))),\n\n }\n\n })\n\n .await?;\n\n\n\n String::from_utf8(result).map_err(|e| RequestError::NativeError(format!(\"Invalid utf8 {}\", e)))\n\n}\n", "file_path": "quicksilver-utils-async/src/std_web/request.rs", "rank": 73, "score": 25521.14010009269 }, { "content": " self.auth_token = Some(auth_token.to_string())\n\n }\n\n\n\n async fn post_raw(&self, uri: Uri, request_body: Bytes) -> Result<Bytes> {\n\n let raw_uri = format!(\"{}\", uri);\n\n let mut request = surf::post(raw_uri)\n\n .set_header(\"Accept\", \"application/octet-stream\")\n\n .set_header(\"Content-Type\", \"application/octet-stream\");\n\n\n\n if let Some(auth_token) = &self.auth_token {\n\n request = request.set_header(\"Authorization\", auth_token);\n\n }\n\n\n\n request = request.body_bytes(request_body);\n\n\n\n let response_bytes_vec = request\n\n .recv_bytes()\n\n .await\n\n .map_err(|e| RequestError::NativeError(format!(\"Failed making hyper request {}\", e)))?;\n\n\n\n Ok(Bytes::from(response_bytes_vec))\n\n }\n\n}\n", "file_path": "quicksilver-utils-async/src/web_sys/request.rs", "rank": 74, "score": 25519.051359576562 }, { "content": "\n\n# Quicksilver Async Utilities\n\n\n\nComponents primarily designed to be used with the Quicksilver game\n\nengine, though quicksilver is not a dependency and these could\n\nconceievably be useful 
in any cross-platform async project.\n\n\n\nMostly this implies two things about each component\n\n\n\n1. Runs on desktop and on the web (both cargo-web and wasm-bindgen)\n\n2. Async api\n\n\n\n## Current\n\n\n\n* Cooperative tasks with an event buffer\n\n* Async sleep() function\n\n* Async Websocket Client\n\n* Async HTTP Client\n\n\n\n## Planned\n\n\n\n* More sophistocated timers (look at async-timer)\n", "file_path": "quicksilver-utils-async/README.md", "rank": 75, "score": 19975.721299843266 }, { "content": "\n\npub async fn app(_window: Window, _gfx: Graphics, mut input: Input) -> Result<()> {\n\n let mut task_context: TaskContext<CustomEvent> = TaskContext::new();\n\n\n\n task_context.spawn(tick_loop(task_context.clone()));\n\n\n\n let cloned_task_context = task_context.clone();\n\n task_context.spawn(async move {\n\n cloned_task_context.dispatch(CustomEvent::OnePingOnly);\n\n });\n\n\n\n let url_string = \"ws://echo.websocket.org\";\n\n // let url_string = \"wss://echo.websocket.org\"; // fails TLS on desktop?\n\n let ws = WebSocket::connect(&Url::parse(url_string).unwrap())\n\n .await\n\n .unwrap();\n\n task_context.spawn(read_websocket_loop(task_context.clone(), ws.clone()));\n\n\n\n 'main: loop {\n\n task_context.run_until_stalled().await;\n", "file_path": "examples/project/src/app.rs", "rank": 76, "score": 20.95904580474933 }, { "content": "\n\npub struct RenderContext {\n\n pub gfx: SendWrapper<Graphics>, // quicksilver graphics uses Rc\n\n pub window: SendWrapper<Window>, // quicksilver graphics uses *mut(0)\n\n}\n\n\n\nimpl Default for RenderContext {\n\n fn default() -> Self {\n\n panic!(\"must be injected...\")\n\n }\n\n}\n\n\n\npub struct TimeContext {\n\n pub now: f64,\n\n}\n\n\n\nimpl Default for TimeContext {\n\n fn default() -> Self {\n\n panic!(\"must be injected...\")\n\n }\n", "file_path": "quicksilver-utils-ecs/src/lib.rs", "rank": 77, "score": 14.267849762348156 }, { "content": "use super::global::Global;\n\nuse log::trace;\n\nuse 
quicksilver::geom::{Rectangle, Vector};\n\nuse specs::prelude::*;\n\n\n\nuse quicksilver_utils_ecs::*;\n\n\n\npub struct BackgroundRender;\n\n\n\nimpl<'a> System<'a> for BackgroundRender {\n\n type SystemData = (Write<'a, Global>, Write<'a, RenderContext>);\n\n\n\n fn run(&mut self, (global, mut render_ctx_resource): Self::SystemData) {\n\n trace!(\"Drawing background\");\n\n if let Some(background) = &global.background {\n\n let ctx: &mut RenderContext = &mut render_ctx_resource;\n\n let full: Rectangle = Rectangle::new(Vector::new(0., 0.), Vector::new(800., 600.));\n\n ctx.gfx.draw_image(background, full);\n\n }\n\n }\n\n}\n", "file_path": "examples/monk/src/background.rs", "rank": 78, "score": 13.817067275571828 }, { "content": "extern crate log;\n\nextern crate quicksilver;\n\nextern crate url;\n\n\n\nuse log::{debug, info};\n\nuse quicksilver_utils_async::{\n\n // request::get_resource,\n\n task_context::TaskContext,\n\n time::sleep_ms,\n\n websocket::{WebSocket, WebSocketMessage},\n\n};\n\n\n\nuse quicksilver::{\n\n graphics::Graphics,\n\n input::{Event as BlindsEvent, Input, Key},\n\n Result, Window,\n\n};\n\n\n\nuse url::Url;\n\n\n\n#[derive(Debug)]\n", "file_path": "examples/project/src/app.rs", "rank": 79, "score": 13.456533658396943 }, { "content": "\n\nuse specs::prelude::*;\n\nuse quicksilver::{geom::Vector, graphics::Color};\n\nuse log::trace;\n\nuse super::global::Global;\n\n\n\nuse quicksilver_utils_ecs::*;\n\n\n\npub struct HudRender;\n\n\n\nimpl<'a> System<'a> for HudRender {\n\n type SystemData = (Write<'a, Global>, Write<'a, RenderContext>,);\n\n\n\n fn run(&mut self, (mut global, mut render_ctx_resource): Self::SystemData) {\n\n let ctx: &mut RenderContext = &mut render_ctx_resource;\n\n if global.dialog.is_some() {\n\n // Don't display interaction options\n\n return;\n\n }\n\n if let Some(focus_object) = global.focus {\n\n let focus_text = format!(\"'E' to {}.\", focus_object.label());\n\n trace!(\"We have some text to render: {}\", 
focus_text);\n\n global.font.draw(&mut ctx.gfx, &focus_text, Color::BLACK, Vector::new(100., 500.)).expect(\"draw text\");\n\n }\n\n }\n\n}", "file_path": "examples/monk/src/hud.rs", "rank": 80, "score": 12.93676348078974 }, { "content": " ws.send(&msg).await.unwrap();\n\n }\n\n\n\n // if key_event.key() == Key::R && key_event.is_down() {\n\n // let cloned_task_context = task_context.clone();\n\n // task_context.spawn(async move {\n\n // let response = get_resource(\"https://jsonplaceholder.typicode.com/todos/1\")\n\n // .await\n\n // .expect(\"HTTP GET success\");\n\n // cloned_task_context.dispatch(CustomEvent::Resource(response))\n\n // });\n\n // }\n\n }\n\n\n\n debug!(\"BlindsEvent: {:?}\", ev);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/project/src/app.rs", "rank": 81, "score": 12.65230058283134 }, { "content": " Objects::Desk => \"use the desk\",\n\n Objects::TalkKing => \"speak with the King\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(Component)]\n\npub struct ObjectInteract {\n\n pub object: Objects,\n\n pub width: f32,\n\n pub height: f32,\n\n}\n\n\n\npub struct InteractionSystem {\n\n last_interaction: Option<Instant>,\n\n}\n\n\n\nimpl InteractionSystem {\n\n pub fn new() -> Self {\n\n InteractionSystem {\n\n last_interaction: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/monk/src/interact.rs", "rank": 82, "score": 12.481389980734724 }, { "content": "}\n\n\n\nimpl Default for InputContext {\n\n fn default() -> Self {\n\n panic!(\"must be injected...\")\n\n }\n\n}\n\n\n\npub struct WasdMovement;\n\n\n\nimpl<'a> System<'a> for WasdMovement {\n\n type SystemData = (\n\n Write<'a, InputContext>,\n\n ReadStorage<'a, PlayerInputFlag>,\n\n WriteStorage<'a, Position>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (mut input_ctx_resource, player_input_flag_storage, mut position_storage): Self::SystemData,\n", "file_path": "quicksilver-utils-ecs/src/lib.rs", "rank": 83, "score": 11.011514211763291 }, { "content": "use super::{dialog::Dialog, 
global::Global, room::Room};\n\nuse instant::Instant;\n\nuse log::{info, trace};\n\nuse quicksilver::input::Key;\n\nuse quicksilver_utils_ecs::*;\n\nuse specs::{prelude::*, Component, System, Write};\n\n\n\n#[derive(Component)]\n\npub struct PlayerInteract {\n\n pub width: f32,\n\n pub height: f32,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Objects {\n\n Bed,\n\n EnterHall,\n\n EnterBedroom,\n\n EnterCellar,\n\n EnterGarden,\n", "file_path": "examples/monk/src/interact.rs", "rank": 84, "score": 10.592293341136113 }, { "content": "\n\n for custom_event in task_context.drain().into_iter() {\n\n info!(\"CustomEvent: {:?}\", custom_event)\n\n }\n\n\n\n while let Some(ev) = input.next_event().await {\n\n if let BlindsEvent::KeyboardInput(key_event) = &ev {\n\n if key_event.key() == Key::Escape && key_event.is_down() {\n\n break 'main;\n\n }\n\n\n\n if key_event.key() == Key::P && key_event.is_down() {\n\n let cloned_task_context = task_context.clone();\n\n task_context.spawn(async move {\n\n cloned_task_context.dispatch(CustomEvent::OnePingOnly)\n\n });\n\n }\n\n\n\n if key_event.key() == Key::W && key_event.is_down() {\n\n let msg = WebSocketMessage::String(\"Hello free infrastructure\".to_string());\n", "file_path": "examples/project/src/app.rs", "rank": 85, "score": 10.142384016027762 }, { "content": " fn default() -> Self {\n\n panic!(\"Must be injected\")\n\n }\n\n}\n\n\n\nimpl Global {\n\n pub fn new(font: FontRenderer, initial_room: Room) -> Self {\n\n let player = None;\n\n let focus = None;\n\n let font = SendWrapper::new(font);\n\n let background = None;\n\n let pending_room = Some(initial_room);\n\n let dialog = Some(Dialog::Welcome);\n\n let progress = GameProgression::default();\n\n Global{player, focus, font, background, pending_room, dialog, progress}\n\n }\n\n}", "file_path": "examples/monk/src/global.rs", "rank": 86, "score": 10.134000990846886 }, { "content": "}\n\n\n\npub struct RenderSprites;\n\n\n\nimpl<'a> System<'a> for 
RenderSprites {\n\n type SystemData = (\n\n ReadStorage<'a, Position>,\n\n ReadStorage<'a, SpriteConfig>,\n\n Read<'a, TimeContext>,\n\n Write<'a, RenderContext>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (position_storage, sprite_storage, time_ctx_resource, mut render_ctx_resource): Self::SystemData,\n\n ) {\n\n let time_ctx: &TimeContext = &time_ctx_resource;\n\n let ctx: &mut RenderContext = &mut render_ctx_resource;\n\n trace!(\"Running RenderSprites\");\n\n for (position, sprite) in (&position_storage, &sprite_storage).join() {\n", "file_path": "quicksilver-utils-ecs/src/lib.rs", "rank": 87, "score": 10.09697673196851 }, { "content": "\n\nuse specs::prelude::*;\n\nuse quicksilver_utils_ecs::*;\n\nuse super::{global::Global, interact::*};\n\nuse log::info;\n\nuse quicksilver::graphics::Image;\n\nuse send_wrapper::SendWrapper;\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy)]\n\npub enum Room {\n\n Bedroom,\n\n Hall,\n\n Cellar,\n\n Garden,\n\n}\n\n\n\npub struct RoomData {\n\n pub characters_spritesheet: Image,\n\n pub bedroom_background: Image,\n\n pub bedroom_bed_sprite: Image,\n", "file_path": "examples/monk/src/room.rs", "rank": 88, "score": 9.12737323777017 }, { "content": "\n\nuse specs::prelude::*;\n\nuse super::{interact::Objects, room::Room, dialog::Dialog};\n\nuse quicksilver::graphics::{Image, FontRenderer};\n\nuse send_wrapper::SendWrapper;\n\n\n\n#[derive(Default, Clone, Copy)]\n\npub struct GameProgression {\n\n pub delegated_wheat: bool,\n\n pub growing_wheat: bool,\n\n\n\n pub delegated_baking: bool,\n\n pub baking_bread: bool,\n\n\n\n pub gave_to_charity: bool,\n\n pub charity_inspiration: bool,\n\n\n\n pub delegated_papermaking: bool,\n\n pub making_paper: bool,\n\n\n", "file_path": "examples/monk/src/global.rs", "rank": 89, "score": 8.888358564104465 }, { "content": " }\n\n}\n\n\n\npub struct DialogRender;\n\n\n\nimpl<'a> System<'a> for DialogRender {\n\n type SystemData = (Write<'a, Global>, Write<'a, RenderContext>);\n\n\n\n fn run(&mut 
self, (mut global, mut render_ctx_resource): Self::SystemData) {\n\n let ctx: &mut RenderContext = &mut render_ctx_resource;\n\n if let Some(dialog) = global.dialog {\n\n let popup_area = Rectangle::new(Vector::new(100., 100.), Vector::new(600., 400.));\n\n ctx.gfx\n\n .fill_rect(&popup_area, Color::from_rgba(200, 200, 200, 0.9));\n\n\n\n let text_area = Rectangle::new(Vector::new(120., 120.), Vector::new(560., 360.));\n\n let text = dialog.text(&global.progress);\n\n global\n\n .font\n\n .draw_wrapping(\n", "file_path": "examples/monk/src/dialog.rs", "rank": 90, "score": 8.324888640599573 }, { "content": "extern crate log;\n\nextern crate quicksilver;\n\nextern crate send_wrapper;\n\nextern crate specs;\n\n#[macro_use]\n\nextern crate specs_derive;\n\n\n\nuse log::{debug, trace};\n\nuse quicksilver::{\n\n geom::{Rectangle, Vector},\n\n graphics::{Graphics, Image},\n\n input::{Input, Key},\n\n Window,\n\n};\n\nuse send_wrapper::SendWrapper;\n\nuse specs::{prelude::*, Component, System, Write};\n\n\n\n#[derive(Component)]\n\npub struct Position {\n\n pub x: f32,\n", "file_path": "quicksilver-utils-ecs/src/lib.rs", "rank": 91, "score": 8.160475488197179 }, { "content": " scale: 2.,\n\n animation: None,\n\n };\n\n\n\n world\n\n .create_entity()\n\n .with(Position{x: 50., y: 500.})\n\n .with(beggar_sprite)\n\n .with(ObjectInteract{object: Objects::TalkBeggar, width: 64., height: 64.})\n\n .build();\n\n }\n\n\n\n let mut global = world.get_mut::<Global>().expect(\"global resource\");\n\n global.player = Some(player_entity);\n\n global.background = Some(SendWrapper::new(self.room_data.garden_background.clone()))\n\n }\n\n }\n\n\n\n let global: &mut Global = world.get_mut::<Global>().expect(\"global resource\");\n\n\n\n global.pending_room = None\n\n }\n\n }\n\n}\n", "file_path": "examples/monk/src/room.rs", "rank": 92, "score": 7.882564619850669 }, { "content": " debug!(\"Entering main loop\");\n\n\n\n loop {\n\n let now: f64 = instant::now();\n\n 
*world.write_resource::<TimeContext>() = TimeContext { now };\n\n\n\n {\n\n let ctx = world\n\n .get_mut::<RenderContext>()\n\n .expect(\"has render context\");\n\n ctx.gfx.clear(Color::from_rgba(200, 200, 200, 1.));\n\n }\n\n\n\n trace!(\"In the loop\");\n\n\n\n {\n\n let ctx = world.get_mut::<InputContext>().expect(\"has input context\");\n\n let input: &mut Input = &mut ctx.input;\n\n while let Some(ev) = input.next_event().await {\n\n debug!(\"Quicksilver event: {:?}\", ev);\n", "file_path": "quicksilver-utils-ecs/examples/adventurer.rs", "rank": 93, "score": 7.797754071285434 }, { "content": " pub bedroom_desk_sprite: Image,\n\n pub hall_background: Image,\n\n pub cellar_background: Image,\n\n pub garden_background: Image,\n\n}\n\n\n\npub struct RoomSystem {\n\n pub room_data: SendWrapper<RoomData>\n\n}\n\n\n\nimpl RoomSystem {\n\n pub fn setup_new_room(&self, world: &mut World) {\n\n\n\n let pending_room = world.fetch::<Global>().pending_room;\n\n\n\n if let Some(room) = pending_room {\n\n\n\n info!(\"Switching rooms\");\n\n\n\n {\n", "file_path": "examples/monk/src/room.rs", "rank": 94, "score": 7.693892035603298 }, { "content": "use super::{global::*, room::*};\n\nuse log::warn;\n\nuse quicksilver::{\n\n geom::{Rectangle, Vector},\n\n graphics::Color,\n\n input::{Input, Key},\n\n};\n\nuse quicksilver_utils_ecs::*;\n\nuse specs::prelude::*;\n\n\n\n#[derive(Clone, Copy)]\n\npub enum Dialog {\n\n Welcome,\n\n Greet, // Eventually write custom greetings for each monk... 
they should suggest who to write to\n\n SleepConfirm,\n\n DelegateWheat,\n\n PendingDelegateWheat,\n\n NoWheatToBake,\n\n DelegateBake,\n\n PendingDelegateBake,\n", "file_path": "examples/monk/src/dialog.rs", "rank": 95, "score": 7.3192895829666185 }, { "content": " }\n\n\n\n let mut global = world.get_mut::<Global>().expect(\"global resource\");\n\n global.player = Some(player_entity);\n\n global.background = Some(SendWrapper::new(self.room_data.hall_background.clone()))\n\n }\n\n Room::Cellar => {\n\n let player_sprite = SpriteConfig {\n\n image: SendWrapper::new(self.room_data.characters_spritesheet.clone()),\n\n row: 0,\n\n width: 32,\n\n height: 32,\n\n scale: 2.,\n\n animation: None,\n\n };\n\n\n\n let player_entity = world\n\n .create_entity()\n\n .with(Position { x: 650., y: 300. })\n\n .with(player_sprite)\n", "file_path": "examples/monk/src/room.rs", "rank": 96, "score": 7.2303464143465535 }, { "content": " loop_start_time: now,\n\n frames: frames()\n\n .get(&Animation::Idle)\n\n .expect(\"frames for idle animation\")\n\n .clone(),\n\n }),\n\n };\n\n\n\n world\n\n .create_entity()\n\n .with(Position { x: 0., y: 0. 
})\n\n .with(player_sprite)\n\n .with(PlayerInputFlag)\n\n .build();\n\n\n\n debug!(\"Created world, components, and entities\");\n\n\n\n let mut sprite_system = RenderSprites;\n\n let mut move_system = WasdMovement;\n\n\n", "file_path": "quicksilver-utils-ecs/examples/adventurer.rs", "rank": 97, "score": 7.226390813317488 }, { "content": " .build();\n\n\n\n world\n\n .create_entity()\n\n .with(Position{x: 50., y: 140.})\n\n .with(ObjectInteract{object: Objects::EnterHall, width: 100., height: 220.})\n\n .build();\n\n\n\n let mut global = world.get_mut::<Global>().expect(\"global resource\");\n\n global.player = Some(player_entity);\n\n global.background = Some(SendWrapper::new(self.room_data.bedroom_background.clone()))\n\n }\n\n Room::Hall => {\n\n let player_sprite = SpriteConfig {\n\n image: SendWrapper::new(self.room_data.characters_spritesheet.clone()),\n\n row: 0,\n\n width: 32,\n\n height: 32,\n\n scale: 2.,\n\n animation: None,\n", "file_path": "examples/monk/src/room.rs", "rank": 98, "score": 6.975898881585227 }, { "content": "pub mod app;\n\n\n\n#[cfg(feature = \"web-sys\")]\n\nuse wasm_bindgen::prelude::*;\n\n\n\n#[cfg(feature = \"web-sys\")]\n\nuse quicksilver::lifecycle::{run, Settings};\n\n\n\n#[cfg(feature = \"web-sys\")]\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n#[cfg(feature = \"web-sys\")]\n\n#[wasm_bindgen(start)]\n", "file_path": "examples/project/src/lib.rs", "rank": 99, "score": 6.958682632613692 } ]
Rust
widgetry/src/widgets/compare_times.rs
tnederlof/abstreet
4f00c8d2bbbe2ccb4b65c11b2a071d5d1f87290d
use geom::{Angle, Circle, Distance, Duration, Pt2D}; use crate::{ Color, Drawable, EventCtx, GeomBatch, GfxCtx, Line, ScreenDims, ScreenPt, ScreenRectangle, Text, TextExt, Widget, WidgetImpl, WidgetOutput, }; pub struct CompareTimes { draw: Drawable, max: Duration, top_left: ScreenPt, dims: ScreenDims, } impl CompareTimes { pub fn new_widget<I: AsRef<str>>( ctx: &mut EventCtx, x_name: I, y_name: I, points: Vec<(Duration, Duration)>, ) -> Widget { if points.is_empty() { return Widget::nothing(); } let actual_max = *points.iter().map(|(b, a)| a.max(b)).max().unwrap(); let num_labels = 5; let (max, labels) = actual_max.make_intervals_for_max(num_labels); let width = 500.0; let height = width; let mut batch = GeomBatch::new(); batch.autocrop_dims = false; let thickness = Distance::meters(2.0); for i in 1..num_labels { let x = (i as f64) / (num_labels as f64) * width; let y = (i as f64) / (num_labels as f64) * height; batch.push( Color::grey(0.5), geom::Line::new(Pt2D::new(0.0, y), Pt2D::new(width, y)) .unwrap() .make_polygons(thickness), ); batch.push( Color::grey(0.5), geom::Line::new(Pt2D::new(x, 0.0), Pt2D::new(x, height)) .unwrap() .make_polygons(thickness), ); } batch.push( Color::grey(0.5), geom::Line::new(Pt2D::new(0.0, height), Pt2D::new(width, 0.0)) .unwrap() .make_polygons(thickness), ); let circle = Circle::new(Pt2D::new(0.0, 0.0), Distance::meters(4.0)).to_polygon(); for (b, a) in points { let pt = Pt2D::new((b / max) * width, (1.0 - (a / max)) * height); let color = match a.cmp(&b) { std::cmp::Ordering::Equal => Color::YELLOW.alpha(0.5), std::cmp::Ordering::Less => Color::GREEN.alpha(0.9), std::cmp::Ordering::Greater => Color::RED.alpha(0.9), }; batch.push(color, circle.translate(pt.x(), pt.y())); } let plot = Widget::new(Box::new(CompareTimes { dims: batch.get_dims(), draw: ctx.upload(batch), max, top_left: ScreenPt::new(0.0, 0.0), })); let y_axis = Widget::custom_col( labels .iter() .rev() .map(|x| Line(x.to_string()).small().into_widget(ctx)) 
.collect(), ) .evenly_spaced(); let mut y_label = Text::from(format!("{} (minutes)", y_name.as_ref())) .render(ctx) .rotate(Angle::degrees(90.0)); y_label.autocrop_dims = true; let y_label = y_label .autocrop() .into_widget(ctx) .centered_vert() .margin_right(5); let x_axis = Widget::custom_row( labels .iter() .map(|x| Line(x.to_string()).small().into_widget(ctx)) .collect(), ) .evenly_spaced(); let x_label = format!("{} (minutes)", x_name.as_ref()) .text_widget(ctx) .centered_horiz(); let plot_width = plot.get_width_for_forcing(); Widget::custom_col(vec![ Widget::custom_row(vec![y_label, y_axis, plot]), Widget::custom_col(vec![x_axis, x_label]) .force_width(plot_width) .align_right(), ]) .container() } } impl WidgetImpl for CompareTimes { fn get_dims(&self) -> ScreenDims { self.dims } fn set_pos(&mut self, top_left: ScreenPt) { self.top_left = top_left; } fn event(&mut self, _: &mut EventCtx, _: &mut WidgetOutput) {} fn draw(&self, g: &mut GfxCtx) { g.redraw_at(self.top_left, &self.draw); if let Some(cursor) = g.canvas.get_cursor_in_screen_space() { let rect = ScreenRectangle::top_left(self.top_left, self.dims); if let Some((pct_x, pct_y)) = rect.pt_to_percent(cursor) { let thickness = Distance::meters(2.0); let mut batch = GeomBatch::new(); if let Some(l) = geom::Line::new(Pt2D::new(rect.x1, cursor.y), cursor.to_pt()) { batch.push(Color::WHITE, l.make_polygons(thickness)); } if let Some(l) = geom::Line::new(Pt2D::new(cursor.x, rect.y2), cursor.to_pt()) { batch.push(Color::WHITE, l.make_polygons(thickness)); } g.fork_screenspace(); let draw = g.upload(batch); g.redraw(&draw); let before = pct_x * self.max; let after = (1.0 - pct_y) * self.max; if after <= before { g.draw_mouse_tooltip(Text::from_multiline(vec![ Line(format!("Before: {}", before)), Line(format!("After: {}", after)), Line(format!( "{} faster (-{:.1}%)", before - after, 100.0 * (1.0 - after / before) )) .fg(Color::hex("#72CE36")), ])); } else { g.draw_mouse_tooltip(Text::from_multiline(vec![ 
Line(format!("Before: {}", before)), Line(format!("After: {}", after)), Line(format!( "{} slower (+{:.1}%)", after - before, 100.0 * (after / before - 1.0) )) .fg(Color::hex("#EB3223")), ])); } g.unfork(); } } } }
use geom::{Angle, Circle, Distance, Duration, Pt2D}; use crate::{ Color, Drawable, EventCtx, GeomBatch, GfxCtx, Line, ScreenDims, ScreenPt, ScreenRectangle, Text, TextExt, Widget, WidgetImpl, WidgetOutput, }; pub struct CompareTimes { draw: Drawable, max: Duration, top_left: ScreenPt, dims: ScreenDims, } impl CompareTimes {
} impl WidgetImpl for CompareTimes { fn get_dims(&self) -> ScreenDims { self.dims } fn set_pos(&mut self, top_left: ScreenPt) { self.top_left = top_left; } fn event(&mut self, _: &mut EventCtx, _: &mut WidgetOutput) {} fn draw(&self, g: &mut GfxCtx) { g.redraw_at(self.top_left, &self.draw); if let Some(cursor) = g.canvas.get_cursor_in_screen_space() { let rect = ScreenRectangle::top_left(self.top_left, self.dims); if let Some((pct_x, pct_y)) = rect.pt_to_percent(cursor) { let thickness = Distance::meters(2.0); let mut batch = GeomBatch::new(); if let Some(l) = geom::Line::new(Pt2D::new(rect.x1, cursor.y), cursor.to_pt()) { batch.push(Color::WHITE, l.make_polygons(thickness)); } if let Some(l) = geom::Line::new(Pt2D::new(cursor.x, rect.y2), cursor.to_pt()) { batch.push(Color::WHITE, l.make_polygons(thickness)); } g.fork_screenspace(); let draw = g.upload(batch); g.redraw(&draw); let before = pct_x * self.max; let after = (1.0 - pct_y) * self.max; if after <= before { g.draw_mouse_tooltip(Text::from_multiline(vec![ Line(format!("Before: {}", before)), Line(format!("After: {}", after)), Line(format!( "{} faster (-{:.1}%)", before - after, 100.0 * (1.0 - after / before) )) .fg(Color::hex("#72CE36")), ])); } else { g.draw_mouse_tooltip(Text::from_multiline(vec![ Line(format!("Before: {}", before)), Line(format!("After: {}", after)), Line(format!( "{} slower (+{:.1}%)", after - before, 100.0 * (after / before - 1.0) )) .fg(Color::hex("#EB3223")), ])); } g.unfork(); } } } }
pub fn new_widget<I: AsRef<str>>( ctx: &mut EventCtx, x_name: I, y_name: I, points: Vec<(Duration, Duration)>, ) -> Widget { if points.is_empty() { return Widget::nothing(); } let actual_max = *points.iter().map(|(b, a)| a.max(b)).max().unwrap(); let num_labels = 5; let (max, labels) = actual_max.make_intervals_for_max(num_labels); let width = 500.0; let height = width; let mut batch = GeomBatch::new(); batch.autocrop_dims = false; let thickness = Distance::meters(2.0); for i in 1..num_labels { let x = (i as f64) / (num_labels as f64) * width; let y = (i as f64) / (num_labels as f64) * height; batch.push( Color::grey(0.5), geom::Line::new(Pt2D::new(0.0, y), Pt2D::new(width, y)) .unwrap() .make_polygons(thickness), ); batch.push( Color::grey(0.5), geom::Line::new(Pt2D::new(x, 0.0), Pt2D::new(x, height)) .unwrap() .make_polygons(thickness), ); } batch.push( Color::grey(0.5), geom::Line::new(Pt2D::new(0.0, height), Pt2D::new(width, 0.0)) .unwrap() .make_polygons(thickness), ); let circle = Circle::new(Pt2D::new(0.0, 0.0), Distance::meters(4.0)).to_polygon(); for (b, a) in points { let pt = Pt2D::new((b / max) * width, (1.0 - (a / max)) * height); let color = match a.cmp(&b) { std::cmp::Ordering::Equal => Color::YELLOW.alpha(0.5), std::cmp::Ordering::Less => Color::GREEN.alpha(0.9), std::cmp::Ordering::Greater => Color::RED.alpha(0.9), }; batch.push(color, circle.translate(pt.x(), pt.y())); } let plot = Widget::new(Box::new(CompareTimes { dims: batch.get_dims(), draw: ctx.upload(batch), max, top_left: ScreenPt::new(0.0, 0.0), })); let y_axis = Widget::custom_col( labels .iter() .rev() .map(|x| Line(x.to_string()).small().into_widget(ctx)) .collect(), ) .evenly_spaced(); let mut y_label = Text::from(format!("{} (minutes)", y_name.as_ref())) .render(ctx) .rotate(Angle::degrees(90.0)); y_label.autocrop_dims = true; let y_label = y_label .autocrop() .into_widget(ctx) .centered_vert() .margin_right(5); let x_axis = Widget::custom_row( labels .iter() .map(|x| 
Line(x.to_string()).small().into_widget(ctx)) .collect(), ) .evenly_spaced(); let x_label = format!("{} (minutes)", x_name.as_ref()) .text_widget(ctx) .centered_horiz(); let plot_width = plot.get_width_for_forcing(); Widget::custom_col(vec![ Widget::custom_row(vec![y_label, y_axis, plot]), Widget::custom_col(vec![x_axis, x_label]) .force_width(plot_width) .align_right(), ]) .container() }
function_block-full_function
[ { "content": "pub fn make_bar(ctx: &mut EventCtx, filled_color: Color, value: usize, max: usize) -> Widget {\n\n let pct_full = if max == 0 {\n\n 0.0\n\n } else {\n\n (value as f64) / (max as f64)\n\n };\n\n let txt = Text::from(format!(\n\n \"{} / {}\",\n\n prettyprint_usize(value),\n\n prettyprint_usize(max)\n\n ));\n\n custom_bar(ctx, filled_color, pct_full, txt)\n\n}\n", "file_path": "santa/src/meters.rs", "rank": 0, "score": 288552.520514861 }, { "content": "pub fn custom_bar(ctx: &mut EventCtx, filled_color: Color, pct_full: f64, txt: Text) -> Widget {\n\n let total_width = 300.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n Color::hex(\"#666666\"),\n\n Polygon::rounded_rectangle(total_width, height, radius),\n\n );\n\n // Foreground\n\n if let Some(poly) = Polygon::maybe_rounded_rectangle(pct_full * total_width, height, radius) {\n\n batch.push(filled_color, poly);\n\n }\n\n // Text\n\n let label = txt.render_autocropped(ctx);\n\n let dims = label.get_dims();\n\n batch.append(label.translate(10.0, height / 2.0 - dims.height / 2.0));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "santa/src/meters.rs", "rank": 1, "score": 285304.75559694873 }, { "content": "#[allow(non_snake_case)]\n\npub fn Line<S: Into<String>>(text: S) -> TextSpan {\n\n TextSpan {\n\n text: text.into(),\n\n fg_color: None,\n\n size: DEFAULT_FONT_SIZE,\n\n font: DEFAULT_FONT,\n\n underlined: false,\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Text {\n\n // The bg_color will cover the entire block, but some lines can have extra highlighting.\n\n lines: Vec<(Option<Color>, Vec<TextSpan>)>,\n\n // TODO Stop using this as much as possible.\n\n bg_color: Option<Color>,\n\n}\n\n\n\nimpl From<TextSpan> for Text {\n\n fn from(line: TextSpan) -> Text {\n", "file_path": "widgetry/src/text.rs", "rank": 2, "score": 242188.08813764947 }, { "content": "/// Create a new widget by implementing this trait. 
You can instantiate your widget by calling\n\n/// `Widget::new(Box::new(instance of your new widget))`, which gives you the usual style options.\n\npub trait WidgetImpl: downcast_rs::Downcast {\n\n /// What width and height does the widget occupy? If this changes, be sure to set\n\n /// `redo_layout` to true in `event`.\n\n fn get_dims(&self) -> ScreenDims;\n\n /// Your widget's top left corner should be here. Handle mouse events and draw appropriately.\n\n fn set_pos(&mut self, top_left: ScreenPt);\n\n /// Your chance to react to an event. Any side effects outside of this widget are communicated\n\n /// through the output.\n\n fn event(&mut self, ctx: &mut EventCtx, output: &mut WidgetOutput);\n\n /// Draw the widget. Be sure to draw relative to the top-left specified by `set_pos`.\n\n fn draw(&self, g: &mut GfxCtx);\n\n /// If a new Panel is being created to replace an older one, all widgets have the chance to\n\n /// preserve state from the previous version.\n\n fn can_restore(&self) -> bool {\n\n false\n\n }\n\n /// Restore state from the previous version of this widget, with the same ID. 
Implementors must\n\n /// downcast.\n\n fn restore(&mut self, _: &mut EventCtx, _prev: &dyn WidgetImpl) {\n\n unreachable!()\n", "file_path": "widgetry/src/widgets/mod.rs", "rank": 3, "score": 220668.54045366345 }, { "content": "pub trait TextExt {\n\n fn text_widget(self, ctx: &EventCtx) -> Widget;\n\n fn batch_text(self, ctx: &EventCtx) -> Widget;\n\n}\n\n\n\nimpl TextExt for &str {\n\n fn text_widget(self, ctx: &EventCtx) -> Widget {\n\n Line(self).into_widget(ctx)\n\n }\n\n fn batch_text(self, ctx: &EventCtx) -> Widget {\n\n Line(self).batch(ctx)\n\n }\n\n}\n\n\n\nimpl TextExt for String {\n\n fn text_widget(self, ctx: &EventCtx) -> Widget {\n\n Line(self).into_widget(ctx)\n\n }\n\n fn batch_text(self, ctx: &EventCtx) -> Widget {\n\n Line(self).batch(ctx)\n", "file_path": "widgetry/src/text.rs", "rank": 4, "score": 196294.94458458247 }, { "content": "pub fn problem_matrix(ctx: &mut EventCtx, app: &App, trips: &[(Duration, isize)]) -> Widget {\n\n let points = trips;\n\n\n\n let duration_buckets = vec![\n\n Duration::ZERO,\n\n Duration::minutes(5),\n\n Duration::minutes(15),\n\n Duration::minutes(30),\n\n Duration::hours(1),\n\n Duration::hours(2),\n\n ];\n\n\n\n let num_buckets = 7;\n\n let mut matrix = Matrix::new(duration_buckets, bucketize_isizes(num_buckets, points));\n\n for (x, y) in points {\n\n matrix.add_pt(*x, *y);\n\n }\n\n matrix.draw(\n\n ctx,\n\n app,\n", "file_path": "game/src/sandbox/dashboards/trip_problems.rs", "rank": 5, "score": 185805.67889298807 }, { "content": "// TODO If this proves useful, lift to geom\n\npub fn thick_lineseries(pts: Vec<Pt2D>, width: Distance) -> Polygon {\n\n use lyon::math::{point, Point};\n\n use lyon::path::Path;\n\n use lyon::tessellation::geometry_builder::{BuffersBuilder, Positions, VertexBuffers};\n\n use lyon::tessellation::{StrokeOptions, StrokeTessellator};\n\n\n\n let mut builder = Path::builder();\n\n for (idx, pt) in pts.into_iter().enumerate() {\n\n let pt = point(pt.x() as f32, pt.y() as f32);\n\n if 
idx == 0 {\n\n builder.move_to(pt);\n\n } else {\n\n builder.line_to(pt);\n\n }\n\n }\n\n let path = builder.build();\n\n\n\n let mut geom: VertexBuffers<Point, u32> = VertexBuffers::new();\n\n let mut buffer = BuffersBuilder::new(&mut geom, Positions);\n\n StrokeTessellator::new()\n", "file_path": "widgetry/src/widgets/plots.rs", "rank": 6, "score": 179644.39038837678 }, { "content": "fn make_elevation(ctx: &EventCtx, color: Color, walking: bool, path: &Path, map: &Map) -> Widget {\n\n let mut pts: Vec<(Distance, Distance)> = Vec::new();\n\n let mut dist = Distance::ZERO;\n\n for step in path.get_steps() {\n\n if let PathStep::Turn(t) = step {\n\n pts.push((dist, map.get_i(t.parent).elevation));\n\n }\n\n dist += step.as_traversable().get_polyline(map).length();\n\n }\n\n // TODO Show roughly where we are in the trip; use distance covered by current path for this\n\n LinePlot::new_widget(\n\n ctx,\n\n vec![Series {\n\n label: if walking {\n\n \"Elevation for walking\"\n\n } else {\n\n \"Elevation for biking\"\n\n }\n\n .to_string(),\n\n color,\n\n pts,\n\n }],\n\n PlotOptions::fixed(),\n\n )\n\n}\n\n\n", "file_path": "game/src/info/trip.rs", "rank": 7, "score": 176687.52772696494 }, { "content": "/// Creates the top row for any layer panel.\n\npub fn header(ctx: &mut EventCtx, name: &str) -> Widget {\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/layers.svg\")\n\n .into_widget(ctx)\n\n .centered_vert(),\n\n name.text_widget(ctx).centered_vert(),\n\n ctx.style().btn_close_widget(ctx),\n\n ])\n\n}\n\n\n\npub const PANEL_PLACEMENT: (HorizontalAlignment, VerticalAlignment) = (\n\n HorizontalAlignment::Percent(0.02),\n\n VerticalAlignment::Percent(0.2),\n\n);\n", "file_path": "game/src/layer/mod.rs", "rank": 8, "score": 172191.25131304027 }, { "content": "/// Adjust the path to start on the polygon's border, not center.\n\npub fn trim_path(poly: &Polygon, path: Line) -> Line {\n\n for line in poly.points().windows(2) {\n\n if let Some(l1) = 
Line::new(line[0], line[1]) {\n\n if let Some(hit) = l1.intersection(&path) {\n\n if let Some(l2) = Line::new(hit, path.pt2()) {\n\n return l2;\n\n }\n\n }\n\n }\n\n }\n\n // Just give up\n\n path\n\n}\n", "file_path": "map_model/src/make/mod.rs", "rank": 9, "score": 171275.26506408077 }, { "content": "pub fn draw_star(ctx: &mut EventCtx, b: &Building) -> GeomBatch {\n\n GeomBatch::load_svg(ctx, \"system/assets/tools/star.svg\")\n\n .centered_on(b.polygon.center())\n\n .color(RewriteColor::ChangeAll(Color::BLACK))\n\n}\n\n\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 10, "score": 170676.9097647203 }, { "content": "// TODO This is A/B Street specific\n\npub fn loading_tips() -> Text {\n\n Text::from_multiline(vec![\n\n Line(\"Have you tried...\"),\n\n Line(\"\"),\n\n Line(\"- simulating cities in Britain, Taiwan, Poland, and more?\"),\n\n Line(\"- the 15-minute neighborhood tool?\"),\n\n Line(\"- exploring all of the map layers?\"),\n\n Line(\"- playing 15-minute Santa, our arcade game spin-off?\"),\n\n ])\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 11, "score": 169494.8251127086 }, { "content": "pub fn draw_unwalkable_roads(ctx: &mut EventCtx, app: &App, opts: &Options) -> Drawable {\n\n let allow_shoulders = match opts {\n\n Options::Walking(ref opts) => opts.allow_shoulders,\n\n Options::Biking => {\n\n return Drawable::empty(ctx);\n\n }\n\n };\n\n\n\n let mut batch = GeomBatch::new();\n\n 'ROADS: for road in app.map.all_roads() {\n\n if road.is_light_rail() {\n\n continue;\n\n }\n\n for (_, _, lt) in road.lanes_ltr() {\n\n if lt == LaneType::Sidewalk || (lt == LaneType::Shoulder && allow_shoulders) {\n\n continue 'ROADS;\n\n }\n\n }\n\n // TODO Skip highways\n\n batch.push(Color::BLUE.alpha(0.5), road.get_thick_polygon(&app.map));\n\n }\n\n ctx.upload(batch)\n\n}\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 12, "score": 166162.95278904692 }, { "content": "// Shorter is better\n\npub fn cmp_duration_shorter(app: &App, after: 
Duration, before: Duration) -> Vec<TextSpan> {\n\n if after.epsilon_eq(before) {\n\n vec![Line(\"same\")]\n\n } else if after < before {\n\n vec![\n\n Line((before - after).to_string(&app.opts.units)).fg(Color::GREEN),\n\n Line(\" faster\"),\n\n ]\n\n } else if after > before {\n\n vec![\n\n Line((after - before).to_string(&app.opts.units)).fg(Color::RED),\n\n Line(\" slower\"),\n\n ]\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 13, "score": 165949.6698425085 }, { "content": "pub fn color_for_mode(app: &App, m: TripMode) -> Color {\n\n match m {\n\n TripMode::Walk => app.cs.unzoomed_pedestrian,\n\n TripMode::Bike => app.cs.unzoomed_bike,\n\n TripMode::Transit => app.cs.unzoomed_bus,\n\n TripMode::Drive => app.cs.unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 14, "score": 161631.3449174002 }, { "content": "pub fn color_for_agent_type(app: &App, a: AgentType) -> Color {\n\n match a {\n\n AgentType::Pedestrian => app.cs.unzoomed_pedestrian,\n\n AgentType::Bike => app.cs.unzoomed_bike,\n\n AgentType::Bus | AgentType::Train => app.cs.unzoomed_bus,\n\n AgentType::TransitRider => app.cs.bus_trip,\n\n AgentType::Car => app.cs.unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 15, "score": 159587.59881966974 }, { "content": "pub fn color_for_trip_phase(app: &App, tpt: TripPhaseType) -> Color {\n\n match tpt {\n\n TripPhaseType::Driving => app.cs.unzoomed_car,\n\n TripPhaseType::Walking => app.cs.unzoomed_pedestrian,\n\n TripPhaseType::Biking => app.cs.bike_trip,\n\n TripPhaseType::Parking => app.cs.parking_trip,\n\n TripPhaseType::WaitingForBus(_, _) => app.cs.bus_layer,\n\n TripPhaseType::RidingBus(_, _, _) => app.cs.bus_trip,\n\n TripPhaseType::Cancelled | TripPhaseType::Finished => unreachable!(),\n\n TripPhaseType::DelayedStart => Color::YELLOW,\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 16, "score": 155754.0706184029 }, { "content": 
"// TODO copied from DrawLane\n\nfn perp_line(l: Line, length: Distance) -> Line {\n\n let pt1 = l.shift_right(length / 2.0).pt1();\n\n let pt2 = l.shift_left(length / 2.0).pt1();\n\n Line::must_new(pt1, pt2)\n\n}\n", "file_path": "map_gui/src/render/intersection.rs", "rank": 17, "score": 153284.2841018375 }, { "content": "// TODO this always does it at pt1\n\nfn perp_line(l: Line, length: Distance) -> Line {\n\n let pt1 = l.shift_right(length / 2.0).pt1();\n\n let pt2 = l.shift_left(length / 2.0).pt1();\n\n Line::must_new(pt1, pt2)\n\n}\n\n\n", "file_path": "map_gui/src/render/lane.rs", "rank": 18, "score": 153278.1007002647 }, { "content": "pub fn area(ctx: &EventCtx, app: &App, _: &mut Details, id: AreaID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, area_body(ctx, app, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "game/src/info/debug.rs", "rank": 19, "score": 150564.09398742887 }, { "content": "pub fn debug(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneDebug(id)),\n\n debug_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 20, "score": 148894.15014363884 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 21, "score": 148894.15014363884 }, { "content": "pub fn crowd(ctx: &EventCtx, app: &App, details: &mut Details, members: &[PedestrianID]) -> Widget {\n\n let header = Widget::custom_col(vec![\n\n Line(\"Pedestrian crowd\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n Widget::custom_col(vec![\n\n header,\n\n crowd_body(ctx, 
app, details, members).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/person.rs", "rank": 22, "score": 148894.15014363884 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: IntersectionID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::IntersectionInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 23, "score": 148894.15014363884 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct Label {\n\n text: Option<String>,\n\n color: Option<Color>,\n\n styled_text: Option<Text>,\n\n font_size: Option<usize>,\n\n font: Option<Font>,\n\n}\n\n\n\n// Like an image map from the old HTML days\n\npub struct MultiButton {\n\n draw: Drawable,\n\n hitboxes: Vec<(Polygon, String)>,\n\n hovering: Option<usize>,\n\n\n\n top_left: ScreenPt,\n\n dims: ScreenDims,\n\n}\n\n\n\nimpl MultiButton {\n\n pub fn new_widget(\n", "file_path": "widgetry/src/widgets/button.rs", "rank": 24, "score": 147057.40867956047 }, { "content": "struct Tab {\n\n tab_id: String,\n\n bar_item: ButtonBuilder<'static, 'static>,\n\n content: Widget,\n\n}\n\n\n\nimpl Tab {\n\n fn new(tab_id: String, bar_item: ButtonBuilder<'static, 'static>, content: Widget) -> Self {\n\n Self {\n\n tab_id,\n\n bar_item,\n\n content,\n\n }\n\n }\n\n\n\n fn build_bar_item_widget(&self, ctx: &EventCtx, active: bool) -> Widget {\n\n self.bar_item\n\n .clone()\n\n .corner_rounding(CornerRadii {\n\n top_left: DEFAULT_CORNER_RADIUS,\n", "file_path": "widgetry/src/widgets/tabs.rs", "rank": 25, "score": 147057.40867956047 }, { "content": "fn build_jump_to_delay_button(ctx: &EventCtx, delay: Duration) -> Widget {\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(format!(\"Jump to next {} delay\", delay))\n\n .hotkey(Key::Enter)\n\n .build_widget(ctx, \"jump to delay\")\n\n .centered_horiz()\n\n .margin_above(16)\n\n}\n", "file_path": "game/src/sandbox/time_warp.rs", "rank": 26, 
"score": 144972.21517323705 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgInfo(id)),\n\n info_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 27, "score": 144431.56454927992 }, { "content": "pub fn people(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgPeople(id)),\n\n people_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 28, "score": 144431.56454927992 }, { "content": "struct LayoutStyle {\n\n bg_color: Option<Color>,\n\n // (thickness, color)\n\n outline: Option<(f64, Color)>,\n\n corner_rounding: CornerRounding,\n\n style: Style,\n\n}\n\n\n\n// Layouting\n\n// TODO Maybe I just want margin, not padding. And maybe more granular controls per side. 
And to\n\n// apply margin to everything in a row or column.\n\n// TODO Row and columns feel backwards when using them.\n\nimpl Widget {\n\n pub fn centered(mut self) -> Widget {\n\n self.layout.style.align_items = AlignItems::Center;\n\n self.layout.style.justify_content = JustifyContent::SpaceAround;\n\n self\n\n }\n\n\n\n pub fn centered_horiz(self) -> Widget {\n", "file_path": "widgetry/src/widgets/mod.rs", "rank": 29, "score": 143873.56211706199 }, { "content": "pub fn bus_status(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n Widget::custom_col(vec![\n\n bus_header(ctx, app, details, id, Tab::BusStatus(id)),\n\n bus_status_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 30, "score": 142904.88970065262 }, { "content": "pub fn route(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusRouteID) -> Widget {\n\n let header = {\n\n let map = &app.primary.map;\n\n let route = map.get_br(id);\n\n\n\n Widget::row(vec![\n\n Line(format!(\"Route {}\", route.short_name))\n\n .small_heading()\n\n .into_widget(ctx),\n\n header_btns(ctx),\n\n ])\n\n };\n\n\n\n Widget::custom_col(vec![\n\n header,\n\n route_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 31, "score": 142904.88970065262 }, { "content": "pub fn stop(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusStopID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(\"Bus stop\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, stop_body(ctx, app, details, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 32, "score": 142904.88970065262 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: ParkingLotID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, details, id, Tab::ParkingLot(id)),\n\n info_body(ctx, app, 
id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/parking_lot.rs", "rank": 33, "score": 141443.0319028881 }, { "content": "struct Column<A, T> {\n\n name: String,\n\n render: Box<dyn Fn(&EventCtx, &A, &T) -> GeomBatch>,\n\n col: Col<T>,\n\n}\n\n\n\npub struct Filter<A, T, F> {\n\n pub state: F,\n\n pub to_controls: Box<dyn Fn(&mut EventCtx, &A, &F) -> Widget>,\n\n pub from_controls: Box<dyn Fn(&Panel) -> F>,\n\n pub apply: Box<dyn Fn(&F, &T, &A) -> bool>,\n\n}\n\n\n\nimpl<A, T, F> Table<A, T, F> {\n\n pub fn new(\n\n id: impl Into<String>,\n\n data: Vec<T>,\n\n label_per_row: Box<dyn Fn(&T) -> String>,\n\n default_sort_by: &str,\n\n filter: Filter<A, T, F>,\n", "file_path": "widgetry/src/widgets/table.rs", "rank": 34, "score": 138545.19254914904 }, { "content": "pub fn draw_isochrone(\n\n app: &App,\n\n time_to_reach_building: &HashMap<BuildingID, Duration>,\n\n thresholds: &[f64],\n\n colors: &[Color],\n\n) -> GeomBatch {\n\n // To generate the polygons covering areas between 0-5 mins, 5-10 mins, etc, we have to feed\n\n // in a 2D grid of costs. 
Use a 100x100 meter resolution.\n\n let bounds = app.map.get_bounds();\n\n let resolution_m = 100.0;\n\n // The costs we're storing are currenly durations, but the contour crate needs f64, so\n\n // just store the number of seconds.\n\n let mut grid: Grid<f64> = Grid::new(\n\n (bounds.width() / resolution_m).ceil() as usize,\n\n (bounds.height() / resolution_m).ceil() as usize,\n\n 0.0,\n\n );\n\n\n\n // Calculate the cost from the start building to every other building in the map\n\n for (b, cost) in time_to_reach_building {\n", "file_path": "fifteen_min/src/isochrone.rs", "rank": 35, "score": 137136.88773477473 }, { "content": "#[cfg(not(unix))]\n\npub fn clear_current_line() {\n\n print!(\"\\r\");\n\n}\n\n\n", "file_path": "abstutil/src/time.rs", "rank": 36, "score": 137127.7153579531 }, { "content": "pub trait SpinnerValue:\n\n Copy\n\n + PartialOrd\n\n + std::fmt::Display\n\n + std::ops::Add<Output = Self>\n\n + std::ops::AddAssign\n\n + std::ops::Sub<Output = Self>\n\n + std::ops::SubAssign\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n}\n\n\n\nimpl<T> SpinnerValue for T where\n\n T: Copy\n\n + PartialOrd\n\n + std::fmt::Display\n\n + std::ops::Add<Output = Self>\n\n + std::ops::AddAssign\n\n + std::ops::Sub<Output = Self>\n", "file_path": "widgetry/src/widgets/spinner.rs", "rank": 37, "score": 136790.42917871373 }, { "content": "pub fn list_names<F: Fn(TextSpan) -> TextSpan>(txt: &mut Text, styler: F, names: BTreeSet<String>) {\n\n let len = names.len();\n\n for (idx, n) in names.into_iter().enumerate() {\n\n if idx != 0 {\n\n if idx == len - 1 {\n\n if len == 2 {\n\n txt.append(Line(\" and \"));\n\n } else {\n\n txt.append(Line(\", and \"));\n\n }\n\n } else {\n\n txt.append(Line(\", \"));\n\n }\n\n }\n\n txt.append(styler(Line(n)));\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 38, "score": 135557.74780454853 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct ButtonStateStyle<'a, 'c> {\n\n image: Option<Image<'a, 'c>>,\n\n 
label: Option<Label>,\n\n outline: Option<OutlineStyle>,\n\n bg_color: Option<Color>,\n\n custom_batch: Option<GeomBatch>,\n\n}\n\n\n\n// can we take 'b out? and make the func that uses it generic?\n\nimpl<'b, 'a: 'b, 'c> ButtonBuilder<'a, 'c> {\n\n pub fn new() -> Self {\n\n ButtonBuilder {\n\n padding: EdgeInsets {\n\n top: 8.0,\n\n bottom: 8.0,\n\n left: 16.0,\n\n right: 16.0,\n\n },\n\n stack_spacing: 10.0,\n\n ..Default::default()\n", "file_path": "widgetry/src/widgets/button.rs", "rank": 39, "score": 135553.68159237356 }, { "content": "pub fn draw_signal_stage(\n\n prerender: &Prerender,\n\n stage: &Stage,\n\n idx: usize,\n\n i: IntersectionID,\n\n time_left: Option<Duration>,\n\n batch: &mut GeomBatch,\n\n app: &dyn AppLike,\n\n signal_style: TrafficSignalStyle,\n\n) {\n\n let signal = app.map().get_traffic_signal(i);\n\n\n\n match signal_style {\n\n TrafficSignalStyle::Brian => {\n\n let mut dont_walk = BTreeSet::new();\n\n let mut crossed_roads = BTreeSet::new();\n\n for m in signal.movements.keys() {\n\n if m.crosswalk {\n\n dont_walk.insert(m);\n\n // TODO This is incorrect; some crosswalks hop over intermediate roads. 
How do\n", "file_path": "map_gui/src/render/traffic_signal.rs", "rank": 40, "score": 129139.79289248155 }, { "content": "pub fn draw_stage_number(\n\n app: &dyn AppLike,\n\n prerender: &Prerender,\n\n i: IntersectionID,\n\n idx: usize,\n\n batch: &mut GeomBatch,\n\n) {\n\n let radius = Distance::meters(1.0);\n\n let center = app.map().get_i(i).polygon.polylabel();\n\n batch.push(\n\n Color::hex(\"#5B5B5B\"),\n\n Circle::new(center, radius).to_polygon(),\n\n );\n\n batch.append(\n\n Text::from(Line(format!(\"{}\", idx + 1)).fg(Color::WHITE))\n\n .render_autocropped(prerender)\n\n .scale(0.075)\n\n .centered_on(center),\n\n );\n\n}\n\n\n", "file_path": "map_gui/src/render/traffic_signal.rs", "rank": 41, "score": 129139.79289248155 }, { "content": "fn zoomed_color_car(input: &DrawCarInput, sim: &Sim, cs: &ColorScheme) -> Color {\n\n if input.id.vehicle_type == VehicleType::Bus {\n\n cs.bus_body\n\n } else if input.id.vehicle_type == VehicleType::Train {\n\n cs.train_body\n\n } else {\n\n let color = match input.status {\n\n CarStatus::Moving => cs.rotating_color_agents(input.id.id),\n\n CarStatus::Parked => cs.parked_car,\n\n };\n\n grey_out_unhighlighted_people(color, &input.person, sim)\n\n }\n\n}\n", "file_path": "map_gui/src/render/car.rs", "rank": 42, "score": 126196.52721706335 }, { "content": "pub fn make_crosswalk(batch: &mut GeomBatch, turn: &Turn, map: &Map, cs: &ColorScheme) {\n\n if make_rainbow_crosswalk(batch, turn, map) {\n\n return;\n\n }\n\n\n\n // This size also looks better for shoulders\n\n let width = SIDEWALK_THICKNESS;\n\n // Start at least width out to not hit sidewalk corners. Also account for the thickness of the\n\n // crosswalk line itself. 
Center the lines inside these two boundaries.\n\n let boundary = width;\n\n let tile_every = width * 0.6;\n\n let line = {\n\n // The middle line in the crosswalk geometry is the main crossing line.\n\n let pts = turn.geom.points();\n\n if pts.len() < 3 {\n\n println!(\n\n \"Not rendering crosswalk for {}; its geometry was squished earlier\",\n\n turn.id\n\n );\n\n return;\n", "file_path": "map_gui/src/render/intersection.rs", "rank": 43, "score": 123938.32835976189 }, { "content": "pub fn round(cost: Duration) -> usize {\n\n // Round up! 0 cost edges are ignored\n\n (cost.inner_seconds().round() as usize).max(1)\n\n}\n", "file_path": "map_model/src/pathfind/ch.rs", "rank": 44, "score": 122627.0173442437 }, { "content": "fn render_line(spans: Vec<TextSpan>, tolerance: f32, assets: &Assets) -> GeomBatch {\n\n // Just set a sufficiently large view box\n\n let mut svg = r##\"<svg width=\"9999\" height=\"9999\" viewBox=\"0 0 9999 9999\" xmlns=\"http://www.w3.org/2000/svg\">\"##.to_string();\n\n\n\n write!(&mut svg, r##\"<text x=\"0\" y=\"0\" xml:space=\"preserve\">\"##,).unwrap();\n\n\n\n let mut contents = String::new();\n\n for span in spans {\n\n let fg_color = span.fg_color_for_style(&assets.style.borrow());\n\n write!(\n\n &mut contents,\n\n r##\"<tspan font-size=\"{}\" font-family=\"{}\" {} fill=\"{}\" fill-opacity=\"{}\" {}>{}</tspan>\"##,\n\n span.size,\n\n span.font.family(),\n\n match span.font {\n\n Font::OverpassBold => \"font-weight=\\\"bold\\\"\",\n\n Font::OverpassSemiBold => \"font-weight=\\\"600\\\"\",\n\n _ => \"\",\n\n },\n\n fg_color.as_hex(),\n", "file_path": "widgetry/src/text.rs", "rank": 45, "score": 122033.7472338768 }, { "content": "fn header_btns(ctx: &EventCtx) -> Widget {\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/location.svg\")\n\n .hotkey(Key::J)\n\n .build_widget(ctx, \"jump to object\"),\n\n ctx.style().btn_close_widget(ctx),\n\n ])\n\n .align_right()\n\n}\n\n\n", "file_path": 
"game/src/info/mod.rs", "rank": 46, "score": 119035.5403740968 }, { "content": "fn modulo_color(colors: &[Color], idx: usize) -> Color {\n\n colors[idx % colors.len()]\n\n}\n\n\n", "file_path": "map_gui/src/colors.rs", "rank": 47, "score": 117308.56267442062 }, { "content": "pub trait Axis<T>: 'static + Copy + std::cmp::Ord {\n\n // percent is [0.0, 1.0]\n\n fn from_percent(&self, percent: f64) -> T;\n\n fn to_percent(self, max: T) -> f64;\n\n fn prettyprint(self) -> String;\n\n // For order of magnitude calculations\n\n fn to_f64(self) -> f64;\n\n fn from_f64(&self, x: f64) -> T;\n\n fn zero() -> T;\n\n}\n\n\n\nimpl Axis<usize> for usize {\n\n fn from_percent(&self, percent: f64) -> usize {\n\n ((*self as f64) * percent) as usize\n\n }\n\n fn to_percent(self, max: usize) -> f64 {\n\n if max == 0 {\n\n 0.0\n\n } else {\n\n (self as f64) / (max as f64)\n", "file_path": "widgetry/src/widgets/plots.rs", "rank": 48, "score": 112968.3542373259 }, { "content": "pub fn unzoomed_agent_radius(vt: Option<VehicleType>) -> Distance {\n\n // Lane thickness is a little hard to see, so double it. 
Most of the time, the circles don't\n\n // leak out of the road too much.\n\n if vt.is_some() {\n\n 4.0 * NORMAL_LANE_THICKNESS\n\n } else {\n\n 4.0 * SIDEWALK_THICKNESS\n\n }\n\n}\n\n\n", "file_path": "map_gui/src/render/mod.rs", "rank": 49, "score": 112105.8373756592 }, { "content": "/// Draw a start marker pointing at something.\n\npub fn start_marker<P: AsRef<Prerender>>(prerender: &P, pt: Pt2D, scale: f64) -> GeomBatch {\n\n GeomBatch::load_svg(prerender, \"system/assets/timeline/start_pos.svg\")\n\n .scale(scale)\n\n .centered_on(pt)\n\n .color(RewriteColor::ChangeAlpha(0.8))\n\n}\n\n\n", "file_path": "map_gui/src/tools/icons.rs", "rank": 50, "score": 111925.55458736082 }, { "content": "/// Draw a goal marker pointing at something.\n\npub fn goal_marker<P: AsRef<Prerender>>(prerender: &P, pt: Pt2D, scale: f64) -> GeomBatch {\n\n GeomBatch::load_svg(prerender, \"system/assets/timeline/goal_pos.svg\")\n\n .scale(scale)\n\n .centered_on(pt)\n\n .color(RewriteColor::ChangeAlpha(0.8))\n\n}\n", "file_path": "map_gui/src/tools/icons.rs", "rank": 51, "score": 111925.55458736082 }, { "content": "fn make_pagination(ctx: &mut EventCtx, total: usize, skip: usize) -> Widget {\n\n let next = ctx\n\n .style()\n\n .btn_next()\n\n .disabled(skip + 1 + ROWS >= total)\n\n .hotkey(Key::RightArrow);\n\n let prev = ctx\n\n .style()\n\n .btn_prev()\n\n .disabled(skip == 0)\n\n .hotkey(Key::LeftArrow);\n\n\n\n Widget::row(vec![\n\n prev.build_widget(ctx, \"previous\"),\n\n format!(\n\n \"{}-{} of {}\",\n\n if total > 0 {\n\n prettyprint_usize(skip + 1)\n\n } else {\n\n \"0\".to_string()\n\n },\n\n prettyprint_usize((skip + 1 + ROWS).min(total)),\n\n prettyprint_usize(total)\n\n )\n\n .text_widget(ctx)\n\n .centered_vert(),\n\n next.build_widget(ctx, \"next\"),\n\n ])\n\n}\n\n\n", "file_path": "widgetry/src/widgets/table.rs", "rank": 52, "score": 111831.58213666664 }, { "content": "fn draw_banned_turns(ctx: &mut EventCtx, app: &App) -> Drawable {\n\n let mut batch = 
GeomBatch::new();\n\n let map = &app.primary.map;\n\n for i in map.all_intersections() {\n\n let mut pairs: HashSet<(RoadID, RoadID)> = HashSet::new();\n\n // Don't call out one-ways, so use incoming/outgoing roads, and just for cars.\n\n for l1 in i.get_incoming_lanes(map, PathConstraints::Car) {\n\n for l2 in i.get_outgoing_lanes(map, PathConstraints::Car) {\n\n pairs.insert((map.get_l(l1).parent, map.get_l(l2).parent));\n\n }\n\n }\n\n for t in &i.turns {\n\n let r1 = map.get_l(t.id.src).parent;\n\n let r2 = map.get_l(t.id.dst).parent;\n\n pairs.remove(&(r1, r2));\n\n }\n\n\n\n for (r1, r2) in pairs {\n\n if let Ok(pl) = PolyLine::new(vec![\n\n map.get_r(r1).center_pts.middle(),\n", "file_path": "game/src/debug/mod.rs", "rank": 53, "score": 111710.99093063895 }, { "content": "fn draw_arterial_crosswalks(ctx: &mut EventCtx, app: &App) -> Drawable {\n\n let mut batch = GeomBatch::new();\n\n let map = &app.primary.map;\n\n for turn in map.all_turns() {\n\n if turn.is_crossing_arterial_intersection(map) {\n\n batch.push(\n\n Color::RED,\n\n turn.geom\n\n .make_arrow(Distance::meters(2.0), ArrowCap::Triangle),\n\n );\n\n }\n\n }\n\n ctx.upload(batch)\n\n}\n", "file_path": "game/src/debug/mod.rs", "rank": 54, "score": 111710.99093063895 }, { "content": "pub fn make_legend<X: Axis<X>, Y: Axis<Y>>(\n\n ctx: &EventCtx,\n\n series: &[Series<X, Y>],\n\n opts: &PlotOptions<X, Y>,\n\n) -> Widget {\n\n let mut row = Vec::new();\n\n let mut seen = HashSet::new();\n\n for s in series {\n\n if seen.contains(&s.label) {\n\n continue;\n\n }\n\n seen.insert(s.label.clone());\n\n if opts.filterable {\n\n row.push(Toggle::colored_checkbox(\n\n ctx,\n\n &s.label,\n\n s.color,\n\n !opts.disabled.contains(&s.label),\n\n ));\n\n } else {\n", "file_path": "widgetry/src/widgets/plots.rs", "rank": 55, "score": 110837.77636632278 }, { "content": "// TODO Can we automatically transform text and SVG colors?\n\nfn cutscene_pt1_task(ctx: &mut EventCtx) -> Widget {\n\n let icon_builder = 
Image::empty().color(Color::BLACK).dims(50.0);\n\n Widget::custom_col(vec![\n\n Text::from_multiline(vec![\n\n Line(format!(\n\n \"Don't let anyone be delayed by one traffic signal more than {}!\",\n\n THRESHOLD\n\n ))\n\n .fg(Color::BLACK),\n\n Line(\"Survive as long as possible through 24 hours of a busy weekday.\")\n\n .fg(Color::BLACK),\n\n ])\n\n .into_widget(ctx)\n\n .margin_below(30),\n\n Widget::custom_row(vec![\n\n Widget::col(vec![\n\n Line(\"Time\").fg(Color::BLACK).into_widget(ctx),\n\n icon_builder\n\n .clone()\n\n .source_path(\"system/assets/tools/time.svg\")\n", "file_path": "game/src/sandbox/gameplay/fix_traffic_signals.rs", "rank": 56, "score": 109716.10569243794 }, { "content": "// TODO Kinda misnomer\n\npub fn tool_panel(ctx: &mut EventCtx) -> Panel {\n\n Panel::new_builder(Widget::row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/home.svg\")\n\n .hotkey(Key::Escape)\n\n .build_widget(ctx, \"back\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/settings.svg\")\n\n .build_widget(ctx, \"settings\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Left, VerticalAlignment::BottomAboveOSD)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 57, "score": 108003.01731314801 }, { "content": "pub fn angle_from_arrow_keys(ctx: &EventCtx) -> Option<Angle> {\n\n let mut x: f64 = 0.0;\n\n let mut y: f64 = 0.0;\n\n if ctx.is_key_down(Key::LeftArrow) || ctx.is_key_down(Key::A) {\n\n x -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::RightArrow) || ctx.is_key_down(Key::D) {\n\n x += 1.0;\n\n }\n\n if ctx.is_key_down(Key::UpArrow) || ctx.is_key_down(Key::W) {\n\n y -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::DownArrow) || ctx.is_key_down(Key::S) {\n\n y += 1.0;\n\n }\n\n\n\n if x == 0.0 && y == 0.0 {\n\n return None;\n\n }\n\n Some(Angle::new_rads(y.atan2(x)))\n\n}\n", "file_path": "santa/src/controls.rs", "rank": 58, "score": 108003.01731314801 }, { "content": "// TODO Dedupe with the version in helpers\n\nfn 
cmp_duration_shorter(after: Duration, before: Duration) -> TextSpan {\n\n if after.epsilon_eq(before) {\n\n Line(\"no change\").small()\n\n } else if after < before {\n\n Line(format!(\"{} faster\", before - after))\n\n .small()\n\n .fg(Color::GREEN)\n\n } else if after > before {\n\n Line(format!(\"{} slower\", after - before))\n\n .small()\n\n .fg(Color::RED)\n\n } else {\n\n unreachable!()\n\n }\n\n}\n", "file_path": "game/src/info/person.rs", "rank": 59, "score": 107616.40972428792 }, { "content": "/// If the sim has highlighted people, then fade all others out.\n\nfn grey_out_unhighlighted_people(color: Color, person: &Option<PersonID>, sim: &Sim) -> Color {\n\n if let Some(ref highlighted) = sim.get_highlighted_people() {\n\n if person\n\n .as_ref()\n\n .map(|p| !highlighted.contains(p))\n\n .unwrap_or(false)\n\n {\n\n return color.tint(0.5);\n\n }\n\n }\n\n color\n\n}\n", "file_path": "map_gui/src/render/mod.rs", "rank": 60, "score": 107427.43749511731 }, { "content": "pub fn maybe_exit_sandbox(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(ChooseSomething::new_state(\n\n ctx,\n\n \"Are you ready to leave this mode?\",\n\n vec![\n\n Choice::string(\"keep playing\"),\n\n Choice::string(\"quit to main screen\").key(Key::Q),\n\n ],\n\n Box::new(|resp, ctx, app| {\n\n if resp == \"keep playing\" {\n\n return Transition::Pop;\n\n }\n\n\n\n if app.primary.map.unsaved_edits() {\n\n return Transition::Multi(vec![\n\n Transition::Push(Box::new(BackToMainMenu)),\n\n Transition::Push(SaveEdits::new_state(\n\n ctx,\n\n app,\n\n \"Do you want to save your proposal first?\",\n", "file_path": "game/src/sandbox/mod.rs", "rank": 61, "score": 106936.47791429906 }, { "content": "fn build_jump_to_time_btn(ctx: &EventCtx, target: Time) -> Widget {\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(format!(\"Jump to {}\", target.ampm_tostring()))\n\n .hotkey(Key::Enter)\n\n .build_widget(ctx, \"jump to time\")\n\n .centered_horiz()\n\n .margin_above(16)\n\n}\n\n\n", 
"file_path": "game/src/sandbox/time_warp.rs", "rank": 62, "score": 106116.78639617984 }, { "content": "pub fn import_grid2demand(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(FilePicker::new_state(\n\n ctx,\n\n None,\n\n Box::new(|ctx, app, maybe_path| {\n\n if let Ok(Some(path)) = maybe_path {\n\n Transition::Replace(RunCommand::new_state(\n\n ctx,\n\n app,\n\n vec![\n\n find_exe(\"import_grid2demand\"),\n\n format!(\"--map={}\", app.primary.map.get_name().path()),\n\n format!(\"--input={}\", path),\n\n ],\n\n Box::new(|_, app, success, _| {\n\n if success {\n\n // Clear out the cached scenario. If we repeatedly use this import, the\n\n // scenario name is always the same, but the file is changing.\n\n app.primary.scenario = None;\n\n Transition::Replace(SandboxMode::simple_new(\n", "file_path": "game/src/sandbox/gameplay/freeform/importers.rs", "rank": 63, "score": 105923.12295276514 }, { "content": "pub fn import_json(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(FilePicker::new_state(\n\n ctx,\n\n None,\n\n Box::new(|ctx, app, maybe_path| {\n\n if let Ok(Some(path)) = maybe_path {\n\n let result = ctx.loading_screen(\"import JSON scenario\", |_, mut timer| {\n\n import_json_scenario(&app.primary.map, path, &mut timer)\n\n });\n\n match result {\n\n Ok(scenario_name) => {\n\n // Clear out the cached scenario. 
If we repeatedly use this import, the\n\n // scenario name is always the same, but the file is changing.\n\n app.primary.scenario = None;\n\n Transition::Replace(SandboxMode::simple_new(\n\n app,\n\n GameplayMode::PlayScenario(\n\n app.primary.map.get_name().clone(),\n\n scenario_name,\n\n Vec::new(),\n", "file_path": "game/src/sandbox/gameplay/freeform/importers.rs", "rank": 64, "score": 105923.12295276514 }, { "content": "pub fn build_graph_for_pedestrians(map: &Map) -> DiGraphMap<WalkingNode, Duration> {\n\n let max_speed = Some(crate::MAX_WALKING_SPEED);\n\n let mut graph: DiGraphMap<WalkingNode, Duration> = DiGraphMap::new();\n\n for l in map.all_lanes().values() {\n\n if l.is_walkable() {\n\n let cost = l.length()\n\n / Traversable::Lane(l.id).max_speed_along(\n\n max_speed,\n\n PathConstraints::Pedestrian,\n\n map,\n\n );\n\n let n1 = WalkingNode::SidewalkEndpoint(l.get_directed_parent(), true);\n\n let n2 = WalkingNode::SidewalkEndpoint(l.get_directed_parent(), false);\n\n graph.add_edge(n1, n2, cost);\n\n graph.add_edge(n2, n1, cost);\n\n\n\n for turn in map.get_turns_for(l.id, PathConstraints::Pedestrian) {\n\n graph.add_edge(\n\n WalkingNode::SidewalkEndpoint(\n\n l.get_directed_parent(),\n", "file_path": "map_model/src/pathfind/dijkstra.rs", "rank": 65, "score": 104995.05596522902 }, { "content": "fn options_to_controls(ctx: &mut EventCtx, opts: &Options) -> Widget {\n\n let mut rows = vec![Toggle::choice(\n\n ctx,\n\n \"walking / biking\",\n\n \"walking\",\n\n \"biking\",\n\n None,\n\n match opts {\n\n Options::Walking(_) => true,\n\n Options::Biking => false,\n\n },\n\n )];\n\n match opts {\n\n Options::Walking(ref opts) => {\n\n rows.push(Toggle::switch(\n\n ctx,\n\n \"Allow walking on the shoulder of the road without a sidewalk\",\n\n None,\n\n opts.allow_shoulders,\n\n ));\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 66, "score": 104846.71539617745 }, { "content": "fn make_btn(ctx: &mut EventCtx, num: usize) -> Widget {\n\n let title = 
match num {\n\n 0 => \"Record 0 intersections\".to_string(),\n\n 1 => \"Record 1 intersection\".to_string(),\n\n _ => format!(\"Record {} intersections\", num),\n\n };\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(title)\n\n .disabled(num == 0)\n\n .hotkey(Key::Enter)\n\n .build_widget(ctx, \"record\")\n\n}\n", "file_path": "game/src/sandbox/misc_tools.rs", "rank": 67, "score": 103835.73664922814 }, { "content": "fn challenge_header(ctx: &mut EventCtx, title: &str) -> Widget {\n\n Widget::row(vec![\n\n Line(title).small_heading().into_widget(ctx).centered_vert(),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/info.svg\")\n\n .build_widget(ctx, \"instructions\")\n\n .centered_vert(),\n\n Widget::vert_separator(ctx, 50.0),\n\n ctx.style()\n\n .btn_outline\n\n .icon_text(\"system/assets/tools/pencil.svg\", \"Edit map\")\n\n .hotkey(lctrl(Key::E))\n\n .build_widget(ctx, \"edit map\")\n\n .centered_vert(),\n\n ])\n\n .padding(5)\n\n}\n\n\n\npub struct FinalScore {\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 68, "score": 103835.73664922814 }, { "content": "fn make_tool_panel(ctx: &mut EventCtx, app: &App) -> Widget {\n\n let buttons = ctx\n\n .style()\n\n .btn_floating\n\n .btn()\n\n .image_dims(ScreenDims::square(20.0))\n\n // the default transparent button background is jarring for these buttons which are floating\n\n // in a transparent panel.\n\n .bg_color(app.cs.inner_panel_bg, ControlState::Default)\n\n .padding(8);\n\n\n\n Widget::col(vec![\n\n (if ctx.canvas.cam_zoom >= app.opts.min_zoom_for_detail {\n\n buttons\n\n .clone()\n\n .image_path(\"system/assets/minimap/zoom_out_fully.svg\")\n\n .build_widget(ctx, \"zoom out fully\")\n\n } else {\n\n buttons\n\n .clone()\n", "file_path": "game/src/sandbox/minimap.rs", "rank": 69, "score": 103835.73664922814 }, { "content": "fn make_btn(ctx: &mut EventCtx, num: usize) -> Widget {\n\n let title = match num {\n\n 0 => \"Edit 0 signals\".to_string(),\n\n 1 => \"Edit 1 
signal\".to_string(),\n\n _ => format!(\"Edit {} signals\", num),\n\n };\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(title)\n\n .disabled(num == 0)\n\n .hotkey(hotkeys(vec![Key::Enter, Key::E]))\n\n .build_widget(ctx, \"edit\")\n\n}\n", "file_path": "game/src/edit/traffic_signals/picker.rs", "rank": 70, "score": 102873.94533308121 }, { "content": "use geom::{Angle, Bounds, Circle, Distance, FindClosest, PolyLine, Pt2D};\n\n\n\nuse crate::widgets::plots::{make_legend, thick_lineseries, Axis, PlotOptions, Series};\n\nuse crate::{\n\n Color, Drawable, EventCtx, GeomBatch, GfxCtx, ScreenDims, ScreenPt, ScreenRectangle, Text,\n\n TextExt, Widget, WidgetImpl, WidgetOutput,\n\n};\n\n\n\npub struct LinePlot<X: Axis<X>, Y: Axis<Y>> {\n\n draw: Drawable,\n\n\n\n // The geometry here is in screen-space.\n\n max_x: X,\n\n max_y: Y,\n\n closest: FindClosest<String>,\n\n\n\n top_left: ScreenPt,\n\n dims: ScreenDims,\n\n}\n\n\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 71, "score": 102375.48777652676 }, { "content": "use geom::Polygon;\n\n\n\nuse crate::{\n\n Drawable, EventCtx, GeomBatch, GfxCtx, ScreenDims, ScreenPt, ScreenRectangle, Text, Widget,\n\n WidgetImpl, WidgetOutput,\n\n};\n\n\n\n// Just draw something, no interaction.\n\npub struct JustDraw {\n\n pub draw: Drawable,\n\n\n\n pub top_left: ScreenPt,\n\n pub dims: ScreenDims,\n\n}\n\n\n\nimpl JustDraw {\n\n pub(crate) fn wrap(ctx: &EventCtx, batch: GeomBatch) -> Widget {\n\n Widget::new(Box::new(JustDraw {\n\n dims: batch.get_dims(),\n\n draw: ctx.upload(batch),\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 72, "score": 102371.29862083642 }, { "content": "enum Dims {\n\n MaxPercent(Percent, Percent),\n\n ExactPercent(f64, f64),\n\n ExactHeight(f64),\n\n ExactSize(ScreenDims),\n\n}\n\n\n\nimpl PanelBuilder {\n\n pub fn build(mut self, ctx: &mut EventCtx) -> Panel {\n\n self.top_level = self.top_level.padding(16).bg(ctx.style.panel_bg);\n\n self.build_custom(ctx)\n\n }\n\n\n\n pub fn 
build_custom(self, ctx: &mut EventCtx) -> Panel {\n\n let mut panel = Panel {\n\n top_level: self.top_level,\n\n\n\n horiz: self.horiz,\n\n vert: self.vert,\n\n dims: self.dims,\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 73, "score": 102358.6432559562 }, { "content": "use geom::{Distance, Polygon};\n\n\n\nuse crate::{\n\n EdgeInsets, EventCtx, GeomBatch, GfxCtx, Key, Line, Outcome, ScreenDims, ScreenPt,\n\n ScreenRectangle, Style, Text, Widget, WidgetImpl, WidgetOutput,\n\n};\n\n\n\n// TODO right now, only a single line\n\n// TODO max_chars isn't enforced; you can type as much as you want...\n\n\n\npub struct TextBox {\n\n line: String,\n\n label: String,\n\n cursor_x: usize,\n\n has_focus: bool,\n\n hovering: bool,\n\n autofocus: bool,\n\n padding: EdgeInsets,\n\n\n\n top_left: ScreenPt,\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 74, "score": 102355.95730055658 }, { "content": "\n\npub struct DrawWithTooltips {\n\n draw: Drawable,\n\n tooltips: Vec<(Polygon, Text)>,\n\n hover: Box<dyn Fn(&Polygon) -> GeomBatch>,\n\n\n\n top_left: ScreenPt,\n\n dims: ScreenDims,\n\n}\n\n\n\nimpl DrawWithTooltips {\n\n /// `batch`: the `GeomBatch` to draw\n\n /// `tooltips`: (hitbox, text) tuples where each `text` is shown when the user hovers over\n\n /// the respective `hitbox`\n\n /// `hover`: returns a GeomBatch to render upon hovering. 
Return an `GeomBox::new()` if\n\n /// you want hovering to be a no-op\n\n pub fn new_widget(\n\n ctx: &EventCtx,\n\n batch: GeomBatch,\n\n tooltips: Vec<(Polygon, Text)>,\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 75, "score": 102354.69316719529 }, { "content": " // TODO This \"cursor\" looks awful!\n\n txt.append_all(vec![\n\n Line(\"|\").fg(style.text_primary_color),\n\n Line(&self.line[self.cursor_x..=self.cursor_x]),\n\n Line(&self.line[self.cursor_x + 1..]),\n\n ]);\n\n } else {\n\n txt.append(Line(\"|\").fg(style.text_primary_color));\n\n }\n\n txt\n\n }\n\n\n\n pub fn get_line(&self) -> String {\n\n self.line.clone()\n\n }\n\n}\n\n\n\nimpl WidgetImpl for TextBox {\n\n fn get_dims(&self) -> ScreenDims {\n\n self.dims\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 76, "score": 102350.54527470113 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n// TODO Name is bad. Lay out JustDraw stuff with flexbox, just to consume it and produce one big\n\n// GeomBatch.\n\npub struct DeferDraw {\n\n pub batch: GeomBatch,\n\n\n\n pub top_left: ScreenPt,\n\n dims: ScreenDims,\n\n}\n\n\n\nimpl DeferDraw {\n\n pub fn new_widget(batch: GeomBatch) -> Widget {\n\n Widget::new(Box::new(DeferDraw {\n\n dims: batch.get_dims(),\n\n batch,\n\n top_left: ScreenPt::new(0.0, 0.0),\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 77, "score": 102349.66857133366 }, { "content": " hover: Box<dyn Fn(&Polygon) -> GeomBatch>,\n\n ) -> Widget {\n\n Widget::new(Box::new(DrawWithTooltips {\n\n dims: batch.get_dims(),\n\n top_left: ScreenPt::new(0.0, 0.0),\n\n hover,\n\n draw: ctx.upload(batch),\n\n tooltips,\n\n }))\n\n }\n\n}\n\n\n\nimpl WidgetImpl for DrawWithTooltips {\n\n fn get_dims(&self) -> ScreenDims {\n\n self.dims\n\n }\n\n\n\n fn set_pos(&mut self, top_left: ScreenPt) {\n\n self.top_left = top_left;\n\n }\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 78, "score": 102344.93606997193 }, { "content": " }))\n\n }\n\n}\n\n\n\nimpl 
WidgetImpl for DeferDraw {\n\n fn get_dims(&self) -> ScreenDims {\n\n self.dims\n\n }\n\n\n\n fn set_pos(&mut self, top_left: ScreenPt) {\n\n self.top_left = top_left;\n\n }\n\n\n\n fn event(&mut self, _: &mut EventCtx, _: &mut WidgetOutput) {\n\n unreachable!()\n\n }\n\n\n\n fn draw(&self, _: &mut GfxCtx) {\n\n unreachable!()\n\n }\n\n}\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 79, "score": 102344.42090646672 }, { "content": " top_left: ScreenPt::new(0.0, 0.0),\n\n }))\n\n }\n\n}\n\n\n\nimpl WidgetImpl for JustDraw {\n\n fn get_dims(&self) -> ScreenDims {\n\n self.dims\n\n }\n\n\n\n fn set_pos(&mut self, top_left: ScreenPt) {\n\n self.top_left = top_left;\n\n }\n\n\n\n fn event(&mut self, _: &mut EventCtx, _: &mut WidgetOutput) {}\n\n\n\n fn draw(&self, g: &mut GfxCtx) {\n\n g.redraw_at(self.top_left, &self.draw);\n\n }\n\n}\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 80, "score": 102344.33136527213 }, { "content": " dims: ScreenDims,\n\n}\n\n\n\nimpl TextBox {\n\n // TODO Really should have an options struct with defaults\n\n pub fn default_widget<I: Into<String>>(ctx: &EventCtx, label: I, prefilled: String) -> Widget {\n\n TextBox::widget(ctx, label, prefilled, true, 50)\n\n }\n\n\n\n pub fn widget<I: Into<String>>(\n\n ctx: &EventCtx,\n\n label: I,\n\n prefilled: String,\n\n autofocus: bool,\n\n max_chars: usize,\n\n ) -> Widget {\n\n let label = label.into();\n\n Widget::new(Box::new(TextBox::new(\n\n ctx,\n\n label.clone(),\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 81, "score": 102342.48462791984 }, { "content": "\n\n fn draw(&self, g: &mut GfxCtx) {\n\n g.redraw_at(self.top_left, &self.draw);\n\n\n\n if let Some(cursor) = g.canvas.get_cursor_in_screen_space() {\n\n if ScreenRectangle::top_left(self.top_left, self.dims).contains(cursor) {\n\n let radius = Distance::meters(15.0);\n\n let mut txt = Text::new();\n\n for (label, pt, _) in self.closest.all_close_pts(\n\n Pt2D::new(cursor.x - self.top_left.x, 
cursor.y - self.top_left.y),\n\n radius,\n\n ) {\n\n // TODO If some/all of the matches have the same x, write it once?\n\n let x = self.max_x.from_percent(pt.x() / self.dims.width);\n\n let y_percent = 1.0 - (pt.y() / self.dims.height);\n\n\n\n // TODO Draw this info in the ColorLegend\n\n txt.add_line(format!(\n\n \"{}: at {}, {}\",\n\n label,\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 82, "score": 102341.21209118224 }, { "content": " pts = Pt2D::approx_dedupe(pts, Distance::meters(1.0));\n\n if pts.len() >= 2 {\n\n closest.add(s.label.clone(), &pts);\n\n batch.push(s.color, thick_lineseries(pts, Distance::meters(5.0)));\n\n }\n\n }\n\n\n\n let plot = LinePlot {\n\n draw: ctx.upload(batch),\n\n closest,\n\n max_x,\n\n max_y,\n\n\n\n top_left: ScreenPt::new(0.0, 0.0),\n\n dims: ScreenDims::new(width, height),\n\n };\n\n\n\n let num_x_labels = 3;\n\n let mut row = Vec::new();\n\n for i in 0..num_x_labels {\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 83, "score": 102339.4651839257 }, { "content": " let max_char_width = 25.0;\n\n TextBox {\n\n label,\n\n cursor_x: prefilled.len(),\n\n line: prefilled,\n\n has_focus: false,\n\n hovering: false,\n\n autofocus,\n\n padding,\n\n top_left: ScreenPt::new(0.0, 0.0),\n\n dims: ScreenDims::new(\n\n (max_chars as f64) * max_char_width + (padding.left + padding.right) as f64,\n\n ctx.default_line_height() + (padding.top + padding.bottom) as f64,\n\n ),\n\n }\n\n }\n\n\n\n fn calculate_text(&self, style: &Style) -> Text {\n\n let mut txt = Text::from(&self.line[0..self.cursor_x]);\n\n if self.cursor_x < self.line.len() {\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 84, "score": 102334.63932795459 }, { "content": "\n\n fn event(&mut self, _: &mut EventCtx, _: &mut WidgetOutput) {}\n\n\n\n fn draw(&self, g: &mut GfxCtx) {\n\n g.redraw_at(self.top_left, &self.draw);\n\n\n\n if let Some(cursor) = g.canvas.get_cursor_in_screen_space() {\n\n if 
!ScreenRectangle::top_left(self.top_left, self.dims).contains(cursor) {\n\n return;\n\n }\n\n let translated =\n\n ScreenPt::new(cursor.x - self.top_left.x, cursor.y - self.top_left.y).to_pt();\n\n // TODO Assume regions are non-overlapping\n\n for (region, txt) in &self.tooltips {\n\n if region.contains_pt(translated) {\n\n let extra = g.upload((self.hover)(region));\n\n g.redraw_at(self.top_left, &extra);\n\n g.draw_mouse_tooltip(txt.clone());\n\n return;\n\n }\n", "file_path": "widgetry/src/widgets/just_draw.rs", "rank": 85, "score": 102333.24812716006 }, { "content": "impl<X: Axis<X>, Y: Axis<Y>> LinePlot<X, Y> {\n\n pub fn new_widget(\n\n ctx: &EventCtx,\n\n mut series: Vec<Series<X, Y>>,\n\n opts: PlotOptions<X, Y>,\n\n ) -> Widget {\n\n let legend = make_legend(ctx, &series, &opts);\n\n series.retain(|s| !opts.disabled.contains(&s.label));\n\n\n\n // Assume min_x is X::zero() and min_y is Y::zero()\n\n let max_x = opts.max_x.unwrap_or_else(|| {\n\n series\n\n .iter()\n\n .map(|s| s.pts.iter().map(|(x, _)| *x).max().unwrap_or_else(X::zero))\n\n .max()\n\n .unwrap_or_else(X::zero)\n\n });\n\n let max_y = opts.max_y.unwrap_or_else(|| {\n\n series\n\n .iter()\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 86, "score": 102332.74274717674 }, { "content": " // Don't let the x-axis fill the parent container\n\n Widget::custom_col(vec![\n\n legend.margin_below(10),\n\n Widget::custom_row(vec![y_axis, Widget::new(Box::new(plot))]),\n\n x_axis,\n\n ])\n\n .container()\n\n }\n\n}\n\n\n\nimpl<X: Axis<X>, Y: Axis<Y>> WidgetImpl for LinePlot<X, Y> {\n\n fn get_dims(&self) -> ScreenDims {\n\n self.dims\n\n }\n\n\n\n fn set_pos(&mut self, top_left: ScreenPt) {\n\n self.top_left = top_left;\n\n }\n\n\n\n fn event(&mut self, _: &mut EventCtx, _: &mut WidgetOutput) {}\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 87, "score": 102332.23932120598 }, { "content": " x.prettyprint(),\n\n self.max_y.from_percent(y_percent).prettyprint()\n\n ));\n\n 
}\n\n if !txt.is_empty() {\n\n g.fork_screenspace();\n\n g.draw_polygon(Color::RED, Circle::new(cursor.to_pt(), radius).to_polygon());\n\n g.draw_mouse_tooltip(txt);\n\n g.unfork();\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 88, "score": 102331.9602066214 }, { "content": " max_chars,\n\n prefilled,\n\n autofocus,\n\n )))\n\n .named(label)\n\n }\n\n\n\n pub(crate) fn new(\n\n ctx: &EventCtx,\n\n label: String,\n\n max_chars: usize,\n\n prefilled: String,\n\n autofocus: bool,\n\n ) -> TextBox {\n\n let padding = EdgeInsets {\n\n top: 6.0,\n\n left: 8.0,\n\n bottom: 8.0,\n\n right: 8.0,\n\n };\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 89, "score": 102330.72494202135 }, { "content": " // lines.\n\n {\n\n let order_of_mag = 10.0_f64.powf(max_y.to_f64().log10().ceil());\n\n for i in 0..10 {\n\n let y = max_y.from_f64(order_of_mag / 10.0 * (i as f64));\n\n let pct = y.to_percent(max_y);\n\n if pct > 1.0 {\n\n break;\n\n }\n\n batch.push(\n\n Color::hex(\"#7C7C7C\"),\n\n PolyLine::must_new(vec![\n\n Pt2D::new(0.0, (1.0 - pct) * height),\n\n Pt2D::new(width, (1.0 - pct) * height),\n\n ])\n\n .make_polygons(Distance::meters(1.0)),\n\n );\n\n }\n\n }\n\n // X axis grid\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 90, "score": 102330.59463017884 }, { "content": " )]);\n\n\n\n let outline_style = g.style().btn_outline.outline;\n\n if let Ok(outline) = Polygon::rounded_rectangle(self.dims.width, self.dims.height, 2.0)\n\n .to_outline(Distance::meters(outline_style.0))\n\n {\n\n batch.push(outline_style.1, outline);\n\n }\n\n\n\n batch.append(\n\n self.calculate_text(g.style())\n\n .render_autocropped(g)\n\n .translate(self.padding.left, self.padding.top),\n\n );\n\n let draw = g.upload(batch);\n\n g.redraw_at(self.top_left, &draw);\n\n }\n\n}\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 91, "score": 102330.39224700117 }, { "content": " if max_x != X::zero() {\n\n let order_of_mag 
= 10.0_f64.powf(max_x.to_f64().log10().ceil());\n\n for i in 0..10 {\n\n let x = max_x.from_f64(order_of_mag / 10.0 * (i as f64));\n\n let pct = x.to_percent(max_x);\n\n if pct > 1.0 {\n\n break;\n\n }\n\n batch.push(\n\n Color::hex(\"#7C7C7C\"),\n\n PolyLine::must_new(vec![\n\n Pt2D::new(pct * width, 0.0),\n\n Pt2D::new(pct * width, height),\n\n ])\n\n .make_polygons(Distance::meters(1.0)),\n\n );\n\n }\n\n }\n\n\n\n let mut closest = FindClosest::new(&Bounds::from(&[\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 92, "score": 102329.12372021977 }, { "content": " let percent_x = (i as f64) / ((num_x_labels - 1) as f64);\n\n let x = max_x.from_percent(percent_x);\n\n // TODO Need ticks now to actually see where this goes\n\n let batch = Text::from(x.prettyprint())\n\n .render(ctx)\n\n .rotate(Angle::degrees(-15.0))\n\n .autocrop();\n\n row.push(batch.into_widget(ctx));\n\n }\n\n let x_axis = Widget::custom_row(row).padding(10).evenly_spaced();\n\n\n\n let num_y_labels = 4;\n\n let mut col = Vec::new();\n\n for i in 0..num_y_labels {\n\n let percent_y = (i as f64) / ((num_y_labels - 1) as f64);\n\n col.push(max_y.from_percent(percent_y).prettyprint().text_widget(ctx));\n\n }\n\n col.reverse();\n\n let y_axis = Widget::custom_col(col).padding(10).evenly_spaced();\n\n\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 93, "score": 102326.66069634122 }, { "content": " .map(|s| {\n\n s.pts\n\n .iter()\n\n .map(|(_, value)| *value)\n\n .max()\n\n .unwrap_or_else(Y::zero)\n\n })\n\n .max()\n\n .unwrap_or_else(Y::zero)\n\n });\n\n\n\n // TODO Tuned to fit the info panel. Instead these should somehow stretch to fill their\n\n // container.\n\n let width = 0.23 * ctx.canvas.window_width;\n\n let height = 0.2 * ctx.canvas.window_height;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Grid lines for the Y scale. 
Draw up to 10 lines max to cover the order of magnitude of\n\n // the range.\n\n // TODO This caps correctly, but if the max is 105, then suddenly we just have 2 grid\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 94, "score": 102325.93258069144 }, { "content": " }\n\n }\n\n _ => {\n\n if let Some(c) = key.to_char(ctx.is_key_down(Key::LeftShift)) {\n\n output.outcome = Outcome::Changed(self.label.clone());\n\n self.line.insert(self.cursor_x, c);\n\n self.cursor_x += 1;\n\n } else {\n\n ctx.input.unconsume_event();\n\n }\n\n }\n\n };\n\n }\n\n }\n\n\n\n fn draw(&self, g: &mut GfxCtx) {\n\n // TODO Cache\n\n let mut batch = GeomBatch::from(vec![(\n\n g.style().field_bg,\n\n Polygon::rounded_rectangle(self.dims.width, self.dims.height, 2.0),\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 95, "score": 102324.70041164673 }, { "content": " }\n\n\n\n fn set_pos(&mut self, top_left: ScreenPt) {\n\n self.top_left = top_left;\n\n }\n\n\n\n fn event(&mut self, ctx: &mut EventCtx, output: &mut WidgetOutput) {\n\n if ctx.redo_mouseover() {\n\n if let Some(pt) = ctx.canvas.get_cursor_in_screen_space() {\n\n self.hovering = ScreenRectangle::top_left(self.top_left, self.dims).contains(pt);\n\n } else {\n\n self.hovering = false;\n\n }\n\n }\n\n\n\n if ctx.normal_left_click() {\n\n // Let all textboxes see this event, so they can deactivate their own focus.\n\n // TODO But if a button is clicked before this textbox, that event isn't seen here...\n\n ctx.input.unconsume_event();\n\n self.has_focus = self.hovering;\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 96, "score": 102317.45319042412 }, { "content": " Pt2D::new(0.0, 0.0),\n\n Pt2D::new(width, height),\n\n ]));\n\n for s in series {\n\n if max_x == X::zero() {\n\n continue;\n\n }\n\n\n\n let mut pts = Vec::new();\n\n for (t, y) in s.pts {\n\n let percent_x = t.to_percent(max_x);\n\n let percent_y = y.to_percent(max_y);\n\n pts.push(Pt2D::new(\n\n percent_x * width,\n\n // Y 
inversion! :D\n\n (1.0 - percent_y) * height,\n\n ));\n\n }\n\n // Downsample to avoid creating polygons with a huge number of points. 1m is untuned,\n\n // and here \"meters\" is really pixels.\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 97, "score": 102316.16133319205 }, { "content": " }\n\n\n\n if !self.has_focus && !self.autofocus {\n\n return;\n\n }\n\n if let Some(key) = ctx.input.any_pressed() {\n\n match key {\n\n Key::LeftArrow => {\n\n if self.cursor_x > 0 {\n\n self.cursor_x -= 1;\n\n }\n\n }\n\n Key::RightArrow => {\n\n self.cursor_x = (self.cursor_x + 1).min(self.line.len());\n\n }\n\n Key::Backspace => {\n\n if self.cursor_x > 0 {\n\n output.outcome = Outcome::Changed(self.label.clone());\n\n self.line.remove(self.cursor_x - 1);\n\n self.cursor_x -= 1;\n", "file_path": "widgetry/src/widgets/text_box.rs", "rank": 98, "score": 102315.73993006726 } ]
Rust
src/transforms/geoip.rs
XOSplicer/vector
f04d9452471147c082d8262e103cdb33fb846e26
use super::Transform; use crate::{ event::{Event, Value}, topology::config::{DataType, TransformConfig, TransformContext}, }; use serde::{Deserialize, Serialize}; use string_cache::DefaultAtom as Atom; use std::str::FromStr; use tracing::field; #[derive(Deserialize, Serialize, Debug)] #[serde(deny_unknown_fields)] pub struct GeoipConfig { pub source: Atom, pub database: String, #[serde(default = "default_geoip_target_field")] pub target: String, } pub struct Geoip { pub dbreader: maxminddb::Reader<Vec<u8>>, pub source: Atom, pub target: String, } fn default_geoip_target_field() -> String { "geoip".to_string() } #[typetag::serde(name = "geoip")] impl TransformConfig for GeoipConfig { fn build(&self, _cx: TransformContext) -> Result<Box<dyn Transform>, crate::Error> { let reader = maxminddb::Reader::open_readfile(self.database.clone())?; Ok(Box::new(Geoip::new( reader, self.source.clone(), self.target.clone(), ))) } fn input_type(&self) -> DataType { DataType::Log } fn output_type(&self) -> DataType { DataType::Log } fn transform_type(&self) -> &'static str { "geoip" } } impl Geoip { pub fn new(dbreader: maxminddb::Reader<Vec<u8>>, source: Atom, target: String) -> Self { Geoip { dbreader, source, target, } } } impl Transform for Geoip { fn transform(&mut self, mut event: Event) -> Option<Event> { let target_field = self.target.clone(); let ipaddress = event .as_log() .get(&self.source) .map(|s| s.to_string_lossy()); if let Some(ipaddress) = &ipaddress { if let Ok(ip) = FromStr::from_str(ipaddress) { if let Ok(data) = self.dbreader.lookup::<maxminddb::geoip2::City>(ip) { if let Some(city_names) = data.city.and_then(|c| c.names) { if let Some(city_name_en) = city_names.get("en") { event.as_mut_log().insert( Atom::from(format!("{}.city_name", target_field)), Value::from(city_name_en.to_string()), ); } } let continent_code = data.continent.and_then(|c| c.code); if let Some(continent_code) = continent_code { event.as_mut_log().insert( 
Atom::from(format!("{}.continent_code", target_field)), Value::from(continent_code), ); } let iso_code = data.country.and_then(|cy| cy.iso_code); if let Some(iso_code) = iso_code { event.as_mut_log().insert( Atom::from(format!("{}.country_code", target_field)), Value::from(iso_code), ); } let time_zone = data.location.clone().and_then(|loc| loc.time_zone); if let Some(time_zone) = time_zone { event.as_mut_log().insert( Atom::from(format!("{}.timezone", target_field)), Value::from(time_zone), ); } let latitude = data.location.clone().and_then(|loc| loc.latitude); if let Some(latitude) = latitude { event.as_mut_log().insert( Atom::from(format!("{}.latitude", target_field)), Value::from(latitude.to_string()), ); } let longitude = data.location.clone().and_then(|loc| loc.longitude); if let Some(longitude) = longitude { event.as_mut_log().insert( Atom::from(format!("{}.longitude", target_field)), Value::from(longitude.to_string()), ); } let postal_code = data.postal.clone().and_then(|p| p.code); if let Some(postal_code) = postal_code { event.as_mut_log().insert( Atom::from(format!("{}.postal_code", target_field)), Value::from(postal_code), ); } } } else { debug!( message = "IP Address not parsed correctly.", ipaddr = &field::display(&ipaddress), ); } } else { debug!( message = "Field does not exist.", field = self.source.as_ref(), ); }; let geoip_fields = [ format!("{}.city_name", target_field), format!("{}.country_code", target_field), format!("{}.continent_code", target_field), format!("{}.timezone", target_field), format!("{}.latitude", target_field), format!("{}.longitude", target_field), format!("{}.postal_code", target_field), ]; for field in geoip_fields.iter() { let e = event.as_mut_log(); let d = e.get(&Atom::from(field.to_string())); match d { None => { e.insert(Atom::from(field.to_string()), Value::from("")); } _ => (), } } Some(event) } } #[cfg(feature = "transforms-json_parser")] #[cfg(test)] mod tests { use super::Geoip; use crate::{ event::Event, 
transforms::json_parser::{JsonParser, JsonParserConfig}, transforms::Transform, }; use std::collections::HashMap; use string_cache::DefaultAtom as Atom; #[test] fn geoip_lookup_success() { let mut parser = JsonParser::from(JsonParserConfig::default()); let event = Event::from(r#"{"remote_addr": "2.125.160.216", "request_path": "foo/bar"}"#); let event = parser.transform(event).unwrap(); let reader = maxminddb::Reader::open_readfile("test-data/GeoIP2-City-Test.mmdb").unwrap(); let mut augment = Geoip::new(reader, Atom::from("remote_addr"), "geo".to_string()); let new_event = augment.transform(event).unwrap(); let mut exp_geoip_attr = HashMap::new(); exp_geoip_attr.insert("city_name", "Boxford"); exp_geoip_attr.insert("country_code", "GB"); exp_geoip_attr.insert("continent_code", "EU"); exp_geoip_attr.insert("timezone", "Europe/London"); exp_geoip_attr.insert("latitude", "51.75"); exp_geoip_attr.insert("longitude", "-1.25"); exp_geoip_attr.insert("postal_code", "OX1"); for field in exp_geoip_attr.keys() { let k = Atom::from(format!("geo.{}", field).to_string()); let geodata = new_event.as_log().get(&k).unwrap().to_string_lossy(); match exp_geoip_attr.get(field) { Some(&v) => assert_eq!(geodata, v), _ => assert!(false), } } } #[test] fn geoip_lookup_partial_results() { let mut parser = JsonParser::from(JsonParserConfig::default()); let event = Event::from(r#"{"remote_addr": "67.43.156.9", "request_path": "foo/bar"}"#); let event = parser.transform(event).unwrap(); let reader = maxminddb::Reader::open_readfile("test-data/GeoIP2-City-Test.mmdb").unwrap(); let mut augment = Geoip::new(reader, Atom::from("remote_addr"), "geo".to_string()); let new_event = augment.transform(event).unwrap(); let mut exp_geoip_attr = HashMap::new(); exp_geoip_attr.insert("city_name", ""); exp_geoip_attr.insert("country_code", "BT"); exp_geoip_attr.insert("continent_code", "AS"); exp_geoip_attr.insert("timezone", "Asia/Thimphu"); exp_geoip_attr.insert("latitude", "27.5"); 
exp_geoip_attr.insert("longitude", "90.5"); exp_geoip_attr.insert("postal_code", ""); for field in exp_geoip_attr.keys() { let k = Atom::from(format!("geo.{}", field).to_string()); let geodata = new_event.as_log().get(&k).unwrap().to_string_lossy(); match exp_geoip_attr.get(field) { Some(&v) => assert_eq!(geodata, v), _ => assert!(false), } } } #[test] fn geoip_lookup_no_results() { let mut parser = JsonParser::from(JsonParserConfig::default()); let event = Event::from(r#"{"remote_addr": "10.1.12.1", "request_path": "foo/bar"}"#); let event = parser.transform(event).unwrap(); let reader = maxminddb::Reader::open_readfile("test-data/GeoIP2-City-Test.mmdb").unwrap(); let mut augment = Geoip::new(reader, Atom::from("remote_addr"), "geo".to_string()); let new_event = augment.transform(event).unwrap(); let mut exp_geoip_attr = HashMap::new(); exp_geoip_attr.insert("city_name", ""); exp_geoip_attr.insert("country_code", ""); exp_geoip_attr.insert("continent_code", ""); exp_geoip_attr.insert("timezone", ""); exp_geoip_attr.insert("latitude", ""); exp_geoip_attr.insert("longitude", ""); exp_geoip_attr.insert("postal_code", ""); for field in exp_geoip_attr.keys() { let k = Atom::from(format!("geo.{}", field).to_string()); println!("Looking for {:?}", k); let geodata = new_event.as_log().get(&k).unwrap().to_string_lossy(); match exp_geoip_attr.get(field) { Some(&v) => assert_eq!(geodata, v), _ => assert!(false), } } } }
use super::Transform; use crate::{ event::{Event, Value}, topology::config::{DataType, TransformConfig, TransformContext}, }; use serde::{Deserialize, Serialize}; use string_cache::DefaultAtom as Atom; use std::str::FromStr; use tracing::field; #[derive(Deserialize, Serialize, Debug)] #[serde(deny_unknown_fields)] pub struct GeoipConfig { pub source: Atom, pub database: String, #[serde(default = "default_geoip_target_field")] pub target: String, } pub struct Geoip { pub dbreader: maxminddb::Reader<Vec<u8>>, pub source: Atom, pub target: String, } fn default_geoip_target_field() -> String { "geoip".to_string() } #[typetag::serde(name = "geoip")] impl TransformConfig for GeoipConfig { fn build(&self, _cx: TransformContext) -> Result<Box<dyn Transform>, crate::Error> { let reader = maxminddb::Reader::open_readfile(self.database.clone())?; Ok(Box::new(Geoip::new( reader, self.source.clone(), self.target.clone(), ))) } fn input_type(&self) -> DataType { DataType::Log } fn output_type(&self) -> DataType { DataType::Log } fn transform_type(&self) -> &'static str { "geoip" } } impl Geoip { pub fn new(dbreader: maxminddb::Reader<Vec<u8>>, source: Atom, target: String) -> Self { Geoip { dbreader, source, target, } } } impl Transform for Geoip { fn transform(&mut self, mut event: Event) -> Option<Event> { let target_field = self.target.clone(); let ipaddress = event .as_log() .get(&self.source) .map(|s| s.to_string_lossy()); if let Some(ipaddress) = &ipaddress { if let Ok(ip) = FromStr::from_str(ipaddress) { if let Ok(data) = self.dbreader.lookup::<maxminddb::geoip2::City>(ip) { if let Some(city_names) = data.city.and_then(|c| c.names) { if let Some(city_name_en) = city_names.get("en") { event.as_mut_log().insert( Atom::from(format!("{}.city_name", target_field)), Value::from(city_name_en.to_string()), ); } } let continent_code = data.continent.and_then(|c| c.code); if let Some(continent_code) = continent_code { event.as_mut_log().insert( 
Atom::from(format!("{}.continent_code", target_field)), Value::from(continent_code), ); } let iso_code = data.country.and_then(|cy| cy.iso_code); if let Some(iso_code) = iso_code { event.as_mut_log().insert( Atom::from(format!("{}.country_code", target_field)), Value::from(iso_code), ); } let time_zone = data.location.clone().and_then(|loc| loc.time_zone); if let Some(time_zone) = time_zone { event.as_mut_log().insert( Atom::from(format!("{}.timezone", target_field)), Value::from(time_zone), ); } let latitude = data.location.clone().and_then(|loc| loc.latitude); if let Some(latitude) = latitude { event.as_mut_log().insert( Atom::from(format!("{}.latitude", target_field)), Value::from(latitude.to_string()), ); } let longitude = data.location.clone().and_then(|loc| loc.longitude); if let Some(longitude) = longitude { event.as_mut_log().insert( Atom::from(format!("{}.longitude", target_field)), Value::from(longitude.to_string()), ); } let postal_code = data.postal.clone().and_then(|p| p.code); if let Some(postal_code) = postal_code { event.as_mut_log().insert( Atom::from(format!("{}.postal_code", target_field)), Value::from(postal_code), ); } } } else { debug!( message = "IP Address not parsed correctly.", ipaddr = &field::display(&ipaddress), ); } } else { debug!( message = "Field does not exist.", field = self.source.as_ref(), ); }; let geoip_fields = [ format!("{}.city_name", target_field), format!("{}.country_code", target_field), format!("{}.continent_code", target_field), format!("{}.timezone", target_field), format!("{}.latitude", target_field), format!("{}.longitude", target_field), format!("{}.postal_code", target_field), ]; for field in geoip_fields.iter() { let e = event.as_mut_log(); let d = e.get(&Atom::from(field.to_string())); match d { None => { e.insert(Atom::from(field.to_string()), Value::from("")); } _ => (), } } Some(event) } } #[cfg(feature = "transforms-json_parser")] #[cfg(test)] mod tests { use super::Geoip; use crate::{ event::Event, 
transforms::json_parser::{JsonParser, JsonParserConfig}, transforms::Transform, }; use std::collections::HashMap; use string_cache::DefaultAtom as Atom; #[test] fn geoip_lookup_success() { let mut parser = JsonParser::from(JsonParserConfig::default()); let event = Event::from(r#"{"remote_addr": "2.125.160.216", "request_path": "foo/bar"}"#); let event = parser.transform(event).unwrap(); let reader = maxminddb::Reader::open_readfile("test-data/GeoIP2-City-Test.mmdb").unwrap(); let mut augment = Geoip::new(reader, Atom::from("remote_addr"), "geo".to_string()); let new_event = augment.transform(event).unwrap(); let mut exp_geoip_attr = HashMap::new(); exp_geoip_attr.insert("city_name", "Boxford"); exp_geoip_attr.insert("country_code", "GB"); exp_geoip_attr.insert("continent_code", "EU"); exp_geoip_attr.insert("timezone", "Europe/London"); exp_geoip_attr.insert("latitude", "51.75"); exp_geoip_attr.insert("longitude", "-1.25"); exp_geoip_attr.insert("postal_code", "OX1"); for field in exp_geoip_attr.keys() { let k = Atom::from(format!("geo.{}", field).to_string()); let geodata = new_event.as_log().get(&k).unwrap().to_string_lossy(); match exp_geoip_attr.get(field) { Some(&v) => assert_eq!(geodata, v), _ => assert!(false), } } } #[test] fn geoip_lookup_partial_results() { let mut parser = JsonParser::from(JsonParserConfig::default()); let event = Event::from(r#"{"remote_addr": "67.43.156.9", "request_path": "foo/bar"}"#); let event = parser.transform(event).unwrap(); let reader = maxminddb::Reader::open_readfile("test-data/GeoIP2-City-Test.mmdb").unwrap(); let mut augment = Geoip::new(reader, Atom::from("re
exp_geoip_attr.insert("timezone", ""); exp_geoip_attr.insert("latitude", ""); exp_geoip_attr.insert("longitude", ""); exp_geoip_attr.insert("postal_code", ""); for field in exp_geoip_attr.keys() { let k = Atom::from(format!("geo.{}", field).to_string()); println!("Looking for {:?}", k); let geodata = new_event.as_log().get(&k).unwrap().to_string_lossy(); match exp_geoip_attr.get(field) { Some(&v) => assert_eq!(geodata, v), _ => assert!(false), } } } }
mote_addr"), "geo".to_string()); let new_event = augment.transform(event).unwrap(); let mut exp_geoip_attr = HashMap::new(); exp_geoip_attr.insert("city_name", ""); exp_geoip_attr.insert("country_code", "BT"); exp_geoip_attr.insert("continent_code", "AS"); exp_geoip_attr.insert("timezone", "Asia/Thimphu"); exp_geoip_attr.insert("latitude", "27.5"); exp_geoip_attr.insert("longitude", "90.5"); exp_geoip_attr.insert("postal_code", ""); for field in exp_geoip_attr.keys() { let k = Atom::from(format!("geo.{}", field).to_string()); let geodata = new_event.as_log().get(&k).unwrap().to_string_lossy(); match exp_geoip_attr.get(field) { Some(&v) => assert_eq!(geodata, v), _ => assert!(false), } } } #[test] fn geoip_lookup_no_results() { let mut parser = JsonParser::from(JsonParserConfig::default()); let event = Event::from(r#"{"remote_addr": "10.1.12.1", "request_path": "foo/bar"}"#); let event = parser.transform(event).unwrap(); let reader = maxminddb::Reader::open_readfile("test-data/GeoIP2-City-Test.mmdb").unwrap(); let mut augment = Geoip::new(reader, Atom::from("remote_addr"), "geo".to_string()); let new_event = augment.transform(event).unwrap(); let mut exp_geoip_attr = HashMap::new(); exp_geoip_attr.insert("city_name", ""); exp_geoip_attr.insert("country_code", ""); exp_geoip_attr.insert("continent_code", "");
random
[ { "content": "/// Returns a mutable reference to field value specified by the given path.\n\npub fn get_mut<'a>(fields: &'a mut BTreeMap<Atom, Value>, path: &str) -> Option<&'a mut Value> {\n\n let mut path_iter = PathIter::new(path);\n\n\n\n match path_iter.next() {\n\n Some(PathComponent::Key(key)) => match fields.get_mut(&key) {\n\n None => None,\n\n Some(value) => get_mut_value(value, path_iter),\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/get_mut.rs", "rank": 0, "score": 529047.2165526844 }, { "content": "/// Inserts field value using a path specified using `a.b[1].c` notation.\n\npub fn insert(fields: &mut BTreeMap<Atom, Value>, path: &str, value: Value) -> Option<Value> {\n\n map_insert(fields, PathIter::new(path).peekable(), value)\n\n}\n\n\n", "file_path": "src/event/util/log/insert.rs", "rank": 1, "score": 518106.2151462674 }, { "content": "/// Removes field value specified by the given path and return its value.\n\n///\n\n/// A special case worth mentioning: if there is a nested array and an item is removed\n\n/// from the middle of this array, then it is just replaced by `Value::Null`.\n\npub fn remove(fields: &mut BTreeMap<Atom, Value>, path: &str, prune: bool) -> Option<Value> {\n\n remove_map(fields, PathIter::new(path).peekable(), prune).map(|(value, _)| value)\n\n}\n\n\n", "file_path": "src/event/util/log/remove.rs", "rank": 2, "score": 507503.7713876646 }, { "content": "fn insert_fields_from_syslog(event: &mut Event, parsed: Message<&str>) {\n\n let log = event.as_mut_log();\n\n\n\n if let Some(severity) = parsed.severity {\n\n log.insert(\"severity\", severity.as_str());\n\n }\n\n if let Some(facility) = parsed.facility {\n\n log.insert(\"facility\", facility.as_str());\n\n }\n\n if let Protocol::RFC5424(version) = parsed.protocol {\n\n log.insert(\"version\", version as i64);\n\n }\n\n if let Some(app_name) = parsed.appname {\n\n log.insert(\"appname\", app_name);\n\n }\n\n if let Some(msg_id) = parsed.msgid {\n\n 
log.insert(\"msgid\", msg_id);\n\n }\n\n if let Some(procid) = parsed.procid {\n\n let value: Value = match procid {\n", "file_path": "src/sources/syslog.rs", "rank": 3, "score": 474257.6551398422 }, { "content": "/// Iterates over all paths in form \"a.b[0].c[1]\" in alphabetical order.\n\n/// It is implemented as a wrapper around `all_fields` to reduce code\n\n/// duplication.\n\npub fn keys<'a>(fields: &'a BTreeMap<Atom, Value>) -> impl Iterator<Item = Atom> + 'a {\n\n all_fields(fields).map(|(k, _)| k)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::super::test::fields_from_json;\n\n use super::*;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn keys_simple() {\n\n let fields = fields_from_json(json!({\n\n \"field2\": 3,\n\n \"field1\": 4,\n\n \"field3\": 5\n\n }));\n\n let expected: Vec<_> = vec![\"field1\", \"field2\", \"field3\"]\n\n .into_iter()\n\n .map(|s| Atom::from(s))\n", "file_path": "src/event/util/log/keys.rs", "rank": 4, "score": 471092.6847324392 }, { "content": "/// Checks whether a field specified by a given path is present.\n\npub fn contains(fields: &BTreeMap<Atom, Value>, path: &str) -> bool {\n\n let mut path_iter = PathIter::new(path);\n\n\n\n match path_iter.next() {\n\n Some(PathComponent::Key(key)) => match fields.get(&key) {\n\n None => false,\n\n Some(value) => value_contains(value, path_iter),\n\n },\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/contains.rs", "rank": 5, "score": 468597.23394628044 }, { "content": "/// Returns a reference to a field value specified by the given path.\n\npub fn get<'a>(fields: &'a BTreeMap<Atom, Value>, path: &str) -> Option<&'a Value> {\n\n let mut path_iter = PathIter::new(path);\n\n\n\n match path_iter.next() {\n\n Some(PathComponent::Key(key)) => match fields.get(&key) {\n\n None => None,\n\n Some(value) => get_value(value, path_iter),\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/get.rs", "rank": 6, "score": 461055.90581293486 }, { "content": 
"/// Determines format of message.\n\n/// This exists because Docker is still a special entity in Kubernetes as it can write in Json\n\n/// despite CRI defining it's own format.\n\npub fn build_message_parser() -> crate::Result<ApplicableTransform> {\n\n let transforms = vec![\n\n Box::new(DockerMessageTransformer::new()) as Box<dyn Transform>,\n\n transform_cri_message()?,\n\n ];\n\n Ok(ApplicableTransform::Candidates(transforms))\n\n}\n\n\n", "file_path": "src/sources/kubernetes/message_parser.rs", "rank": 7, "score": 458955.0971705497 }, { "content": "fn flatten_field(key: Atom, value: TomlValue, new_fields: &mut IndexMap<Atom, TemplateOrValue>) {\n\n match value {\n\n TomlValue::String(s) => {\n\n let t = Template::from(s);\n\n new_fields.insert(key, t.into())\n\n }\n\n TomlValue::Integer(i) => {\n\n let i = Value::from(i);\n\n new_fields.insert(key, i.into())\n\n }\n\n TomlValue::Float(f) => {\n\n let f = Value::from(f);\n\n new_fields.insert(key, f.into())\n\n }\n\n TomlValue::Boolean(b) => {\n\n let b = Value::from(b);\n\n new_fields.insert(key, b.into())\n\n }\n\n TomlValue::Datetime(dt) => {\n\n let dt = dt.to_string();\n", "file_path": "src/transforms/add_fields.rs", "rank": 8, "score": 447637.46410453407 }, { "content": "pub fn source_with_event_counter() -> (Sender<Event>, MockSourceConfig, Arc<AtomicUsize>) {\n\n let event_counter = Arc::new(AtomicUsize::new(0));\n\n let (tx, rx) = futures01::sync::mpsc::channel(0);\n\n let source = MockSourceConfig::new_with_event_counter(rx, event_counter.clone());\n\n (tx, source, event_counter)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 10, "score": 437142.9265401171 }, { "content": "fn render_fields(src: &str, event: &Event) -> Result<String, Vec<Atom>> {\n\n let mut missing_fields = Vec::new();\n\n let out = RE\n\n .replace_all(src, |caps: &Captures<'_>| {\n\n let key = caps\n\n .get(1)\n\n .map(|s| Atom::from(s.as_str().trim()))\n\n .expect(\"src should match regex\");\n\n if let Some(val) = 
event.as_log().get(&key) {\n\n val.to_string_lossy()\n\n } else {\n\n missing_fields.push(key.clone());\n\n String::new()\n\n }\n\n })\n\n .into_owned();\n\n if missing_fields.is_empty() {\n\n Ok(out)\n\n } else {\n\n Err(missing_fields)\n\n }\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 11, "score": 431300.8409083684 }, { "content": "fn encode_map(fields: BTreeMap<Atom, Value>) -> proto::ValueMap {\n\n proto::ValueMap {\n\n fields: fields\n\n .into_iter()\n\n .map(|(key, value)| (key.to_string(), encode_value(value)))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "src/event/mod.rs", "rank": 12, "score": 428686.6328383418 }, { "content": "pub fn to_string(value: impl serde::Serialize) -> String {\n\n let value = serde_json::to_value(value).unwrap();\n\n value.as_str().unwrap().into()\n\n}\n\n\n\n/// Answers \"Is it possible to skip serializing this value, because it's the\n\n/// default?\"\n\npub(crate) fn skip_serializing_if_default<E: Default + PartialEq>(e: &E) -> bool {\n\n e == &E::default()\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(untagged)]\n\npub enum FieldsOrValue<V> {\n\n Fields(Fields<V>),\n\n Value(V),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Fields<V>(IndexMap<String, FieldsOrValue<V>>);\n", "file_path": "src/serde.rs", "rank": 13, "score": 426721.82367613795 }, { "content": "fn decode_map(fields: BTreeMap<String, proto::Value>) -> Option<Value> {\n\n let mut accum: BTreeMap<Atom, Value> = BTreeMap::new();\n\n for (key, value) in fields {\n\n match decode_value(value) {\n\n Some(value) => {\n\n accum.insert(Atom::from(key), value);\n\n }\n\n None => return None,\n\n }\n\n }\n\n Some(Value::Map(accum))\n\n}\n\n\n", "file_path": "src/event/mod.rs", "rank": 14, "score": 419444.6549935818 }, { "content": "pub fn transform(suffix: &str, increase: f64) -> MockTransformConfig {\n\n MockTransformConfig::new(suffix.to_owned(), increase)\n\n}\n\n\n\n#[derive(Debug, Deserialize, 
Serialize)]\n\npub struct MockSourceConfig {\n\n #[serde(skip)]\n\n receiver: Arc<Mutex<Option<Receiver<Event>>>>,\n\n #[serde(skip)]\n\n event_counter: Option<Arc<AtomicUsize>>,\n\n #[serde(skip)]\n\n data_type: Option<DataType>,\n\n}\n\n\n\nimpl MockSourceConfig {\n\n pub fn new(receiver: Receiver<Event>) -> Self {\n\n Self {\n\n receiver: Arc::new(Mutex::new(Some(receiver))),\n\n event_counter: None,\n\n data_type: Some(DataType::Any),\n", "file_path": "tests/support/mod.rs", "rank": 15, "score": 410713.44801889185 }, { "content": "pub fn source() -> (Sender<Event>, MockSourceConfig) {\n\n let (tx, rx) = futures01::sync::mpsc::channel(0);\n\n let source = MockSourceConfig::new(rx);\n\n (tx, source)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 16, "score": 407606.30500265735 }, { "content": "fn json_value_to_type_string(value: &JsonValue) -> &'static str {\n\n match value {\n\n JsonValue::Object(_) => \"Object\",\n\n JsonValue::Array(_) => \"Array\",\n\n JsonValue::String(_) => \"String\",\n\n JsonValue::Number(_) => \"Number\",\n\n JsonValue::Bool(_) => \"Bool\",\n\n JsonValue::Null => \"Null\",\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{Encoding, SimpleHttpConfig};\n\n use warp::http::HeaderMap;\n\n\n\n use crate::shutdown::ShutdownSignal;\n\n use crate::{\n\n event::{self, Event},\n\n runtime::Runtime,\n", "file_path": "src/sources/http.rs", "rank": 17, "score": 405937.05963908695 }, { "content": "/// Merges all fields specified at `merge_fields` from `incoming` to `current`.\n\npub fn merge_log_event(current: &mut LogEvent, mut incoming: LogEvent, merge_fields: &[Atom]) {\n\n for merge_field in merge_fields {\n\n let incoming_val = match incoming.remove(merge_field) {\n\n None => continue,\n\n Some(val) => val,\n\n };\n\n match current.get_mut(merge_field) {\n\n None => {\n\n current.insert(merge_field, incoming_val);\n\n }\n\n Some(current_val) => merge_value(current_val, incoming_val),\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/event/merge.rs", "rank": 18, "score": 405675.4551152024 }, { "content": "pub fn table_to_map<'a, K, V>(t: LuaTable<'a>) -> LuaResult<BTreeMap<K, V>>\n\nwhere\n\n K: From<String> + Ord,\n\n V: FromLua<'a>,\n\n{\n\n let mut map = BTreeMap::new();\n\n for pair in t.pairs() {\n\n let (k, v): (String, V) = pair?;\n\n map.insert(k.into(), v);\n\n }\n\n Ok(map)\n\n}\n\n\n", "file_path": "src/transforms/lua/v2/interop/util.rs", "rank": 19, "score": 403939.7347867954 }, { "content": "/// As defined by CRI\n\nfn transform_cri_message() -> crate::Result<Box<dyn Transform>> {\n\n let mut rp_config = RegexParserConfig::default();\n\n // message field\n\n rp_config.regex =\n\n r\"^(?P<timestamp>.*) (?P<stream>(stdout|stderr)) (?P<multiline_tag>(P|F)) (?P<message>.*)$\"\n\n .to_owned();\n\n // drop field\n\n rp_config.types.insert(\n\n event::log_schema().timestamp_key().clone(),\n\n \"timestamp|%+\".to_owned(),\n\n );\n\n // stream is a string\n\n // message is a string\n\n RegexParser::build(&rp_config).map_err(|e| {\n\n format!(\n\n \"Failed in creating message regex transform with error: {:?}\",\n\n e\n\n )\n\n .into()\n\n })\n", "file_path": "src/sources/kubernetes/message_parser.rs", "rank": 20, "score": 401726.0723497294 }, { "content": "fn into_message(event: Event) -> String {\n\n event\n\n .as_log()\n\n .get(&event::log_schema().message_key())\n\n .unwrap()\n\n .to_string_lossy()\n\n}\n\n\n", "file_path": "tests/topology.rs", "rank": 21, "score": 393170.18681132846 }, { "content": "fn parse_tags(input: &str) -> Result<BTreeMap<String, String>, ParserError> {\n\n let input = input.trim();\n\n let mut result = BTreeMap::new();\n\n\n\n if input.is_empty() {\n\n return Ok(result);\n\n }\n\n\n\n let pairs = input.split(',').collect::<Vec<_>>();\n\n for pair in pairs {\n\n if pair.is_empty() {\n\n continue;\n\n }\n\n let pair = pair.trim();\n\n let parts = pair.split('=').collect::<Vec<_>>();\n\n if parts.len() != 2 {\n\n return Err(ParserError::Malformed {\n\n s: 
\"expected 2 values separated by '='\",\n\n });\n\n }\n", "file_path": "src/sources/prometheus/parser.rs", "rank": 22, "score": 391433.45142924576 }, { "content": "fn maybe_set_id(key: Option<impl AsRef<str>>, doc: &mut serde_json::Value, event: &Event) {\n\n if let Some(val) = key.and_then(|k| event.as_log().get(&k.as_ref().into())) {\n\n let val = val.to_string_lossy();\n\n\n\n doc.as_object_mut()\n\n .unwrap()\n\n .insert(\"_id\".into(), json!(val));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::sinks::util::retries::RetryAction;\n\n use crate::Event;\n\n use http::{Response, StatusCode};\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn sets_id_from_custom_field() {\n", "file_path": "src/sinks/elasticsearch.rs", "rank": 23, "score": 389966.59555449465 }, { "content": "pub fn random_map(max_size: usize, field_len: usize) -> HashMap<String, String> {\n\n let size = thread_rng().gen_range(0, max_size);\n\n\n\n (0..size)\n\n .map(move |_| (random_string(field_len), random_string(field_len)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 24, "score": 388469.9055450766 }, { "content": "pub fn user_namespace<S: AsRef<str>>(namespace: S) -> String {\n\n \"user-\".to_owned() + namespace.as_ref()\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 25, "score": 386617.77478779136 }, { "content": "/// If F returns None, retries it after some time, for some count.\n\n/// Panics if all trys fail.\n\nfn retry<F: FnMut() -> Result<R, E>, R, E: std::fmt::Debug>(mut f: F) -> R {\n\n let mut last_error = None;\n\n let started = std::time::Instant::now();\n\n while started.elapsed() < std::time::Duration::from_secs(WAIT_LIMIT as u64) {\n\n match f() {\n\n Ok(data) => return data,\n\n Err(error) => {\n\n error!(?error);\n\n last_error = Some(error);\n\n }\n\n }\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n debug!(\"Retrying\");\n\n }\n\n panic!(\"Timed out while waiting. 
Last error: {:?}\", last_error);\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 26, "score": 386296.1494853658 }, { "content": "fn hash_map<H: Hasher>(hasher: &mut H, map: &BTreeMap<Atom, Value>) {\n\n for (key, val) in map.iter() {\n\n hasher.write(key.as_bytes());\n\n hash_value(hasher, val);\n\n }\n\n}\n\n\n", "file_path": "src/event/discriminant.rs", "rank": 27, "score": 385785.2828934755 }, { "content": "#[must_use]\n\npub fn echo(kube: &Kube, name: &str, message: &str) -> KubePod {\n\n // Start echo\n\n let echo = echo_create(ECHO_YAML, kube, name, message);\n\n\n\n // Wait for success state\n\n kube.wait_for_success(echo.clone());\n\n\n\n echo\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 28, "score": 381909.0632327346 }, { "content": "pub fn wait_for(mut f: impl FnMut() -> bool) {\n\n let wait = std::time::Duration::from_millis(5);\n\n let limit = std::time::Duration::from_secs(5);\n\n let mut attempts = 0;\n\n while !f() {\n\n std::thread::sleep(wait);\n\n attempts += 1;\n\n if attempts * wait > limit {\n\n panic!(\"timed out while waiting\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 29, "score": 380041.8494896939 }, { "content": "fn events_to_string(name: &str, events: &Vec<Event>) -> String {\n\n if events.len() > 1 {\n\n format!(\n\n \" {}s:\\n {}\",\n\n name,\n\n events\n\n .iter()\n\n .map(|e| event_to_string(e))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n \")\n\n )\n\n } else {\n\n events\n\n .first()\n\n .map(|e| format!(\" {}: {}\", name, event_to_string(e)))\n\n .unwrap_or(format!(\" no {}\", name))\n\n }\n\n}\n\n\n", "file_path": "src/topology/unit_test.rs", "rank": 30, "score": 378315.0036839692 }, { "content": "fn parse_tags(input: &str) -> Result<BTreeMap<String, String>, ParseError> {\n\n if !input.starts_with('#') || input.len() < 2 {\n\n return Err(ParseError::Malformed(\n\n \"expected non empty '#'-prefixed tags component\",\n\n ));\n\n }\n\n\n\n let mut result = 
BTreeMap::new();\n\n\n\n let chunks = input[1..].split(',').collect::<Vec<_>>();\n\n for chunk in chunks {\n\n let pair: Vec<_> = chunk.split(':').collect();\n\n let key = &pair[0];\n\n // same as in telegraf plugin:\n\n // if tag value is not provided, use \"true\"\n\n // https://github.com/influxdata/telegraf/blob/master/plugins/inputs/statsd/datadog.go#L152\n\n let value = pair.get(1).unwrap_or(&\"true\");\n\n result.insert((*key).to_owned(), (*value).to_owned());\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/sources/statsd/parser.rs", "rank": 31, "score": 378044.7182607356 }, { "content": "fn encode_fields(fields: HashMap<String, Field>, output: &mut String) {\n\n for (key, value) in fields.into_iter() {\n\n encode_string(key.to_string(), output);\n\n output.push('=');\n\n match value {\n\n Field::String(s) => {\n\n output.push('\"');\n\n for c in s.chars() {\n\n if \"\\\\\\\"\".contains(c) {\n\n output.push('\\\\');\n\n }\n\n output.push(c);\n\n }\n\n output.push('\"');\n\n }\n\n Field::Float(f) => output.push_str(&f.to_string()),\n\n Field::UnsignedInt(i) => {\n\n output.push_str(&i.to_string());\n\n output.push('u');\n\n }\n\n };\n\n output.push(',');\n\n }\n\n\n\n // remove last ','\n\n output.pop();\n\n}\n\n\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 32, "score": 377382.7121010275 }, { "content": "fn remove_ending_newline(mut event: Event) -> Event {\n\n if let Some(Value::Bytes(msg)) = event\n\n .as_mut_log()\n\n .get_mut(&event::log_schema().message_key())\n\n {\n\n if msg.ends_with(&['\\n' as u8]) {\n\n msg.truncate(msg.len() - 1);\n\n }\n\n }\n\n event\n\n}\n\n\n", "file_path": "src/sources/kubernetes/mod.rs", "rank": 33, "score": 375541.03385174886 }, { "content": "fn render_template(s: &str, event: &Event) -> Result<String, TransformError> {\n\n let template = Template::from(s);\n\n let name = template.render(&event).map_err(|e| {\n\n TransformError::RenderError(format!(\n\n \"Keys ({:?}) do not exist on the event. 
Dropping event.\",\n\n e\n\n ))\n\n })?;\n\n Ok(String::from_utf8_lossy(&name.to_vec()).to_string())\n\n}\n\n\n", "file_path": "src/transforms/log_to_metric.rs", "rank": 34, "score": 373776.05818650895 }, { "content": "/// Used to map `timestamp` to `@timestamp`.\n\nfn map_timestamp(mut event: Event) -> impl Future<Item = Event, Error = ()> {\n\n let log = event.as_mut_log();\n\n\n\n if let Some(ts) = log.remove(&crate::event::log_schema().timestamp_key()) {\n\n log.insert(\"@timestamp\", ts);\n\n }\n\n\n\n if let Some(host) = log.remove(&crate::event::log_schema().host_key()) {\n\n log.insert(\"os.host\", host);\n\n }\n\n\n\n futures01::future::ok(event)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::event::Event;\n\n use crate::sinks::util::test::{build_test_server, load_sink};\n\n use crate::test_util;\n", "file_path": "src/sinks/sematext_logs.rs", "rank": 35, "score": 365945.12533231097 }, { "content": "pub fn interpolate(input: &str, vars: &HashMap<String, String>) -> String {\n\n let re = Regex::new(r\"\\$\\$|\\$(\\w+)|\\$\\{(\\w+)(?::-([^}]+)?)?\\}\").unwrap();\n\n re.replace_all(input, |caps: &Captures<'_>| {\n\n caps.get(1)\n\n .or_else(|| caps.get(2))\n\n .map(|m| m.as_str())\n\n .map(|name| {\n\n vars.get(name).map(|val| val.as_str()).unwrap_or_else(|| {\n\n caps.get(3).map(|m| m.as_str()).unwrap_or_else(|| {\n\n warn!(\"unknown env var in config: {:?}\", name);\n\n \"\"\n\n })\n\n })\n\n })\n\n .unwrap_or(\"$\")\n\n .to_string()\n\n })\n\n .into_owned()\n\n}\n\n\n", "file_path": "src/topology/config/vars.rs", "rank": 36, "score": 365562.44312088937 }, { "content": "fn group_metrics(packet: &str) -> Result<IndexMap<ParserHeader, Vec<String>>, ParserError> {\n\n // This will organise text into groups of lines, wrt to the format spec:\n\n // https://prometheus.io/docs/instrumenting/exposition_formats/#text-format-details\n\n //\n\n // All lines for a given metric must be provided as one single group,\n\n // with the optional HELP 
and TYPE lines first (in no particular order).\n\n // Beyond that, reproducible sorting in repeated expositions is preferred\n\n // but not required, i.e. do not sort if the computational cost is prohibitive.\n\n //\n\n // Each line must have a unique combination of a metric name and labels.\n\n // Otherwise, the ingestion behavior is undefined.\n\n let mut result = IndexMap::new();\n\n\n\n let mut current_header = ParserHeader {\n\n name: \"\".into(),\n\n kind: ParserType::Untyped,\n\n };\n\n\n\n for line in packet.lines() {\n\n let line = line.trim();\n", "file_path": "src/sources/prometheus/parser.rs", "rank": 37, "score": 365204.57196132734 }, { "content": "// Splits the given input by a separator.\n\n// If the separator is `None`, then it will split on whitespace.\n\npub fn split(input: &str, separator: Option<String>) -> Vec<&str> {\n\n match separator {\n\n Some(separator) => input.split(&separator).collect(),\n\n None => input.split_whitespace().collect(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::split;\n\n use super::SplitConfig;\n\n use crate::event::{LogEvent, Value};\n\n use crate::{\n\n topology::config::{TransformConfig, TransformContext},\n\n Event,\n\n };\n\n use string_cache::DefaultAtom as Atom;\n\n\n\n #[test]\n\n fn split_whitespace() {\n", "file_path": "src/transforms/split.rs", "rank": 38, "score": 359087.92439638136 }, { "content": "pub fn log_schema() -> &'static LogSchema {\n\n LOG_SCHEMA.get().unwrap_or(&LOG_SCHEMA_DEFAULT)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Getters, Setters)]\n\n#[serde(default)]\n\npub struct LogSchema {\n\n #[serde(default = \"LogSchema::default_message_key\")]\n\n #[getset(get = \"pub\", set = \"pub(crate)\")]\n\n message_key: Atom,\n\n #[serde(default = \"LogSchema::default_timestamp_key\")]\n\n #[getset(get = \"pub\", set = \"pub(crate)\")]\n\n timestamp_key: Atom,\n\n #[serde(default = \"LogSchema::default_host_key\")]\n\n #[getset(get = \"pub\", set = 
\"pub(crate)\")]\n\n host_key: Atom,\n\n #[getset(get = \"pub\", set = \"pub(crate)\")]\n\n kubernetes_key: Atom,\n\n}\n\n\n", "file_path": "src/event/mod.rs", "rank": 39, "score": 355679.47680838883 }, { "content": "fn sanitize_key(key: &str) -> String {\n\n let s = key.replace(\"/\", \"-\");\n\n let s = WHITESPACE.replace_all(&s, \"_\");\n\n let s = NONALPHANUM.replace_all(&s, \"\");\n\n s.into()\n\n}\n\n\n", "file_path": "src/sources/statsd/parser.rs", "rank": 40, "score": 354357.52855882386 }, { "content": "/// Merges `incoming` value into `current` value.\n\n///\n\n/// Will concatenate `Bytes` and overwrite the rest value kinds.\n\npub fn merge_value(current: &mut Value, incoming: Value) {\n\n match (current, incoming) {\n\n (Value::Bytes(current), Value::Bytes(ref incoming)) => current.extend_from_slice(incoming),\n\n (current, incoming) => *current = incoming,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn assert_merge_value(\n\n current: impl Into<Value>,\n\n incoming: impl Into<Value>,\n\n expected: impl Into<Value>,\n\n ) {\n\n let mut merged = current.into();\n\n merge_value(&mut merged, incoming.into());\n\n assert_eq!(merged, expected.into());\n\n }\n", "file_path": "src/event/merge.rs", "rank": 41, "score": 347314.2216529832 }, { "content": "fn to_fields(value: f64) -> HashMap<String, Field> {\n\n let fields: HashMap<String, Field> = vec![(\"value\".to_owned(), Field::Float(value))]\n\n .into_iter()\n\n .collect();\n\n fields\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::event::metric::{Metric, MetricKind, MetricValue};\n\n use chrono::offset::TimeZone;\n\n use pretty_assertions::assert_eq;\n\n\n\n fn ts() -> DateTime<Utc> {\n\n Utc.ymd(2018, 11, 14).and_hms_nano(8, 9, 10, 11)\n\n }\n\n\n\n fn tags() -> BTreeMap<String, String> {\n\n vec![\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 42, "score": 347254.8094429576 }, { "content": "pub fn encode_event(mut event: Event, encoding: 
&EncodingConfig<Encoding>) -> Option<Bytes> {\n\n encoding.apply_rules(&mut event);\n\n let log = event.into_log();\n\n\n\n let b = match encoding.codec {\n\n Encoding::Json => serde_json::to_vec(&log),\n\n Encoding::Text => {\n\n let bytes = log\n\n .get(&event::log_schema().message_key())\n\n .map(|v| v.as_bytes().to_vec())\n\n .unwrap_or_default();\n\n Ok(bytes)\n\n }\n\n };\n\n\n\n b.map(|mut b| {\n\n b.push(b'\\n');\n\n Bytes::from(b)\n\n })\n\n .map_err(|error| error!(message = \"Unable to encode.\", %error))\n\n .ok()\n\n}\n", "file_path": "src/sinks/util/mod.rs", "rank": 43, "score": 346330.8667348791 }, { "content": "fn transform_file() -> crate::Result<Box<dyn Transform>> {\n\n let mut config = RegexParserConfig::default();\n\n\n\n config.field = Some(\"file\".into());\n\n\n\n config.regex = r\"^\".to_owned()\n\n + LOG_DIRECTORY\n\n + r\"(?P<pod_uid>[^/]*)/(?P<container_name>[^/]*)/[0-9]*[.]log$\";\n\n\n\n // this field is implementation depended so remove it\n\n config.drop_field = true;\n\n\n\n // pod_uid is a string\n\n // container_name is a string\n\n RegexParser::build(&config).map_err(|e| {\n\n format!(\n\n \"Failed in creating file regex transform with error: {:?}\",\n\n e\n\n )\n\n .into()\n\n })\n\n}\n\n\n", "file_path": "src/sources/kubernetes/mod.rs", "rank": 44, "score": 344418.5362920646 }, { "content": "fn event_to_string(event: &Event) -> String {\n\n match event {\n\n Event::Log(log) => serde_json::to_string(&log).unwrap_or_else(|_| \"{}\".into()),\n\n Event::Metric(metric) => serde_json::to_string(&metric).unwrap_or_else(|_| \"{}\".into()),\n\n }\n\n}\n\n\n", "file_path": "src/topology/unit_test.rs", "rank": 45, "score": 343124.83888526034 }, { "content": "pub fn encode_event(encoding: &EncodingConfigWithDefault<Encoding>, mut event: Event) -> Vec<u8> {\n\n encoding.apply_rules(&mut event);\n\n let log = event.into_log();\n\n match encoding.codec {\n\n Encoding::Ndjson => serde_json::to_vec(&log).expect(\"Unable to encode event as 
JSON.\"),\n\n Encoding::Text => log\n\n .get(&event::log_schema().message_key())\n\n .map(|v| v.to_string_lossy().into_bytes())\n\n .unwrap_or_default(),\n\n }\n\n}\n\n\n\nasync fn write_event_to_file(\n\n file: &mut File,\n\n event: Event,\n\n encoding: &EncodingConfigWithDefault<Encoding>,\n\n) -> Result<(), std::io::Error> {\n\n let mut buf = encode_event(encoding, event);\n\n buf.push(b'\\n');\n\n file.write_all(&buf[..]).await\n", "file_path": "src/sinks/file/mod.rs", "rank": 46, "score": 343114.91632594226 }, { "content": "pub fn random_lines(len: usize) -> impl Iterator<Item = String> {\n\n std::iter::repeat(()).map(move |_| random_string(len))\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 47, "score": 341046.04292255023 }, { "content": "fn create_event(line: Bytes, host_key: &str, hostname: &Option<String>) -> Event {\n\n let mut event = Event::from(line);\n\n\n\n if let Some(hostname) = &hostname {\n\n event.as_mut_log().insert(host_key, hostname.clone());\n\n }\n\n\n\n event\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::event;\n\n use futures01::sync::mpsc;\n\n use futures01::Async::*;\n\n use std::io::Cursor;\n\n use tokio01::runtime::current_thread::Runtime;\n\n\n\n #[test]\n", "file_path": "src/sources/stdin.rs", "rank": 48, "score": 339745.4029007512 }, { "content": "fn map_eq(this: &BTreeMap<Atom, Value>, other: &BTreeMap<Atom, Value>) -> bool {\n\n if this.len() != other.len() {\n\n return false;\n\n }\n\n\n\n this.iter()\n\n .zip(other.iter())\n\n .all(|((key1, value1), (key2, value2))| key1 == key2 && value_eq(value1, value2))\n\n}\n\n\n\nimpl Hash for Discriminant {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n for value in &self.values {\n\n match value {\n\n Some(value) => {\n\n state.write_u8(1);\n\n hash_value(state, value);\n\n }\n\n None => state.write_u8(0),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/event/discriminant.rs", "rank": 49, "score": 337931.4819852599 }, { "content": "/// Takes in an 
Event and returns a CacheEntry to place into the LRU cache containing\n\n/// all relevant information for the fields that need matching against according to the\n\n/// specified FieldMatchConfig.\n\nfn build_cache_entry(event: &Event, fields: &FieldMatchConfig) -> CacheEntry {\n\n match &fields {\n\n FieldMatchConfig::MatchFields(fields) => {\n\n let mut entry = Vec::new();\n\n for field_name in fields.iter() {\n\n if let Some(value) = event.as_log().get(&field_name) {\n\n entry.push(Some((type_id_for_value(&value), value.as_bytes())));\n\n } else {\n\n entry.push(None);\n\n }\n\n }\n\n CacheEntry::Match(entry)\n\n }\n\n FieldMatchConfig::IgnoreFields(fields) => {\n\n let mut entry = Vec::new();\n\n\n\n for (field_name, value) in event.as_log().all_fields() {\n\n if !fields.contains(&field_name) {\n\n entry.push((\n\n field_name.clone(),\n", "file_path": "src/transforms/dedupe.rs", "rank": 50, "score": 336351.55487084185 }, { "content": "fn render_timestamp(src: &str, event: &Event) -> String {\n\n let timestamp = match event {\n\n Event::Log(log) => log\n\n .get(&event::log_schema().timestamp_key())\n\n .and_then(Value::as_timestamp),\n\n _ => None,\n\n };\n\n if let Some(ts) = timestamp {\n\n ts.format(src).to_string()\n\n } else {\n\n Utc::now().format(src).to_string()\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Template {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_str(TemplateVisitor)\n\n }\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 51, "score": 336046.00520724023 }, { "content": "pub fn sink_dead() -> MockSinkConfig<DeadSink<Event>> {\n\n MockSinkConfig::new(DeadSink::new(), false)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 52, "score": 335542.9674040848 }, { "content": "fn truncate_string_at(s: &str, maxlen: usize) -> Cow<str> {\n\n if s.len() >= maxlen {\n\n let mut len = maxlen - ELLIPSIS.len();\n\n while !s.is_char_boundary(len) {\n\n 
len -= 1;\n\n }\n\n format!(\"{}{}\", &s[..len], ELLIPSIS).into()\n\n } else {\n\n s.into()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::RegexParserConfig;\n\n use crate::event::{LogEvent, Value};\n\n use crate::{\n\n topology::config::{TransformConfig, TransformContext},\n\n Event,\n\n };\n", "file_path": "src/transforms/regex_parser.rs", "rank": 53, "score": 330674.67524209706 }, { "content": "/// Contains several regexes that can parse common forms of pod_uid.\n\n/// On the first message, regexes are tried out one after the other until\n\n/// first succesfull one has been found. After that that regex will be\n\n/// always used.\n\n///\n\n/// If nothing succeeds the message is still passed.\n\nfn transform_pod_uid() -> crate::Result<ApplicableTransform> {\n\n let mut regexes = Vec::new();\n\n\n\n let namespace_regex = r\"(?P<pod_namespace>[0-9a-z.\\-]*)\";\n\n let name_regex = r\"(?P<pod_name>[0-9a-z.\\-]*)\";\n\n // TODO: rename to pod_uid?\n\n let uid_regex = r\"(?P<object_uid>([0-9A-Fa-f]{8}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{12}|[0-9A-Fa-f]{32}))\";\n\n\n\n // Definition of pod_uid has been well defined since Kubernetes 1.14 with https://github.com/kubernetes/kubernetes/pull/74441\n\n\n\n // Minikube 1.15, MicroK8s 1.15,1.14,1.16 , DigitalOcean 1.16 , Google Kubernetes Engine 1.13, 1.14, EKS 1.14\n\n // format: namespace_name_UID\n\n regexes.push(format!(\n\n \"^{}_{}_{}$\",\n\n namespace_regex, name_regex, uid_regex\n\n ));\n\n\n\n // EKS 1.13 , AKS 1.13.12, MicroK8s 1.13\n\n // If everything else fails, try to at least parse out uid from somewhere.\n\n // This is somewhat robust as UUID format is hard to create by accident\n", "file_path": "src/sources/kubernetes/mod.rs", "rank": 54, "score": 329295.7572373885 }, { "content": "pub fn sink(channel_size: usize) -> (Receiver<Event>, MockSinkConfig<Sender<Event>>) {\n\n let (tx, rx) = futures01::sync::mpsc::channel(channel_size);\n\n let sink = 
MockSinkConfig::new(tx, true);\n\n (rx, sink)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 55, "score": 328830.14695988805 }, { "content": "pub fn parse(packet: &str) -> Result<Vec<Metric>, ParserError> {\n\n let mut result = Vec::new();\n\n\n\n for (header, group) in group_metrics(packet)? {\n\n // just a header without measurements\n\n if group.is_empty() {\n\n continue;\n\n }\n\n\n\n match header.kind {\n\n ParserType::Counter => {\n\n for line in group {\n\n let metric = parse_metric(&line)?;\n\n let tags = if !metric.tags.is_empty() {\n\n Some(metric.tags)\n\n } else {\n\n None\n\n };\n\n\n\n let counter = Metric {\n", "file_path": "src/sources/prometheus/parser.rs", "rank": 56, "score": 327865.04613177606 }, { "content": "#[derive(PartialEq, Debug, Clone)]\n\nstruct FieldValue(Value);\n\n\n\n// Since we aren't using Eq feature in the evmap, we can impl Eq.\n\nimpl Eq for FieldValue {}\n\n\n\npub struct KubernetesPodMetadata {\n\n metadata: ReadHandle<Bytes, Box<(Atom, FieldValue)>>,\n\n pod_uid: Atom,\n\n}\n\n\n\nimpl Transform for KubernetesPodMetadata {\n\n fn transform(&mut self, mut event: Event) -> Option<Event> {\n\n let log = event.as_mut_log();\n\n\n\n if let Some(Value::Bytes(pod_uid)) = log.get(&self.pod_uid) {\n\n let pod_uid = pod_uid.clone();\n\n\n\n let found = self.metadata.get_and(&pod_uid, |fields| {\n\n for pair in fields {\n\n log.insert(pair.0.clone(), (pair.1).0.clone());\n", "file_path": "src/transforms/kubernetes/mod.rs", "rank": 57, "score": 327560.9858435172 }, { "content": "#[derive(Debug)]\n\nstruct DockerMessageTransformer {\n\n json_parser: JsonParser,\n\n atom_time: Atom,\n\n atom_log: Atom,\n\n}\n\n\n\nimpl DockerMessageTransformer {\n\n fn new() -> Self {\n\n let mut config = JsonParserConfig::default();\n\n\n\n // Drop so that it's possible to detect if message is in json format\n\n config.drop_invalid = true;\n\n\n\n config.drop_field = true;\n\n\n\n DockerMessageTransformer {\n\n json_parser: config.into(),\n\n 
atom_time: Atom::from(\"time\"),\n\n atom_log: Atom::from(\"log\"),\n\n }\n", "file_path": "src/sources/kubernetes/message_parser.rs", "rank": 58, "score": 326570.7191035197 }, { "content": "fn event_to_json(event: LogEvent, indexed_fields: &[Atom], timestamp: i64) -> JsonValue {\n\n let fields = indexed_fields\n\n .iter()\n\n .filter_map(|field| event.get(field).map(|value| (field, value.clone())))\n\n .collect::<LogEvent>();\n\n\n\n json!({\n\n \"fields\": fields,\n\n \"event\": event,\n\n \"time\": timestamp\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::event::{self, Event};\n\n use crate::sinks::util::http::HttpSink;\n\n use serde::Deserialize;\n\n use std::collections::BTreeMap;\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 59, "score": 325605.6032714801 }, { "content": "pub fn build_unit_tests(config: &mut super::Config) -> Result<Vec<UnitTest>, Vec<String>> {\n\n let mut tests = vec![];\n\n let mut errors = vec![];\n\n\n\n let expansions = config.expand_macros()?;\n\n config\n\n .tests\n\n .iter()\n\n .for_each(|test| match build_unit_test(test, &expansions, config) {\n\n Ok(t) => tests.push(t),\n\n Err(errs) => {\n\n let mut test_err = errs.join(\"\\n\");\n\n // Indent all line breaks\n\n test_err = test_err.replace(\"\\n\", \"\\n \");\n\n test_err.insert_str(0, &format!(\"Failed to build test '{}':\\n \", test.name));\n\n errors.push(test_err);\n\n }\n\n });\n\n\n\n if errors.is_empty() {\n", "file_path": "src/topology/unit_test.rs", "rank": 60, "score": 325359.896483325 }, { "content": "pub fn parse(packet: &str) -> Result<Metric, ParseError> {\n\n // https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/#datagram-format\n\n let key_and_body = packet.splitn(2, ':').collect::<Vec<_>>();\n\n if key_and_body.len() != 2 {\n\n return Err(ParseError::Malformed(\n\n \"should be key and body with ':' separator\",\n\n ));\n\n }\n\n let (key, body) = (key_and_body[0], key_and_body[1]);\n\n\n\n let parts = 
body.split('|').collect::<Vec<_>>();\n\n if parts.len() < 2 {\n\n return Err(ParseError::Malformed(\n\n \"body should have at least two pipe separated components\",\n\n ));\n\n }\n\n\n\n let name = sanitize_key(key);\n\n let metric_type = parts[1];\n\n\n", "file_path": "src/sources/statsd/parser.rs", "rank": 61, "score": 323260.52368077403 }, { "content": "fn parse_value(input: &str) -> Result<f64, ParserError> {\n\n let input = input.trim();\n\n let value = match input {\n\n \"Nan\" => std::f64::NAN,\n\n \"+Inf\" => std::f64::INFINITY,\n\n \"-Inf\" => std::f64::NEG_INFINITY,\n\n s => s.parse().with_context(|| InvalidFloat { s: input })?,\n\n };\n\n\n\n Ok(value)\n\n}\n\n\n", "file_path": "src/sources/prometheus/parser.rs", "rank": 62, "score": 317644.8408316618 }, { "content": "fn get_header<'a>(header_map: &'a HeaderMap, name: &str) -> Result<&'a str, ErrorMessage> {\n\n if let Some(header_value) = header_map.get(name) {\n\n header_value\n\n .to_str()\n\n .map_err(|e| header_error_message(name, &e.to_string()))\n\n } else {\n\n Err(header_error_message(name, \"Header does not exist\"))\n\n }\n\n}\n\n\n", "file_path": "src/sources/logplex.rs", "rank": 63, "score": 317520.74557602894 }, { "content": "pub fn load_sink<T>(config: &str) -> crate::Result<(T, SinkContext, Runtime)>\n\nwhere\n\n for<'a> T: Deserialize<'a> + SinkConfig,\n\n{\n\n let sink_config: T = toml::from_str(config)?;\n\n let rt = crate::test_util::runtime();\n\n let cx = SinkContext::new_test(rt.executor());\n\n\n\n Ok((sink_config, cx, rt))\n\n}\n\n\n", "file_path": "src/sinks/util/test.rs", "rank": 64, "score": 316399.1392182619 }, { "content": "fn echo_create(template: &str, kube: &Kube, name: &str, message: &str) -> KubePod {\n\n kube.create(\n\n Api::v1Pod,\n\n template\n\n .replace(ECHO_NAME, name)\n\n .replace(ARGS_MARKER, format!(\"{}\", message).as_str())\n\n .as_str(),\n\n )\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 65, "score": 316164.9788892189 }, { "content": 
"fn line_to_event(line: String) -> Event {\n\n let parts = line.splitn(8, ' ').collect::<Vec<&str>>();\n\n\n\n if parts.len() == 8 {\n\n let timestamp = parts[2];\n\n let hostname = parts[3];\n\n let app_name = parts[4];\n\n let proc_id = parts[5];\n\n let message = parts[7];\n\n\n\n let mut event = Event::from(message);\n\n let log = event.as_mut_log();\n\n\n\n if let Ok(ts) = timestamp.parse::<DateTime<Utc>>() {\n\n log.insert(event::log_schema().timestamp_key().clone(), ts);\n\n }\n\n\n\n log.insert(event::log_schema().host_key().clone(), hostname);\n\n\n\n log.insert(\"app_name\", app_name);\n", "file_path": "src/sources/logplex.rs", "rank": 66, "score": 315141.0745068494 }, { "content": "fn json_parse_object(value: JsonValue) -> Result<Event, ErrorMessage> {\n\n let mut event = Event::new_empty_log();\n\n let log = event.as_mut_log();\n\n log.insert(event::log_schema().timestamp_key().clone(), Utc::now()); // Add timestamp\n\n match value {\n\n JsonValue::Object(map) => {\n\n for (k, v) in map {\n\n log.insert(k, v);\n\n }\n\n Ok(event)\n\n }\n\n _ => Err(json_error(format!(\n\n \"Expected Object, got {}\",\n\n json_value_to_type_string(&value)\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sources/http.rs", "rank": 67, "score": 314031.4190849372 }, { "content": "pub fn parse(input: &str) -> Vec<&str> {\n\n let simple = is_not::<_, _, (&str, nom::error::ErrorKind)>(\" \\t[\\\"\");\n\n let string = delimited(\n\n tag(\"\\\"\"),\n\n map(opt(escaped(is_not(\"\\\"\\\\\"), '\\\\', one_of(\"\\\"\\\\\"))), |o| {\n\n o.unwrap_or(\"\")\n\n }),\n\n tag(\"\\\"\"),\n\n );\n\n let bracket = delimited(\n\n tag(\"[\"),\n\n map(opt(escaped(is_not(\"]\\\\\"), '\\\\', one_of(\"]\\\\\"))), |o| {\n\n o.unwrap_or(\"\")\n\n }),\n\n tag(\"]\"),\n\n );\n\n\n\n // fall back to returning the rest of the input, if any\n\n let remainder = verify(rest, |s: &str| !s.is_empty());\n\n let field = alt((bracket, string, simple, remainder));\n", "file_path": "src/transforms/tokenizer.rs", 
"rank": 68, "score": 311426.39180520084 }, { "content": "fn body_to_lines(mut body: BytesMut) -> impl Iterator<Item = Result<Bytes, ErrorMessage>> {\n\n let mut decoder = BytesDelimitedCodec::new(b'\\n');\n\n std::iter::from_fn(move || {\n\n match decoder.decode_eof(&mut body) {\n\n Err(e) => Some(Err(ErrorMessage::new(\n\n StatusCode::BAD_REQUEST,\n\n format!(\"Bad request: {}\", e),\n\n ))),\n\n Ok(Some(b)) => Some(Ok(b)),\n\n Ok(None) => None, //actually done\n\n }\n\n })\n\n .filter(|s| match s {\n\n //filter empty lines\n\n Ok(b) => !b.is_empty(),\n\n _ => true,\n\n })\n\n}\n\n\n", "file_path": "src/sources/http.rs", "rank": 69, "score": 310660.9200651413 }, { "content": "fn prometheus(urls: Vec<String>, interval: u64, out: mpsc::Sender<Event>) -> super::Source {\n\n let out = out.sink_map_err(|e| error!(\"error sending metric: {:?}\", e));\n\n\n\n let task = Interval::new(Instant::now(), Duration::from_secs(interval))\n\n .map_err(|e| error!(\"timer error: {:?}\", e))\n\n .map(move |_| futures01::stream::iter_ok(urls.clone()))\n\n .flatten()\n\n .map(move |url| {\n\n let https = HttpsConnector::new(4).expect(\"TLS initialization failed\");\n\n let client = hyper::Client::builder().build(https);\n\n\n\n let request = hyper::Request::get(&url)\n\n .body(hyper::Body::empty())\n\n .expect(\"error creating request\");\n\n\n\n client\n\n .request(request)\n\n .and_then(|response| response.into_body().concat2())\n\n .map(|body| {\n\n let packet = String::from_utf8_lossy(&body);\n", "file_path": "src/sources/prometheus/mod.rs", "rank": 70, "score": 306953.2297364265 }, { "content": "pub fn random_string(len: usize) -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(len)\n\n .collect::<String>()\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 71, "score": 306019.73516952846 }, { "content": "fn encode_distribution(values: &[f64], counts: &[u32]) -> Option<HashMap<String, Field>> {\n\n if values.len() != counts.len() {\n\n return None;\n\n 
}\n\n\n\n let mut samples = Vec::new();\n\n for (v, c) in values.iter().zip(counts.iter()) {\n\n for _ in 0..*c {\n\n samples.push(*v);\n\n }\n\n }\n\n\n\n if samples.is_empty() {\n\n return None;\n\n }\n\n\n\n if samples.len() == 1 {\n\n let val = samples[0];\n\n return Some(\n\n vec![\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 72, "score": 305689.13132016617 }, { "content": "pub fn validate_host(host: &str) -> crate::Result<()> {\n\n let uri = Uri::try_from(host).context(super::UriParseError)?;\n\n\n\n match uri.scheme_part() {\n\n Some(_) => Ok(()),\n\n None => Err(Box::new(BuildError::UriMissingScheme)),\n\n }\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 73, "score": 305446.42388588033 }, { "content": "fn decode_message(body: FullBody, header_map: HeaderMap) -> Result<Vec<Event>, ErrorMessage> {\n\n // Deal with headers\n\n let msg_count = match usize::from_str(get_header(&header_map, \"Logplex-Msg-Count\")?) {\n\n Ok(v) => v,\n\n Err(e) => return Err(header_error_message(\"Logplex-Msg-Count\", &e.to_string())),\n\n };\n\n let frame_id = get_header(&header_map, \"Logplex-Frame-Id\")?;\n\n let drain_token = get_header(&header_map, \"Logplex-Drain-Token\")?;\n\n info!(message = \"Handling logplex request\", %msg_count, %frame_id, %drain_token);\n\n\n\n // Deal with body\n\n let events = body_to_events(body);\n\n\n\n if events.len() != msg_count {\n\n let error_msg = format!(\n\n \"Parsed event count does not match message count header: {} vs {}\",\n\n events.len(),\n\n msg_count\n\n );\n\n\n", "file_path": "src/sources/logplex.rs", "rank": 74, "score": 304824.5425832731 }, { "content": "fn json_parse_array_of_object(value: JsonValue) -> Result<Vec<Event>, ErrorMessage> {\n\n match value {\n\n JsonValue::Array(v) => v\n\n .into_iter()\n\n .map(json_parse_object)\n\n .collect::<Result<_, _>>(),\n\n JsonValue::Object(map) => {\n\n //treat like an array of one object\n\n Ok(vec![json_parse_object(JsonValue::Object(map))?])\n\n }\n\n _ 
=> Err(json_error(format!(\n\n \"Expected Array or Object, got {}.\",\n\n json_value_to_type_string(&value)\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sources/http.rs", "rank": 75, "score": 303507.2758929992 }, { "content": "pub fn file_source(\n\n config: &FileConfig,\n\n data_dir: PathBuf,\n\n out: mpsc::Sender<Event>,\n\n) -> super::Source {\n\n let (shutdown_tx, shutdown_rx) = std::sync::mpsc::channel();\n\n\n\n let ignore_before = config\n\n .ignore_older\n\n .map(|secs| SystemTime::now() - Duration::from_secs(secs));\n\n let glob_minimum_cooldown = Duration::from_millis(config.glob_minimum_cooldown);\n\n\n\n let file_server = FileServer {\n\n include: config.include.clone(),\n\n exclude: config.exclude.clone(),\n\n max_read_bytes: config.max_read_bytes,\n\n start_at_beginning: config.start_at_beginning,\n\n ignore_before,\n\n max_line_bytes: config.max_line_bytes,\n\n data_dir,\n", "file_path": "src/sources/file/mod.rs", "rank": 76, "score": 302082.62696270854 }, { "content": "fn load(config: &str) -> Result<Vec<String>, Vec<String>> {\n\n let rt = vector::runtime::Runtime::single_threaded().unwrap();\n\n Config::load(config.as_bytes())\n\n .and_then(|c| topology::builder::build_pieces(&c, rt.executor()))\n\n .map(|(_topology, warnings)| warnings)\n\n}\n\n\n\n#[cfg(all(\n\n feature = \"sources-socket\",\n\n feature = \"transforms-sampler\",\n\n feature = \"sinks-socket\"\n\n))]\n", "file_path": "tests/config.rs", "rank": 77, "score": 298575.5217384559 }, { "content": "fn tcp_json_sink(address: String) -> SocketSinkConfig {\n\n SocketSinkConfig::make_tcp_config(address, EncodingConfig::from(Encoding::Json), None)\n\n}\n", "file_path": "tests/syslog.rs", "rank": 78, "score": 298253.98954182654 }, { "content": "fn encode_tags(tags: BTreeMap<String, String>, output: &mut String) {\n\n let sorted = tags\n\n // sort by key\n\n .iter()\n\n .collect::<BTreeMap<_, _>>();\n\n\n\n for (key, value) in sorted {\n\n if key.is_empty() || value.is_empty() {\n\n 
continue;\n\n }\n\n encode_string(key.to_string(), output);\n\n output.push('=');\n\n encode_string(value.to_string(), output);\n\n output.push(',');\n\n }\n\n\n\n // remove last ','\n\n output.pop();\n\n}\n\n\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 79, "score": 297494.8797964389 }, { "content": "/// Iterates over all paths in form \"a.b[0].c[1]\" in alphabetical order\n\n/// and their corresponding values.\n\npub fn all_fields<'a>(\n\n fields: &'a BTreeMap<Atom, Value>,\n\n) -> impl Iterator<Item = (Atom, &'a Value)> + Serialize {\n\n FieldsIter::new(fields)\n\n}\n\n\n", "file_path": "src/event/util/log/all_fields.rs", "rank": 80, "score": 297453.54188280465 }, { "content": "pub fn random_maps(\n\n max_size: usize,\n\n field_len: usize,\n\n) -> impl Iterator<Item = HashMap<String, String>> {\n\n iter::repeat(()).map(move |_| random_map(max_size, field_len))\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 81, "score": 296638.03193418076 }, { "content": "pub fn logs(kube: &Kube, vector: &KubeDaemon) -> Vec<Value> {\n\n let mut logs = Vec::new();\n\n for daemon_instance in kube.list(&vector) {\n\n debug!(message=\"daemon_instance\",name=%daemon_instance.metadata.name);\n\n logs.extend(\n\n kube.logs(daemon_instance.metadata.name.as_str())\n\n .into_iter()\n\n .filter_map(|s| serde_json::from_slice::<Value>(s.as_ref()).ok()),\n\n );\n\n }\n\n logs\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 82, "score": 294181.6919468183 }, { "content": "pub fn random_events_with_stream(\n\n len: usize,\n\n count: usize,\n\n) -> (Vec<Event>, impl Stream<Item = Event, Error = ()>) {\n\n random_events_with_stream_generic(count, move || Event::from(random_string(len)))\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 83, "score": 292653.8644449544 }, { "content": "pub fn sink_failing_healthcheck(\n\n channel_size: usize,\n\n) -> (Receiver<Event>, MockSinkConfig<Sender<Event>>) {\n\n let (tx, rx) = 
futures01::sync::mpsc::channel(channel_size);\n\n let sink = MockSinkConfig::new(tx, false);\n\n (rx, sink)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 84, "score": 292337.8850893845 }, { "content": "#[typetag::serde(tag = \"type\")]\n\npub trait TransformConfig: core::fmt::Debug {\n\n fn build(&self, cx: TransformContext) -> crate::Result<Box<dyn transforms::Transform>>;\n\n\n\n fn input_type(&self) -> DataType;\n\n\n\n fn output_type(&self) -> DataType;\n\n\n\n fn transform_type(&self) -> &'static str;\n\n\n\n /// Allows a transform configuration to expand itself into multiple \"child\"\n\n /// transformations to replace it. This allows a transform to act as a macro\n\n /// for various patterns.\n\n fn expand(&mut self) -> crate::Result<Option<IndexMap<String, Box<dyn TransformConfig>>>> {\n\n Ok(None)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TransformContext {\n\n pub(super) exec: TaskExecutor,\n", "file_path": "src/topology/config/mod.rs", "rank": 85, "score": 291921.4225706794 }, { "content": "// TODO: many more cases to handle:\n\n// octet framing (i.e. 
num bytes as ascii string prefix) with and without delimiters\n\n// null byte delimiter in place of newline\n\nfn event_from_str(host_key: &str, default_host: Option<Bytes>, line: &str) -> Option<Event> {\n\n trace!(\n\n message = \"Received line.\",\n\n bytes = &field::display(line.len())\n\n );\n\n\n\n let line = line.trim();\n\n let parsed = syslog_loose::parse_message_with_year(line, resolve_year);\n\n let mut event = Event::from(&parsed.msg[..]);\n\n\n\n if let Some(host) = &parsed.hostname {\n\n event.as_mut_log().insert(host_key, host.clone());\n\n } else if let Some(default_host) = default_host {\n\n event.as_mut_log().insert(host_key, default_host);\n\n }\n\n\n\n let timestamp = parsed\n\n .timestamp\n\n .map(|ts| ts.into())\n\n .unwrap_or_else(Utc::now);\n", "file_path": "src/sources/syslog.rs", "rank": 86, "score": 291468.12359335675 }, { "content": "pub fn collect_n<T>(mut rx: mpsc::Receiver<T>, n: usize) -> impl Future<Item = Vec<T>, Error = ()> {\n\n let mut events = Vec::new();\n\n\n\n future::poll_fn(move || {\n\n while events.len() < n {\n\n let e = try_ready!(rx.poll()).unwrap();\n\n events.push(e);\n\n }\n\n Ok(Async::Ready(mem::replace(&mut events, Vec::new())))\n\n })\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 87, "score": 290608.1978028577 }, { "content": "fn header_error_message(name: &str, msg: &str) -> ErrorMessage {\n\n ErrorMessage::new(\n\n StatusCode::BAD_REQUEST,\n\n format!(\"Invalid request header {:?}: {:?}\", name, msg),\n\n )\n\n}\n\n\n", "file_path": "src/sources/logplex.rs", "rank": 88, "score": 290299.58560840087 }, { "content": "fn validate_headers(headers: &Option<IndexMap<String, String>>) -> crate::Result<()> {\n\n if let Some(map) = headers {\n\n for (name, value) in map {\n\n HeaderName::from_bytes(name.as_bytes()).with_context(|| InvalidHeaderName { name })?;\n\n HeaderValue::from_bytes(value.as_bytes())\n\n .with_context(|| InvalidHeaderValue { value })?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": 
"src/sinks/http.rs", "rank": 89, "score": 289999.9709587281 }, { "content": "pub fn random_nested_events_with_stream(\n\n len: usize,\n\n breadth: usize,\n\n depth: usize,\n\n count: usize,\n\n) -> (Vec<Event>, impl Stream<Item = Event, Error = ()>) {\n\n random_events_with_stream_generic(count, move || {\n\n let mut log = LogEvent::new();\n\n\n\n let tree = random_pseudonested_map(len, breadth, depth);\n\n for (k, v) in tree.into_iter() {\n\n log.insert(k, v);\n\n }\n\n\n\n Event::Log(log)\n\n })\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 90, "score": 289025.02394129575 }, { "content": "fn get_mut_value<'a, I>(mut value: &'a mut Value, mut path_iter: I) -> Option<&'a mut Value>\n\nwhere\n\n I: Iterator<Item = PathComponent>,\n\n{\n\n loop {\n\n match (path_iter.next(), value) {\n\n (None, value) => return Some(value),\n\n (Some(PathComponent::Key(ref key)), Value::Map(map)) => match map.get_mut(key) {\n\n None => return None,\n\n Some(nested_value) => {\n\n value = nested_value;\n\n }\n\n },\n\n (Some(PathComponent::Index(index)), Value::Array(array)) => {\n\n match array.get_mut(index) {\n\n None => return None,\n\n Some(nested_value) => {\n\n value = nested_value;\n\n }\n\n }\n", "file_path": "src/event/util/log/get_mut.rs", "rank": 91, "score": 288847.8904676618 }, { "content": "// Make a header pair from a key-value string pair\n\nfn make_header((name, value): (&String, &String)) -> crate::Result<(HeaderName, HeaderValue)> {\n\n Ok((\n\n HeaderName::from_bytes(name.as_bytes())?,\n\n HeaderValue::from_str(&value)?,\n\n ))\n\n}\n\n\n", "file_path": "src/sinks/gcp/cloud_storage.rs", "rank": 92, "score": 288356.82426179055 }, { "content": "pub fn receive_events<S>(stream: S) -> Receiver<Event>\n\nwhere\n\n S: Stream<Item = Event> + Send + 'static,\n\n <S as Stream>::Error: std::fmt::Debug,\n\n{\n\n let runtime = runtime();\n\n\n\n let count = Arc::new(AtomicUsize::new(0));\n\n let count_clone = Arc::clone(&count);\n\n\n\n let (trigger, tripwire) = 
Tripwire::new();\n\n\n\n let events = stream\n\n .take_until(tripwire)\n\n .inspect(move |_| {\n\n count_clone.fetch_add(1, Ordering::Relaxed);\n\n })\n\n .map_err(|e| panic!(\"{:?}\", e))\n\n .collect();\n\n\n", "file_path": "src/test_util.rs", "rank": 93, "score": 288332.1558032078 }, { "content": "fn encode_events(events: Vec<Metric>, namespace: &str) -> String {\n\n let mut output = String::new();\n\n for event in events.into_iter() {\n\n let fullname = encode_namespace(namespace, &event.name);\n\n let ts = encode_timestamp(event.timestamp);\n\n let tags = event.tags.clone();\n\n match event.value {\n\n MetricValue::Counter { value } => {\n\n let fields = to_fields(value);\n\n\n\n influx_line_protocol(fullname, \"counter\", tags, Some(fields), ts, &mut output)\n\n }\n\n MetricValue::Gauge { value } => {\n\n let fields = to_fields(value);\n\n\n\n influx_line_protocol(fullname, \"gauge\", tags, Some(fields), ts, &mut output);\n\n }\n\n MetricValue::Set { values } => {\n\n let fields = to_fields(values.len() as f64);\n\n\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 94, "score": 285753.4290458023 }, { "content": "fn default_test_input_type() -> String {\n\n \"raw\".to_string()\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\n#[serde(deny_unknown_fields)]\n\npub struct TestOutput {\n\n pub extract_from: String,\n\n pub conditions: Option<Vec<TestCondition>>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(untagged)]\n\npub enum TestCondition {\n\n Embedded(Box<dyn conditions::ConditionConfig>),\n\n NoTypeEmbedded(conditions::CheckFieldsConfig),\n\n String(String),\n\n}\n\n\n\n// Helper methods for programming construction during tests\n", "file_path": "src/topology/config/mod.rs", "rank": 95, "score": 285471.2075688987 }, { "content": "fn build_event(host_key: &str, received_from: Option<Bytes>, line: &str) -> Option<Event> {\n\n let mut event = Event::from(line);\n\n if let Some(host) = received_from {\n\n 
event.as_mut_log().insert(host_key, host);\n\n }\n\n trace!(message = \"Received one event.\", ?event);\n\n Some(event)\n\n}\n\n\n", "file_path": "src/sources/socket/unix.rs", "rank": 96, "score": 285196.6783977791 }, { "content": "fn pathbuf_to_string(path: &PathBuf) -> crate::Result<&str> {\n\n path.to_str()\n\n .ok_or_else(|| KafkaError::InvalidPath { path: path.into() }.into())\n\n}\n", "file_path": "src/kafka.rs", "rank": 97, "score": 285137.99530303717 }, { "content": "pub fn receive(addr: &SocketAddr) -> Receiver<String> {\n\n let runtime = runtime();\n\n\n\n let listener = TcpListener::bind(addr).unwrap();\n\n\n\n let count = Arc::new(AtomicUsize::new(0));\n\n let count_clone = Arc::clone(&count);\n\n\n\n let (trigger, tripwire) = Tripwire::new();\n\n\n\n let lines = listener\n\n .incoming()\n\n .take_until(tripwire)\n\n .map(|socket| FramedRead::new(socket, LinesCodec::new()))\n\n .flatten()\n\n .inspect(move |_| {\n\n count_clone.fetch_add(1, Ordering::Relaxed);\n\n })\n\n .map_err(|e| panic!(\"{:?}\", e))\n\n .collect();\n\n\n\n let handle = futures01::sync::oneshot::spawn(lines, &runtime.executor());\n\n Receiver {\n\n handle,\n\n count,\n\n trigger,\n\n _runtime: runtime,\n\n }\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 98, "score": 284315.1635941627 }, { "content": "pub fn start_vector<'a>(\n\n kube: &Kube,\n\n user_namespace: &str,\n\n container_name: impl Into<Option<&'a str>>,\n\n config: &str,\n\n) -> KubeDaemon {\n\n let vector = create_vector(kube, user_namespace, container_name, None, config);\n\n\n\n // Wait for running state\n\n kube.wait_for_running(vector.clone());\n\n\n\n vector\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 99, "score": 283764.38665868284 } ]
Rust
server/prisma-rs/libs/database-inspector/tests/tests.rs
otrebu/prisma
298be5c919119847bb8d102d6b16672edd06b2c5
#![allow(non_snake_case)] #![allow(unused)] use barrel::{backend::Sqlite as Squirrel, types, Migration}; use database_inspector::*; use rusqlite::{Connection, Result, NO_PARAMS}; use std::{thread, time}; const SCHEMA: &str = "database_inspector_test"; #[test] fn all_columns_types_must_work() { let inspector = setup(|mut migration| { migration.create_table("User", |t| { t.add_column("int", types::integer()); t.add_column("float", types::float()); t.add_column("boolean", types::boolean()); t.add_column("string1", types::text()); t.add_column("string2", types::varchar(1)); t.add_column("date_time", types::date()); }); }); let result = inspector.introspect(&SCHEMA.to_string()); let table = result.table("User").unwrap(); let expected_columns = vec![ Column { name: "int".to_string(), tpe: ColumnType::Int, is_required: true, foreign_key: None, sequence: None, }, Column { name: "float".to_string(), tpe: ColumnType::Float, is_required: true, foreign_key: None, sequence: None, }, Column { name: "boolean".to_string(), tpe: ColumnType::Boolean, is_required: true, foreign_key: None, sequence: None, }, Column { name: "string1".to_string(), tpe: ColumnType::String, is_required: true, foreign_key: None, sequence: None, }, Column { name: "string2".to_string(), tpe: ColumnType::String, is_required: true, foreign_key: None, sequence: None, }, Column { name: "date_time".to_string(), tpe: ColumnType::DateTime, is_required: true, foreign_key: None, sequence: None, }, ]; assert_eq!(table.columns, expected_columns); } #[test] fn is_required_must_work() { let inspector = setup(|mut migration| { migration.create_table("User", |t| { t.add_column("column1", types::integer().nullable(false)); t.add_column("column2", types::integer().nullable(true)); }); }); let result = inspector.introspect(&SCHEMA.to_string()); let user_table = result.table("User").unwrap(); let expected_columns = vec![ Column { name: "column1".to_string(), tpe: ColumnType::Int, is_required: true, foreign_key: None, sequence: 
None, }, Column { name: "column2".to_string(), tpe: ColumnType::Int, is_required: false, foreign_key: None, sequence: None, }, ]; assert_eq!(user_table.columns, expected_columns); } #[test] fn foreign_keys_must_work() { let inspector = setup(|mut migration| { migration.create_table("City", |t| { t.add_column("id", types::primary()); }); migration.create_table("User", |t| { t.add_column("city", types::foreign("City(id)")); }); }); let result = inspector.introspect(&SCHEMA.to_string()); let user_table = result.table("User").unwrap(); let expected_columns = vec![Column { name: "city".to_string(), tpe: ColumnType::Int, is_required: true, foreign_key: Some(ForeignKey { table: "City".to_string(), column: "id".to_string(), }), sequence: None, }]; assert_eq!(user_table.columns, expected_columns); } fn setup<F>(mut migrationFn: F) -> Box<DatabaseInspector> where F: FnMut(&mut Migration) -> (), { let connection = Connection::open_in_memory() .and_then(|c| { let server_root = std::env::var("SERVER_ROOT").expect("Env var SERVER_ROOT required but not found."); let path = format!("{}/db", server_root); let database_file_path = dbg!(format!("{}/{}.db", path, SCHEMA)); std::fs::remove_file(database_file_path.clone()); thread::sleep(time::Duration::from_millis(100)); c.execute("ATTACH DATABASE ? AS ?", &[database_file_path.as_ref(), SCHEMA]) .map(|_| c) }) .and_then(|c| { let mut migration = Migration::new().schema(SCHEMA); migrationFn(&mut migration); let full_sql = migration.make::<Squirrel>(); for sql in full_sql.split(";") { dbg!(sql); if (sql != "") { c.execute(&sql, NO_PARAMS).unwrap(); } } Ok(c) }) .unwrap(); Box::new(DatabaseInspectorImpl::new(connection)) }
#![allow(non_snake_case)] #![allow(unused)] use barrel::{backend::Sqlite as Squirrel, types, Migration}; use database_inspector::*; use rusqlite::{Connection, Result, NO_PARAMS}; use std::{thread, time}; const SCHEMA: &str = "database_inspector_test"; #[test] fn all_columns_types_must_work() { let inspector = setup(|mut migration| { migration.create_table("User", |t| { t.add_column("int", types::integer()); t.add_column("float", types::float()); t.add_column("boolean", types::boolean()); t.add_column("string1", types::tex
at, is_required: true, foreign_key: None, sequence: None, }, Column { name: "boolean".to_string(), tpe: ColumnType::Boolean, is_required: true, foreign_key: None, sequence: None, }, Column { name: "string1".to_string(), tpe: ColumnType::String, is_required: true, foreign_key: None, sequence: None, }, Column { name: "string2".to_string(), tpe: ColumnType::String, is_required: true, foreign_key: None, sequence: None, }, Column { name: "date_time".to_string(), tpe: ColumnType::DateTime, is_required: true, foreign_key: None, sequence: None, }, ]; assert_eq!(table.columns, expected_columns); } #[test] fn is_required_must_work() { let inspector = setup(|mut migration| { migration.create_table("User", |t| { t.add_column("column1", types::integer().nullable(false)); t.add_column("column2", types::integer().nullable(true)); }); }); let result = inspector.introspect(&SCHEMA.to_string()); let user_table = result.table("User").unwrap(); let expected_columns = vec![ Column { name: "column1".to_string(), tpe: ColumnType::Int, is_required: true, foreign_key: None, sequence: None, }, Column { name: "column2".to_string(), tpe: ColumnType::Int, is_required: false, foreign_key: None, sequence: None, }, ]; assert_eq!(user_table.columns, expected_columns); } #[test] fn foreign_keys_must_work() { let inspector = setup(|mut migration| { migration.create_table("City", |t| { t.add_column("id", types::primary()); }); migration.create_table("User", |t| { t.add_column("city", types::foreign("City(id)")); }); }); let result = inspector.introspect(&SCHEMA.to_string()); let user_table = result.table("User").unwrap(); let expected_columns = vec![Column { name: "city".to_string(), tpe: ColumnType::Int, is_required: true, foreign_key: Some(ForeignKey { table: "City".to_string(), column: "id".to_string(), }), sequence: None, }]; assert_eq!(user_table.columns, expected_columns); } fn setup<F>(mut migrationFn: F) -> Box<DatabaseInspector> where F: FnMut(&mut Migration) -> (), { let connection = 
Connection::open_in_memory() .and_then(|c| { let server_root = std::env::var("SERVER_ROOT").expect("Env var SERVER_ROOT required but not found."); let path = format!("{}/db", server_root); let database_file_path = dbg!(format!("{}/{}.db", path, SCHEMA)); std::fs::remove_file(database_file_path.clone()); thread::sleep(time::Duration::from_millis(100)); c.execute("ATTACH DATABASE ? AS ?", &[database_file_path.as_ref(), SCHEMA]) .map(|_| c) }) .and_then(|c| { let mut migration = Migration::new().schema(SCHEMA); migrationFn(&mut migration); let full_sql = migration.make::<Squirrel>(); for sql in full_sql.split(";") { dbg!(sql); if (sql != "") { c.execute(&sql, NO_PARAMS).unwrap(); } } Ok(c) }) .unwrap(); Box::new(DatabaseInspectorImpl::new(connection)) }
t()); t.add_column("string2", types::varchar(1)); t.add_column("date_time", types::date()); }); }); let result = inspector.introspect(&SCHEMA.to_string()); let table = result.table("User").unwrap(); let expected_columns = vec![ Column { name: "int".to_string(), tpe: ColumnType::Int, is_required: true, foreign_key: None, sequence: None, }, Column { name: "float".to_string(), tpe: ColumnType::Flo
function_block-random_span
[]
Rust
askama_escape/src/lib.rs
tizgafa/askama
d2c38b22ac54cc145bb2ede2925f7f149c8fd57e
#[macro_use] extern crate cfg_if; use std::fmt::{self, Display, Formatter}; use std::str; #[derive(Debug, PartialEq)] pub enum MarkupDisplay<T> where T: Display, { Safe(T), Unsafe(T), } impl<T> MarkupDisplay<T> where T: Display, { pub fn mark_safe(self) -> MarkupDisplay<T> { match self { MarkupDisplay::Unsafe(t) => MarkupDisplay::Safe(t), _ => self, } } } impl<T> From<T> for MarkupDisplay<T> where T: Display, { fn from(t: T) -> MarkupDisplay<T> { MarkupDisplay::Unsafe(t) } } impl<T> Display for MarkupDisplay<T> where T: Display, { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self { MarkupDisplay::Unsafe(ref t) => escape(&t.to_string()).fmt(f), MarkupDisplay::Safe(ref t) => t.fmt(f), } } } pub fn escape(s: &str) -> Escaped { Escaped { bytes: s.as_bytes(), } } pub struct Escaped<'a> { bytes: &'a [u8], } impl<'a> Display for Escaped<'a> { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { _imp(self.bytes, fmt) } } cfg_if! { if #[cfg(all(target_arch = "x86_64", askama_runtime_simd))] { use std::arch::x86_64::*; use std::mem::{self, size_of}; use std::sync::atomic::{AtomicUsize, Ordering}; #[inline(always)] fn _imp(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { static mut FN: fn(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result = detect; fn detect(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { let fun = if cfg!(askama_runtime_avx) && is_x86_feature_detected!("avx2") { _avx_escape as usize } else if cfg!(askama_runtime_sse) { _sse_escape as usize } else { _escape as usize }; let slot = unsafe { &*(&FN as *const _ as *const AtomicUsize) }; slot.store(fun as usize, Ordering::Relaxed); unsafe { mem::transmute::<usize, fn(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result>(fun)(bytes, fmt) } } unsafe { let slot = &*(&FN as *const _ as * const AtomicUsize); let fun = slot.load(Ordering::Relaxed); mem::transmute::<usize, fn(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result>(fun)(bytes, fmt) } } #[inline(always)] fn sub(a: *const u8, b: *const u8) -> usize 
{ debug_assert!(b <= a); (a as usize) - (b as usize) } } else { #[inline(always)] fn _imp(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { _escape(bytes, fmt) } } } macro_rules! escaping_body { ($i:expr, $start:ident, $fmt:ident, $bytes:ident, $quote:expr) => {{ if $start < $i { #[allow(unused_unsafe)] $fmt.write_str(unsafe { str::from_utf8_unchecked(&$bytes[$start..$i]) })?; } $fmt.write_str($quote)?; $start = $i + 1; }}; } macro_rules! bodies { ($i:expr, $b: ident, $start:ident, $fmt:ident, $bytes:ident, $callback:ident) => { match $b { b'<' => $callback!($i, $start, $fmt, $bytes, "&lt;"), b'>' => $callback!($i, $start, $fmt, $bytes, "&gt;"), b'&' => $callback!($i, $start, $fmt, $bytes, "&amp;"), b'"' => $callback!($i, $start, $fmt, $bytes, "&quot;"), b'\'' => $callback!($i, $start, $fmt, $bytes, "&#x27;"), b'/' => $callback!($i, $start, $fmt, $bytes, "&#x2f;"), _ => (), } }; } macro_rules! write_char { ($i:ident, $ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => {{ let b = *$ptr; if b.wrapping_sub(FLAG_BELOW) <= LEN { bodies!($i, b, $start, $fmt, $bytes, escaping_body); } }}; } #[allow(unused_macros)] macro_rules! mask_body { ($i:expr, $start:ident, $fmt:ident, $bytes:ident, $quote:expr) => {{ let i = $i; escaping_body!(i, $start, $fmt, $bytes, $quote); }}; } #[allow(unused_macros)] macro_rules! mask_bodies { ($mask: ident, $at:ident, $cur: ident, $ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => { let b = *$ptr.add($cur); bodies!($at + $cur, b, $start, $fmt, $bytes, mask_body); $mask ^= 1 << $cur; if $mask == 0 { break; } $cur = $mask.trailing_zeros() as usize; }; } #[allow(unused_macros)] macro_rules! write_mask { ($mask: ident, $ptr: ident, $start_ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => {{ let at = sub($ptr, $start_ptr); let mut cur = $mask.trailing_zeros() as usize; loop { mask_bodies!($mask, at, cur, $ptr, $start, $fmt, $bytes); } debug_assert_eq!(at, sub($ptr, $start_ptr)) }}; } #[allow(unused_macros)] macro_rules! 
write_forward { ($mask: ident, $align: ident, $ptr: ident, $start_ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => {{ if $mask != 0 { let at = sub($ptr, $start_ptr); let mut cur = $mask.trailing_zeros() as usize; while cur < $align { mask_bodies!($mask, at, cur, $ptr, $start, $fmt, $bytes); } debug_assert_eq!(at, sub($ptr, $start_ptr)) } }}; } fn _escape(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { let mut start = 0; for (i, b) in bytes.iter().enumerate() { write_char!(i, b, start, fmt, bytes); } fmt.write_str(unsafe { str::from_utf8_unchecked(&bytes[start..]) })?; Ok(()) } #[cfg(all(target_arch = "x86_64", askama_runtime_avx))] unsafe fn _avx_escape(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { const VECTOR_SIZE: usize = size_of::<__m256i>(); const VECTOR_ALIGN: usize = VECTOR_SIZE - 1; const LOOP_SIZE: usize = 4 * VECTOR_SIZE; let len = bytes.len(); let mut start = 0; if len < VECTOR_SIZE { for (i, b) in bytes.iter().enumerate() { write_char!(i, b, start, fmt, bytes); } if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } return Ok(()); } let v_flag = _mm256_set1_epi8((LEN + 1) as i8); let v_flag_below = _mm256_set1_epi8(FLAG_BELOW as i8); let start_ptr = bytes.as_ptr(); let end_ptr = bytes[len..].as_ptr(); let mut ptr = start_ptr; debug_assert!(start_ptr <= ptr && start_ptr <= end_ptr.sub(VECTOR_SIZE)); if LOOP_SIZE <= len { { let align = start_ptr as usize & VECTOR_ALIGN; if 0 < align { let a = _mm256_loadu_si256(ptr as *const __m256i); let cmp = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let mut mask = _mm256_movemask_epi8(cmp); write_forward!(mask, align, ptr, start_ptr, start, fmt, bytes); ptr = ptr.add(align); debug_assert!(start <= sub(ptr, start_ptr)); } } while ptr <= end_ptr.sub(LOOP_SIZE) { debug_assert_eq!(0, (ptr as usize) % VECTOR_SIZE); let a = _mm256_load_si256(ptr as *const __m256i); let b = _mm256_load_si256(ptr.add(VECTOR_SIZE) as *const __m256i); let c = 
_mm256_load_si256(ptr.add(VECTOR_SIZE * 2) as *const __m256i); let d = _mm256_load_si256(ptr.add(VECTOR_SIZE * 3) as *const __m256i); let cmp_a = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let cmp_b = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(b, v_flag_below)); let cmp_c = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(c, v_flag_below)); let cmp_d = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(d, v_flag_below)); let or1 = _mm256_or_si256(cmp_a, cmp_b); let or2 = _mm256_or_si256(cmp_c, cmp_d); if _mm256_movemask_epi8(_mm256_or_si256(or1, or2)) != 0 { let mut mask = _mm256_movemask_epi8(cmp_a) as i128 | (_mm256_movemask_epi8(cmp_b) as i128) << VECTOR_SIZE | (_mm256_movemask_epi8(cmp_c) as i128) << VECTOR_SIZE * 2 | (_mm256_movemask_epi8(cmp_d) as i128) << VECTOR_SIZE * 3; write_mask!(mask, ptr, start_ptr, start, fmt, bytes); } ptr = ptr.add(LOOP_SIZE); debug_assert!(start <= sub(ptr, start_ptr)); } } while ptr <= end_ptr.sub(VECTOR_SIZE) { let a = _mm256_loadu_si256(ptr as *const __m256i); let cmp = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let mut mask = _mm256_movemask_epi8(cmp); if mask != 0 { write_mask!(mask, ptr, start_ptr, start, fmt, bytes); } ptr = ptr.add(VECTOR_SIZE); debug_assert!(start <= sub(ptr, start_ptr)); } debug_assert!(end_ptr.sub(VECTOR_SIZE) < ptr); if ptr < end_ptr { let a = _mm256_loadu_si256(ptr as *const __m256i); let cmp = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let end = sub(end_ptr, ptr); let mut mask = _mm256_movemask_epi8(cmp); write_forward!(mask, end, ptr, start_ptr, start, fmt, bytes); } debug_assert!(start <= len); if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } Ok(()) } #[cfg(all(target_arch = "x86_64", askama_runtime_sse))] unsafe fn _sse_escape(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { const VECTOR_SIZE: usize = size_of::<__m128i>(); let len = bytes.len(); let mut start = 0; if len < VECTOR_SIZE { for (i, b) in bytes.iter().enumerate() { 
write_char!(i, b, start, fmt, bytes); } if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } return Ok(()); } const NEEDLE_LEN: i32 = 6; let needle = _mm_setr_epi8( b'<' as i8, b'>' as i8, b'&' as i8, b'"' as i8, b'\'' as i8, b'/' as i8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ); let start_ptr = bytes.as_ptr(); let end_ptr = bytes[len..].as_ptr(); let mut ptr = start_ptr; while ptr <= end_ptr.sub(VECTOR_SIZE) { let a = _mm_loadu_si128(ptr as *const __m128i); let cmp = _mm_cmpestrm(needle, NEEDLE_LEN, a, VECTOR_SIZE as i32, 0); let mut mask = _mm_extract_epi16(cmp, 0) as i16; if mask != 0 { write_mask!(mask, ptr, start_ptr, start, fmt, bytes); } ptr = ptr.add(VECTOR_SIZE); debug_assert!(start <= sub(ptr, start_ptr)); } debug_assert!(end_ptr.sub(VECTOR_SIZE) < ptr); if ptr < end_ptr { let end = sub(end_ptr, ptr); let a = _mm_loadu_si128(ptr as *const __m128i); let cmp = _mm_cmpestrm(needle, NEEDLE_LEN, a, VECTOR_SIZE as i32, 0); let mut mask = _mm_extract_epi16(cmp, 0) as i16; write_forward!(mask, end, ptr, start_ptr, start, fmt, bytes); } debug_assert!(start <= len); if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } Ok(()) } const LEN: u8 = b'>' - b'"'; const FLAG_BELOW: u8 = b'"'; #[cfg(test)] mod tests { use super::*; #[test] fn test_escape() { let escapes = "<>&\"'/"; let escaped = "&lt;&gt;&amp;&quot;&#x27;&#x2f;"; let string_long: &str = &"foobar".repeat(1024); assert_eq!(escape("").to_string(), ""); assert_eq!(escape("<&>").to_string(), "&lt;&amp;&gt;"); assert_eq!(escape("bar&").to_string(), "bar&amp;"); assert_eq!(escape("<foo").to_string(), "&lt;foo"); assert_eq!(escape("bar&h").to_string(), "bar&amp;h"); assert_eq!( escape("// my <html> is \"unsafe\" & should be 'escaped'").to_string(), "&#x2f;&#x2f; my &lt;html&gt; is &quot;unsafe&quot; &amp; \ should be &#x27;escaped&#x27;" ); assert_eq!(escape(&"<".repeat(16)).to_string(), "&lt;".repeat(16)); assert_eq!(escape(&"<".repeat(32)).to_string(), 
"&lt;".repeat(32)); assert_eq!(escape(&"<".repeat(64)).to_string(), "&lt;".repeat(64)); assert_eq!(escape(&"<".repeat(128)).to_string(), "&lt;".repeat(128)); assert_eq!(escape(&"<".repeat(1024)).to_string(), "&lt;".repeat(1024)); assert_eq!(escape(&"<".repeat(129)).to_string(), "&lt;".repeat(129)); assert_eq!( escape(&"<".repeat(128 * 2 - 1)).to_string(), "&lt;".repeat(128 * 2 - 1) ); assert_eq!( escape(&"<".repeat(128 * 8 - 1)).to_string(), "&lt;".repeat(128 * 8 - 1) ); assert_eq!(escape(string_long).to_string(), string_long); assert_eq!( escape(&[string_long, "<"].join("")).to_string(), [string_long, "&lt;"].join("") ); assert_eq!( escape(&["<", string_long].join("")).to_string(), ["&lt;", string_long].join("") ); assert_eq!( escape(&escapes.repeat(1024)).to_string(), escaped.repeat(1024) ); assert_eq!( escape(&[string_long, "<", string_long].join("")).to_string(), [string_long, "&lt;", string_long].join("") ); assert_eq!( escape(&[string_long, "<", string_long, escapes, string_long,].join("")).to_string(), [string_long, "&lt;", string_long, escaped, string_long,].join("") ); } }
#[macro_use] extern crate cfg_if; use std::fmt::{self, Display, Formatter}; use std::str; #[derive(Debug, PartialEq)] pub enum MarkupDisplay<T> where T: Display, { Safe(T), Unsafe(T), } impl<T> MarkupDisplay<T> where T: Display, { pub fn mark_safe(self) -> MarkupDisplay<T> { match self { MarkupDisplay::Unsafe(t) => MarkupDisplay::Safe(t), _ => self, } } } impl<T> From<T> for MarkupDisplay<T> where T: Display, { fn from(t: T) -> MarkupDisplay<T> { MarkupDisplay::Unsafe(t) } } impl<T> Display for MarkupDisplay<T> where T: Display, { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self { MarkupDisplay::Unsafe(ref t) => escape(&t.to_string()).fmt(f), MarkupDisplay::Safe(ref t) => t.fmt(f), } } } pub fn escape(s: &str) -> Escaped { Escaped { bytes: s.as_bytes(), } } pub struct Escaped<'a> { bytes: &'a [u8], } impl<'a> Display for Escaped<'a> { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { _imp(self.bytes, fmt) } } cfg_if! { if #[cfg(all(target_arch = "x86_64", askama_runtime_simd))] { use std::arch::x86_64::*; use std::mem::{self, size_of}; use std::sync::atomic::{AtomicUsize, Ordering}; #[inline(always)] fn _imp(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { static mut FN: fn(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result = detect; fn detect(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { let fun = if cfg!(askama_runtime_avx) && is_x86_feature_detected!("avx2") { _avx_escape as usize } else if cfg!(askama_runtime_sse) { _sse_escape as usize } else { _escape as usize }; let slot = unsafe { &*(&FN as *const _ as *const AtomicUsize) }; slot.store(fun as usize, Ordering::Relaxed); unsafe { mem::transmute::<usize, fn(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result>(fun)(bytes, fmt) } } unsafe { let slot = &*(&FN as *const _ as * const AtomicUsize); let fun = slot.load(Ordering::Relaxed); mem::transmute::<usize, fn(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result>(fun)(bytes, fmt) } } #[inline(always)] fn sub(a: *const u8, b: *const u8) -> usize 
{ debug_assert!(b <= a); (a as usize) - (b as usize) } } else { #[inline(always)] fn _imp(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { _escape(bytes, fmt) } } } macro_rules! escaping_body { ($i:expr, $start:ident, $fmt:ident, $bytes:ident, $quote:expr) => {{ if $start < $i { #[allow(unused_unsafe)] $fmt.write_str(unsafe { str::from_utf8_unchecked(&$bytes[$start..$i]) })?; } $fmt.write_str($quote)?; $start = $i + 1; }}; } macro_rules! bodies { ($i:expr, $b: ident, $start:ident, $fmt:ident, $bytes:ident, $callback:ident) => { match $b { b'<' => $callback!($i, $start, $fmt, $bytes, "&lt;"), b'>' => $callback!($i, $start, $fmt, $bytes, "&gt;"), b'&' => $callback!($i, $start, $fmt, $bytes, "&amp;"), b'"' => $callback!($i, $start, $fmt, $bytes, "&quot;"), b'\'' => $callback!($i, $start, $fmt, $bytes, "&#x27;"), b'/' => $callback!($i, $start, $fmt, $bytes, "&#x2f;"), _ => (), } }; } macro_rules! write_char { ($i:ident, $ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => {{ let b = *$ptr; if b.wrapping_sub(FLAG_BELOW) <= LEN { bodies!($i, b, $start, $fmt, $bytes, escaping_body); } }}; } #[allow(unused_macros)] macro_rules! mask_body { ($i:expr, $start:ident, $fmt:ident, $bytes:ident, $quote:expr) => {{ let i = $i; escaping_body!(i, $start, $fmt, $bytes, $quote); }}; } #[allow(unused_macros)] macro_rules! mask_bodies { ($mask: ident, $at:ident, $cur: ident, $ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => { let b = *$ptr.add($cur); bodies!($at + $cur, b, $start, $fmt, $bytes, mask_body); $mask ^= 1 << $cur; if $mask == 0 { break; } $cur = $mask.trailing_zeros() as usize; }; } #[allow(unused_macros)] macro_rules! write_mask { ($mask: ident, $ptr: ident, $start_ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => {{ let at = sub($ptr, $start_ptr); let mut cur = $mask.trailing_zeros() as usize; loop { mask_bodies!($mask, at, cur, $ptr, $start, $fmt, $bytes); } debug_assert_eq!(at, sub($ptr, $start_ptr)) }}; } #[allow(unused_macros)] macro_rules! 
write_forward { ($mask: ident, $align: ident, $ptr: ident, $start_ptr: ident, $start: ident, $fmt: ident, $bytes:ident) => {{ if $mask != 0 { let at = sub($ptr, $start_ptr); let mut cur = $mask.trailing_zeros() as usize; while cur < $align { mask_bodies!($mask, at, cur, $ptr, $start, $fmt, $bytes); } debug_assert_eq!(at, sub($ptr, $start_ptr)) } }}; } fn _escape(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { let mut start = 0; for (i, b) in bytes.iter().enumerate() { write_char!(i, b, start, fmt, bytes); } fmt.write_str(unsafe { str::from_utf8_unchecked(&bytes[start..]) })?; Ok(()) } #[cfg(all(target_arch = "x86_64", askama_runtime_avx))] unsafe fn _avx_escape(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { const VECTOR_SIZE: usize = size_of::<__m256i>(); const VECTOR_ALIGN: usize = VECTOR_SIZE - 1; const LOOP_SIZE: usize = 4 * VECTOR_SIZE; let len = bytes.len(); let mut start = 0; if len < VECTOR_SIZE { for (i, b) in bytes.iter().enumerate() { write_char!(i, b, start, fmt, bytes); } if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } return Ok(()); } let v_flag = _mm25
#[cfg(all(target_arch = "x86_64", askama_runtime_sse))] unsafe fn _sse_escape(bytes: &[u8], fmt: &mut Formatter) -> fmt::Result { const VECTOR_SIZE: usize = size_of::<__m128i>(); let len = bytes.len(); let mut start = 0; if len < VECTOR_SIZE { for (i, b) in bytes.iter().enumerate() { write_char!(i, b, start, fmt, bytes); } if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } return Ok(()); } const NEEDLE_LEN: i32 = 6; let needle = _mm_setr_epi8( b'<' as i8, b'>' as i8, b'&' as i8, b'"' as i8, b'\'' as i8, b'/' as i8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ); let start_ptr = bytes.as_ptr(); let end_ptr = bytes[len..].as_ptr(); let mut ptr = start_ptr; while ptr <= end_ptr.sub(VECTOR_SIZE) { let a = _mm_loadu_si128(ptr as *const __m128i); let cmp = _mm_cmpestrm(needle, NEEDLE_LEN, a, VECTOR_SIZE as i32, 0); let mut mask = _mm_extract_epi16(cmp, 0) as i16; if mask != 0 { write_mask!(mask, ptr, start_ptr, start, fmt, bytes); } ptr = ptr.add(VECTOR_SIZE); debug_assert!(start <= sub(ptr, start_ptr)); } debug_assert!(end_ptr.sub(VECTOR_SIZE) < ptr); if ptr < end_ptr { let end = sub(end_ptr, ptr); let a = _mm_loadu_si128(ptr as *const __m128i); let cmp = _mm_cmpestrm(needle, NEEDLE_LEN, a, VECTOR_SIZE as i32, 0); let mut mask = _mm_extract_epi16(cmp, 0) as i16; write_forward!(mask, end, ptr, start_ptr, start, fmt, bytes); } debug_assert!(start <= len); if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } Ok(()) } const LEN: u8 = b'>' - b'"'; const FLAG_BELOW: u8 = b'"'; #[cfg(test)] mod tests { use super::*; #[test] fn test_escape() { let escapes = "<>&\"'/"; let escaped = "&lt;&gt;&amp;&quot;&#x27;&#x2f;"; let string_long: &str = &"foobar".repeat(1024); assert_eq!(escape("").to_string(), ""); assert_eq!(escape("<&>").to_string(), "&lt;&amp;&gt;"); assert_eq!(escape("bar&").to_string(), "bar&amp;"); assert_eq!(escape("<foo").to_string(), "&lt;foo"); assert_eq!(escape("bar&h").to_string(), "bar&amp;h"); assert_eq!( escape("// 
my <html> is \"unsafe\" & should be 'escaped'").to_string(), "&#x2f;&#x2f; my &lt;html&gt; is &quot;unsafe&quot; &amp; \ should be &#x27;escaped&#x27;" ); assert_eq!(escape(&"<".repeat(16)).to_string(), "&lt;".repeat(16)); assert_eq!(escape(&"<".repeat(32)).to_string(), "&lt;".repeat(32)); assert_eq!(escape(&"<".repeat(64)).to_string(), "&lt;".repeat(64)); assert_eq!(escape(&"<".repeat(128)).to_string(), "&lt;".repeat(128)); assert_eq!(escape(&"<".repeat(1024)).to_string(), "&lt;".repeat(1024)); assert_eq!(escape(&"<".repeat(129)).to_string(), "&lt;".repeat(129)); assert_eq!( escape(&"<".repeat(128 * 2 - 1)).to_string(), "&lt;".repeat(128 * 2 - 1) ); assert_eq!( escape(&"<".repeat(128 * 8 - 1)).to_string(), "&lt;".repeat(128 * 8 - 1) ); assert_eq!(escape(string_long).to_string(), string_long); assert_eq!( escape(&[string_long, "<"].join("")).to_string(), [string_long, "&lt;"].join("") ); assert_eq!( escape(&["<", string_long].join("")).to_string(), ["&lt;", string_long].join("") ); assert_eq!( escape(&escapes.repeat(1024)).to_string(), escaped.repeat(1024) ); assert_eq!( escape(&[string_long, "<", string_long].join("")).to_string(), [string_long, "&lt;", string_long].join("") ); assert_eq!( escape(&[string_long, "<", string_long, escapes, string_long,].join("")).to_string(), [string_long, "&lt;", string_long, escaped, string_long,].join("") ); } }
6_set1_epi8((LEN + 1) as i8); let v_flag_below = _mm256_set1_epi8(FLAG_BELOW as i8); let start_ptr = bytes.as_ptr(); let end_ptr = bytes[len..].as_ptr(); let mut ptr = start_ptr; debug_assert!(start_ptr <= ptr && start_ptr <= end_ptr.sub(VECTOR_SIZE)); if LOOP_SIZE <= len { { let align = start_ptr as usize & VECTOR_ALIGN; if 0 < align { let a = _mm256_loadu_si256(ptr as *const __m256i); let cmp = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let mut mask = _mm256_movemask_epi8(cmp); write_forward!(mask, align, ptr, start_ptr, start, fmt, bytes); ptr = ptr.add(align); debug_assert!(start <= sub(ptr, start_ptr)); } } while ptr <= end_ptr.sub(LOOP_SIZE) { debug_assert_eq!(0, (ptr as usize) % VECTOR_SIZE); let a = _mm256_load_si256(ptr as *const __m256i); let b = _mm256_load_si256(ptr.add(VECTOR_SIZE) as *const __m256i); let c = _mm256_load_si256(ptr.add(VECTOR_SIZE * 2) as *const __m256i); let d = _mm256_load_si256(ptr.add(VECTOR_SIZE * 3) as *const __m256i); let cmp_a = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let cmp_b = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(b, v_flag_below)); let cmp_c = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(c, v_flag_below)); let cmp_d = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(d, v_flag_below)); let or1 = _mm256_or_si256(cmp_a, cmp_b); let or2 = _mm256_or_si256(cmp_c, cmp_d); if _mm256_movemask_epi8(_mm256_or_si256(or1, or2)) != 0 { let mut mask = _mm256_movemask_epi8(cmp_a) as i128 | (_mm256_movemask_epi8(cmp_b) as i128) << VECTOR_SIZE | (_mm256_movemask_epi8(cmp_c) as i128) << VECTOR_SIZE * 2 | (_mm256_movemask_epi8(cmp_d) as i128) << VECTOR_SIZE * 3; write_mask!(mask, ptr, start_ptr, start, fmt, bytes); } ptr = ptr.add(LOOP_SIZE); debug_assert!(start <= sub(ptr, start_ptr)); } } while ptr <= end_ptr.sub(VECTOR_SIZE) { let a = _mm256_loadu_si256(ptr as *const __m256i); let cmp = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let mut mask = _mm256_movemask_epi8(cmp); if mask != 0 { 
write_mask!(mask, ptr, start_ptr, start, fmt, bytes); } ptr = ptr.add(VECTOR_SIZE); debug_assert!(start <= sub(ptr, start_ptr)); } debug_assert!(end_ptr.sub(VECTOR_SIZE) < ptr); if ptr < end_ptr { let a = _mm256_loadu_si256(ptr as *const __m256i); let cmp = _mm256_cmpgt_epi8(v_flag, _mm256_sub_epi8(a, v_flag_below)); let end = sub(end_ptr, ptr); let mut mask = _mm256_movemask_epi8(cmp); write_forward!(mask, end, ptr, start_ptr, start, fmt, bytes); } debug_assert!(start <= len); if start < len { fmt.write_str(str::from_utf8_unchecked(&bytes[start..len]))?; } Ok(()) }
function_block-function_prefixed
[ { "content": "/// Limit string length, appends '...' if truncated\n\npub fn truncate(s: &fmt::Display, len: &usize) -> Result<String> {\n\n let mut s = s.to_string();\n\n if s.len() < *len {\n\n Ok(s)\n\n } else {\n\n s.truncate(*len);\n\n s.push_str(\"...\");\n\n Ok(s)\n\n }\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 1, "score": 242439.16716245635 }, { "content": "/// Count the words in that string\n\npub fn wordcount(s: &fmt::Display) -> Result<usize> {\n\n let s = s.to_string();\n\n\n\n Ok(s.split_whitespace().count())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::f64::INFINITY;\n\n\n\n #[test]\n\n fn test_linebreaks() {\n\n assert_eq!(\n\n linebreaks(&\"Foo\\nBar Baz\").unwrap(),\n\n \"<p>Foo<br/>Bar Baz</p>\"\n\n );\n\n assert_eq!(\n\n linebreaks(&\"Foo\\nBar\\n\\nBaz\").unwrap(),\n\n \"<p>Foo<br/>Bar</p><p>Baz</p>\"\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 2, "score": 214575.3489054205 }, { "content": "/// Centers the value in a field of a given width\n\npub fn center(s: &fmt::Display, l: usize) -> Result<String> {\n\n let s = s.to_string();\n\n let len = s.len();\n\n\n\n if l <= len {\n\n Ok(s)\n\n } else {\n\n let p = l - len;\n\n let q = p / 2;\n\n let r = p % 2;\n\n let mut buf = String::with_capacity(l);\n\n\n\n for _ in 0..q {\n\n buf.push(' ');\n\n }\n\n\n\n buf.push_str(&s);\n\n\n\n for _ in 0..q + r {\n\n buf.push(' ');\n\n }\n\n\n\n Ok(buf)\n\n }\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 3, "score": 201778.13019587848 }, { "content": "/// Indent lines with `width` spaces\n\npub fn indent(s: &fmt::Display, width: &usize) -> Result<String> {\n\n let s = s.to_string();\n\n\n\n let mut indented = String::new();\n\n\n\n for (i, c) in s.char_indices() {\n\n indented.push(c);\n\n\n\n if c == '\\n' && i < s.len() - 1 {\n\n for _ in 0..*width {\n\n indented.push(' ');\n\n }\n\n }\n\n }\n\n\n\n Ok(indented)\n\n}\n\n\n", "file_path": 
"askama_shared/src/filters/mod.rs", "rank": 5, "score": 197897.2754685143 }, { "content": "#[derive(Template)]\n\n#[template(path = \"generics.html\")]\n\nstruct GenericsTemplate<T: std::fmt::Display, U = u8>\n\nwhere\n\n U: std::fmt::Display,\n\n{\n\n t: T,\n\n u: U,\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 6, "score": 189908.12372599993 }, { "content": "/// Replaces line breaks in plain text with appropriate HTML\n\n///\n\n/// A single newline becomes an HTML line break `<br>` and a new line\n\n/// followed by a blank line becomes a paragraph break `<p>`.\n\npub fn linebreaks(s: &fmt::Display) -> Result<String> {\n\n let s = s.to_string();\n\n let linebroken = s.replace(\"\\n\\n\", \"</p><p>\").replace(\"\\n\", \"<br/>\");\n\n\n\n Ok(format!(\"<p>{}</p>\", linebroken))\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 7, "score": 187522.55234885105 }, { "content": "/// Converts all newlines in a piece of plain text to HTML line breaks\n\npub fn linebreaksbr(s: &fmt::Display) -> Result<String> {\n\n let s = s.to_string();\n\n Ok(s.replace(\"\\n\", \"<br/>\"))\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 8, "score": 187521.949920056 }, { "content": "/// Alias for the `lower()` filter\n\npub fn lowercase(s: &fmt::Display) -> Result<String> {\n\n lower(s)\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 9, "score": 187518.26210338812 }, { "content": "/// Converts to lowercase\n\npub fn lower(s: &fmt::Display) -> Result<String> {\n\n let s = s.to_string();\n\n Ok(s.to_lowercase())\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 10, "score": 187518.26210338812 }, { "content": "/// Alias for the `upper()` filter\n\npub fn uppercase(s: &fmt::Display) -> Result<String> {\n\n upper(s)\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 11, "score": 187518.26210338812 }, { "content": "/// Strip leading and trailing whitespace\n\npub fn trim(s: 
&fmt::Display) -> Result<String> {\n\n let s = s.to_string();\n\n Ok(s.trim().to_owned())\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 12, "score": 187518.26210338812 }, { "content": "/// Converts to uppercase\n\npub fn upper(s: &fmt::Display) -> Result<String> {\n\n let s = s.to_string();\n\n Ok(s.to_uppercase())\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 13, "score": 187518.26210338812 }, { "content": "/// Capitalize a value. The first character will be uppercase, all others lowercase.\n\npub fn capitalize(s: &fmt::Display) -> Result<String> {\n\n let mut s = s.to_string();\n\n\n\n match s.get_mut(0..1).map(|s| {\n\n s.make_ascii_uppercase();\n\n &*s\n\n }) {\n\n None => Ok(s),\n\n _ => {\n\n let l = s.len();\n\n match s.get_mut(1..l).map(|s| {\n\n s.make_ascii_lowercase();\n\n &*s\n\n }) {\n\n _ => Ok(s),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 14, "score": 187518.26210338812 }, { "content": "fn escaping_long(b: &mut criterion::Bencher) {\n\n // 5 MB bytes at 20% escape\n\n let string: &str = &[\"a>foo\"; 1024 * 1024].join(\"\");\n\n let no_escape: &str = &\"a\".repeat(5 * 1024 * 1024);\n\n\n\n b.iter(|| {\n\n escape(string).to_string();\n\n escape(no_escape).to_string();\n\n });\n\n}\n\n\n", "file_path": "askama_escape/benches/all.rs", "rank": 15, "score": 166495.50169423982 }, { "content": "fn escaping_short(b: &mut criterion::Bencher) {\n\n // 30 bytes at 20% escape\n\n let string: &str = &[FOO_BAR, FOO_BAR, ESCAPES, FOO_BAR, FOO_BAR].join(\"\");\n\n let no_escape: &str = &[FOO_BAR; 5].join(\"\");\n\n\n\n b.iter(|| {\n\n escape(EMPTY).to_string();\n\n escape(string).to_string();\n\n escape(no_escape).to_string();\n\n });\n\n}\n\n\n", "file_path": "askama_escape/benches/all.rs", "rank": 16, "score": 166495.50169423982 }, { "content": "fn format_long(b: &mut criterion::Bencher) {\n\n // 5 MB bytes at 20% escape\n\n let string: &str = &[\"a>foo\"; 1024 * 
1024].join(\"\");\n\n let no_escape: &str = &\"a\".repeat(5 * 1024 * 1024);\n\n\n\n b.iter(|| {\n\n string.to_string();\n\n no_escape.to_string();\n\n });\n\n}\n", "file_path": "askama_escape/benches/all.rs", "rank": 17, "score": 156597.00924696153 }, { "content": "fn format_short(b: &mut criterion::Bencher) {\n\n // 30 bytes at 20% escape\n\n let string: &str = &[FOO_BAR, FOO_BAR, ESCAPES, FOO_BAR, FOO_BAR].join(\"\");\n\n let no_escape: &str = &[FOO_BAR; 5].join(\"\");\n\n\n\n b.iter(|| {\n\n EMPTY.to_string();\n\n string.to_string();\n\n no_escape.to_string();\n\n });\n\n}\n\n\n", "file_path": "askama_escape/benches/all.rs", "rank": 18, "score": 156597.00924696153 }, { "content": "fn big_table(b: &mut criterion::Bencher, size: &usize) {\n\n let mut table = Vec::with_capacity(*size);\n\n for _ in 0..*size {\n\n let mut inner = Vec::with_capacity(*size);\n\n for i in 0..*size {\n\n inner.push(i);\n\n }\n\n table.push(inner);\n\n }\n\n let ctx = BigTable { table };\n\n b.iter(|| ctx.render().unwrap());\n\n}\n\n\n", "file_path": "testing/benches/all.rs", "rank": 19, "score": 145777.87101496194 }, { "content": "fn functions(c: &mut Criterion) {\n\n c.bench_function(\"Format 60 bytes\", format_short);\n\n c.bench_function(\"Escaping 60 bytes\", escaping_short);\n\n c.bench_function(\"Format 10 MB\", format_long);\n\n c.bench_function(\"Escaping 10 MB\", escaping_long);\n\n}\n\n\n\nstatic FOO_BAR: &str = \"foobar\";\n\nstatic ESCAPES: &str = \"<>&\\\"'/\";\n\nstatic EMPTY: &str = \"\";\n\n\n", "file_path": "askama_escape/benches/all.rs", "rank": 20, "score": 142398.4353024392 }, { "content": "/// Escapes `&`, `<` and `>` in strings\n\npub fn escape<D, I>(i: I) -> Result<MarkupDisplay<D>>\n\nwhere\n\n D: fmt::Display,\n\n MarkupDisplay<D>: From<I>,\n\n{\n\n Ok(i.into())\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 21, "score": 137625.29924662554 }, { "content": "fn is_env_set(name: &str) -> bool {\n\n env::var(name).is_ok()\n\n}\n", "file_path": 
"askama_escape/build.rs", "rank": 22, "score": 135913.803901075 }, { "content": "#[derive(Template)]\n\n#[template(path = \"match-custom-enum.html\")]\n\nstruct MatchCustomEnumTemplate {\n\n color: Color,\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 23, "score": 131951.10461526096 }, { "content": "fn teams(b: &mut criterion::Bencher) {\n\n let teams = Teams {\n\n year: 2015,\n\n teams: vec![\n\n Team {\n\n name: \"Jiangsu\".into(),\n\n score: 43,\n\n },\n\n Team {\n\n name: \"Beijing\".into(),\n\n score: 27,\n\n },\n\n Team {\n\n name: \"Guangzhou\".into(),\n\n score: 22,\n\n },\n\n Team {\n\n name: \"Shandong\".into(),\n\n score: 12,\n\n },\n\n ],\n\n };\n\n b.iter(|| teams.render().unwrap());\n\n}\n\n\n", "file_path": "testing/benches/all.rs", "rank": 24, "score": 131521.72204760613 }, { "content": "#[test]\n\nfn test_match_custom_enum() {\n\n let s = MatchCustomEnumTemplate {\n\n color: Color::Rgb(160, 0, 255),\n\n };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nColorful: #A000FF\\n\");\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 25, "score": 129573.87168016167 }, { "content": "pub fn parse<'a>(src: &'a str, syntax: &'a Syntax<'a>) -> Vec<Node<'a>> {\n\n match parse_template(Input(src.as_bytes()), syntax) {\n\n Ok((left, res)) => {\n\n if !left.is_empty() {\n\n let s = str::from_utf8(left.0).unwrap();\n\n panic!(\"unable to parse template:\\n\\n{:?}\", s);\n\n } else {\n\n res\n\n }\n\n }\n\n Err(nom::Err::Error(err)) => panic!(\"problems parsing template source: {:?}\", err),\n\n Err(nom::Err::Failure(err)) => panic!(\"problems parsing template source: {:?}\", err),\n\n Err(nom::Err::Incomplete(_)) => panic!(\"parsing incomplete\"),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use shared::Syntax;\n\n\n", "file_path": "askama_derive/src/parser.rs", "rank": 26, "score": 121429.0473740453 }, { "content": "/// Build script helper to rebuild crates if contained templates have changed\n\n///\n\n/// Iterates over all files in the 
template directories and writes a\n\n/// `cargo:rerun-if-changed=` line for each of them to stdout.\n\n///\n\n/// This helper method can be used in build scripts (`build.rs`) in crates\n\n/// that have templates, to make sure the crate gets rebuilt when template\n\n/// source code changes.\n\npub fn rerun_if_templates_changed() {\n\n let file = read_config_file();\n\n for template_dir in &shared::Config::new(&file).dirs {\n\n visit_dirs(template_dir, &|e: &DirEntry| {\n\n println!(\"cargo:rerun-if-changed={}\", e.path().to_str().unwrap());\n\n }).unwrap();\n\n }\n\n}\n", "file_path": "askama/src/lib.rs", "rank": 27, "score": 115797.12639001518 }, { "content": "/// Formats arguments according to the specified format\n\n///\n\n/// The first argument to this filter must be a string literal (as in normal\n\n/// Rust). All arguments are passed through to the `format!()`\n\n/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by\n\n/// the Askama code generator.\n\npub fn format() {}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 28, "score": 115791.29651272029 }, { "content": "/// Alias for the `escape()` filter\n\npub fn e<D, I>(i: I) -> Result<MarkupDisplay<D>>\n\nwhere\n\n D: fmt::Display,\n\n MarkupDisplay<D>: From<I>,\n\n{\n\n escape(i)\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 29, "score": 114230.25125999538 }, { "content": "fn functions(c: &mut Criterion) {\n\n c.bench_function(\"Big table\", |b| big_table(b, &100));\n\n c.bench_function(\"Teams\", teams);\n\n}\n\n\n", "file_path": "testing/benches/all.rs", "rank": 30, "score": 113128.82978588963 }, { "content": "#[get(\"/\")]\n\nfn hello() -> HelloTemplate<'static> {\n\n HelloTemplate { name: \"world\" }\n\n}\n\n\n", "file_path": "testing/tests/rocket.rs", "rank": 31, "score": 112443.92445019222 }, { "content": "/// Marks a string (or other `Display` type) as safe\n\n///\n\n/// Use this is you want to allow markup in an expression, or if you 
know\n\n/// that the expression's contents don't need to be escaped.\n\npub fn safe<D, I>(v: I) -> Result<MarkupDisplay<D>>\n\nwhere\n\n D: fmt::Display,\n\n MarkupDisplay<D>: From<I>,\n\n{\n\n let res: MarkupDisplay<D> = v.into();\n\n Ok(res.mark_safe())\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 32, "score": 111777.95875620571 }, { "content": "pub fn read_config_file() -> String {\n\n let root = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n let filename = root.join(CONFIG_FILE_NAME);\n\n if filename.exists() {\n\n fs::read_to_string(&filename)\n\n .expect(&format!(\"unable to read {}\", filename.to_str().unwrap()))\n\n } else {\n\n \"\".to_string()\n\n }\n\n}\n\n\n\nstatic CONFIG_FILE_NAME: &str = \"askama.toml\";\n\nstatic DEFAULT_SYNTAX_NAME: &str = \"default\";\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::env;\n\n use std::path::{Path, PathBuf};\n\n\n", "file_path": "askama_shared/src/lib.rs", "rank": 33, "score": 107314.95630383742 }, { "content": "/// Serialize to JSON (requires `serde-json` feature)\n\n///\n\n/// ## Errors\n\n///\n\n/// This will panic if `S`'s implementation of `Serialize` decides to fail,\n\n/// or if `T` contains a map with non-string keys.\n\npub fn json<S: Serialize>(s: &S) -> Result<MarkupDisplay<String>> {\n\n match serde_json::to_string_pretty(s) {\n\n Ok(s) => Ok(MarkupDisplay::Safe(s)),\n\n Err(e) => Err(Error::from(e)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_json() {\n\n assert_eq!(json(&true).unwrap().to_string(), \"true\");\n\n assert_eq!(json(&\"foo\").unwrap().to_string(), r#\"\"foo\"\"#);\n\n assert_eq!(\n\n json(&vec![\"foo\", \"bar\"]).unwrap().to_string(),\n\n r#\"[\n\n \"foo\",\n\n \"bar\"\n\n]\"#\n\n );\n\n }\n\n}\n", "file_path": "askama_shared/src/filters/json.rs", "rank": 34, "score": 107283.88519334371 }, { "content": "#[allow(non_snake_case)]\n\nfn Input(input: &[u8]) -> Input {\n\n 
nom::types::CompleteByteSlice(input)\n\n}\n\n\n", "file_path": "askama_derive/src/parser.rs", "rank": 35, "score": 106446.88961057551 }, { "content": "#[derive(Template)]\n\n#[template(path = \"match-no-ws.html\")]\n\nstruct MatchNoWhitespace {\n\n foo: Option<usize>,\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 36, "score": 105404.18170033005 }, { "content": "fn split_ws_parts(s: &[u8]) -> Node {\n\n if s.is_empty() {\n\n let rs = str::from_utf8(&s).unwrap();\n\n return Node::Lit(rs, rs, rs);\n\n }\n\n let is_ws = |c: &u8| *c != b' ' && *c != b'\\t' && *c != b'\\r' && *c != b'\\n';\n\n let start = s.iter().position(&is_ws);\n\n let res = if start.is_none() {\n\n (s, &s[0..0], &s[0..0])\n\n } else {\n\n let start = start.unwrap();\n\n let end = s.iter().rposition(&is_ws);\n\n if end.is_none() {\n\n (&s[..start], &s[start..], &s[0..0])\n\n } else {\n\n let end = end.unwrap();\n\n (&s[..start], &s[start..end + 1], &s[end + 1..])\n\n }\n\n };\n\n Node::Lit(\n\n str::from_utf8(res.0).unwrap(),\n\n str::from_utf8(res.1).unwrap(),\n\n str::from_utf8(res.2).unwrap(),\n\n )\n\n}\n\n\n", "file_path": "askama_derive/src/parser.rs", "rank": 37, "score": 104456.52780178718 }, { "content": "#[allow(dead_code)]\n\nenum Color {\n\n Rgb(u32, u32, u32),\n\n GrayScale(u32),\n\n Cmyk(u32, u32, u32, u32),\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 38, "score": 102645.37127661056 }, { "content": "#[derive(Template)]\n\n#[template(path = \"match-literal-num.html\")]\n\nstruct MatchLitNumTemplate {\n\n item: u32,\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 39, "score": 100966.62647753429 }, { "content": "#[test]\n\nfn test_match_no_whitespace() {\n\n let s = MatchNoWhitespace { foo: Some(1) };\n\n assert_eq!(s.render().unwrap(), \"1\");\n\n}\n", "file_path": "testing/tests/matches.rs", "rank": 40, "score": 100721.48071185862 }, { "content": "#[test]\n\nfn test_match_literal() {\n\n let s = MatchLitTemplate { item: \"bar\" };\n\n 
assert_eq!(s.render().unwrap(), \"\\n\\nFound literal bar\\n\");\n\n\n\n let s = MatchLitTemplate { item: \"qux\" };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nElse found qux\\n\");\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 41, "score": 100721.48071185862 }, { "content": "#[test]\n\nfn test_match_option() {\n\n let s = MatchOptTemplate { item: Some(\"foo\") };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nFound literal foo\\n\");\n\n\n\n let s = MatchOptTemplate { item: Some(\"bar\") };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nFound bar\\n\");\n\n\n\n let s = MatchOptTemplate { item: None };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nNot Found\\n\");\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 42, "score": 100721.48071185862 }, { "content": "#[derive(Template)]\n\n#[template(path = \"match-literal.html\")]\n\nstruct MatchLitTemplate<'a> {\n\n item: &'a str,\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 43, "score": 100537.58485151132 }, { "content": "#[derive(Template)]\n\n#[template(path = \"match-opt.html\")]\n\nstruct MatchOptTemplate<'a> {\n\n item: Option<&'a str>,\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 44, "score": 100537.58485151132 }, { "content": "#[test]\n\nfn test_match_ref_deref() {\n\n let s = MatchOptRefTemplate { item: &Some(\"foo\") };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nFound literal foo\\n\");\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 45, "score": 98593.08671409346 }, { "content": "#[test]\n\nfn test_match_literal_num() {\n\n let s = MatchLitNumTemplate { item: 42 };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nFound answer to everything\\n\");\n\n\n\n let s = MatchLitNumTemplate { item: 23 };\n\n assert_eq!(s.render().unwrap(), \"\\n\\nElse found 23\\n\");\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 46, "score": 98593.08671409346 }, { "content": "#[derive(Template)]\n\n#[template(path = \"match-opt.html\")]\n\nstruct 
MatchOptRefTemplate<'a> {\n\n item: &'a Option<&'a str>,\n\n}\n\n\n", "file_path": "testing/tests/matches.rs", "rank": 47, "score": 98389.45046191478 }, { "content": "#[derive(Clone)]\n\nenum DisplayWrap {\n\n Wrapped,\n\n Unwrapped,\n\n}\n\n\n\nimpl Copy for DisplayWrap {}\n\n\n", "file_path": "askama_derive/src/generator.rs", "rank": 48, "score": 97308.05265898388 }, { "content": "fn find_used_templates(input: &TemplateInput, map: &mut HashMap<PathBuf, String>, source: String) {\n\n let mut check = vec![(input.path.clone(), source)];\n\n while let Some((path, source)) = check.pop() {\n\n for n in parse(&source, input.syntax) {\n\n match n {\n\n Node::Extends(Expr::StrLit(extends)) => {\n\n let extends = input.config.find_template(extends, Some(&path));\n\n let source = get_template_source(&extends);\n\n check.push((extends, source));\n\n }\n\n Node::Import(_, import, _) => {\n\n let import = input.config.find_template(import, Some(&path));\n\n let source = get_template_source(&import);\n\n check.push((import, source));\n\n }\n\n _ => {}\n\n }\n\n }\n\n map.insert(path, source);\n\n }\n", "file_path": "askama_derive/src/lib.rs", "rank": 49, "score": 96911.16968835916 }, { "content": "#[test]\n\nfn test_for() {\n\n let s = ForTemplate {\n\n strings: vec![\"A\", \"alfa\", \"1\"],\n\n };\n\n assert_eq!(s.render().unwrap(), \"0. A (first)\\n1. alfa\\n2. 
1\\n\");\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 50, "score": 96239.8616411197 }, { "content": "#[derive(Template)]\n\n#[template(path = \"for.html\")]\n\nstruct ForTemplate<'a> {\n\n strings: Vec<&'a str>,\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 51, "score": 96096.6600199826 }, { "content": "#[derive(Template)]\n\n#[template(path = \"for-range.html\")]\n\nstruct ForRangeTemplate {\n\n init: i32,\n\n end: i32,\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 52, "score": 95969.7220510923 }, { "content": "fn main() {\n\n enable_simd_optimizations();\n\n}\n\n\n", "file_path": "askama_escape/build.rs", "rank": 53, "score": 95808.80948351692 }, { "content": "#[proc_macro_derive(Template, attributes(template))]\n\npub fn derive_template(input: TokenStream) -> TokenStream {\n\n let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n build_template(&ast).parse().unwrap()\n\n}\n\n\n", "file_path": "askama_derive/src/lib.rs", "rank": 54, "score": 95149.21282763619 }, { "content": "#[test]\n\nfn test_let() {\n\n let t = LetTemplate { s: \"foo\" };\n\n assert_eq!(t.render().unwrap(), \"foo\");\n\n}\n\n\n", "file_path": "testing/tests/vars.rs", "rank": 55, "score": 93600.11131895424 }, { "content": "#[test]\n\nfn test_nested_for() {\n\n let alpha = vec![\"a\", \"b\", \"c\"];\n\n let numbers = vec![\"one\", \"two\"];\n\n let s = NestedForTemplate {\n\n seqs: vec![&alpha, &numbers],\n\n };\n\n assert_eq!(s.render().unwrap(), \"1\\n 0a1b2c2\\n 0one1two\");\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 56, "score": 93558.71241428495 }, { "content": "#[test]\n\nfn test_for_range() {\n\n let s = ForRangeTemplate { init: -1, end: 1 };\n\n assert_eq!(s.render().unwrap(), \"foo\\nfoo\\nbar\\nbar\\nfoo\\nbar\\nbar\\n\");\n\n}\n", "file_path": "testing/tests/loops.rs", "rank": 57, "score": 93558.71241428495 }, { "content": "#[test]\n\nfn test_precedence_for() {\n\n let s = PrecedenceTemplate {\n\n strings: vec![\"A\", 
\"alfa\", \"1\"],\n\n };\n\n assert_eq!(s.render().unwrap(), \"0. A2 (first)\\n1. alfa4\\n2. 16\\n\");\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 58, "score": 93558.71241428495 }, { "content": "#[derive(Template)]\n\n#[template(source = \"{% let v = s %}{{ v }}\", ext = \"txt\")]\n\nstruct LetTemplate<'a> {\n\n s: &'a str,\n\n}\n\n\n", "file_path": "testing/tests/vars.rs", "rank": 59, "score": 93437.62516713793 }, { "content": "#[derive(Template)]\n\n#[template(path = \"nested-for.html\")]\n\nstruct NestedForTemplate<'a> {\n\n seqs: Vec<&'a [&'a str]>,\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 60, "score": 93392.45897271152 }, { "content": "#[derive(Template)]\n\n#[template(path = \"precedence-for.html\")]\n\nstruct PrecedenceTemplate<'a> {\n\n strings: Vec<&'a str>,\n\n}\n\n\n", "file_path": "testing/tests/loops.rs", "rank": 61, "score": 93392.45897271152 }, { "content": "#[test]\n\nfn filter_escape() {\n\n let s = TestTemplate {\n\n strvar: \"// my <html> is \\\"unsafe\\\" & should be 'escaped'\".to_string(),\n\n };\n\n assert_eq!(\n\n s.render().unwrap(),\n\n \"&#x2f;&#x2f; my &lt;html&gt; is &quot;unsafe&quot; &amp; \\\n\n should be &#x27;escaped&#x27;\"\n\n );\n\n}\n\n\n", "file_path": "testing/tests/filters.rs", "rank": 62, "score": 93144.68704594688 }, { "content": "#[test]\n\nfn test_escape() {\n\n let s = EscapeTemplate { name: \"<>&\\\"'/\" };\n\n\n\n assert_eq!(\n\n s.render().unwrap(),\n\n \"Hello, &lt;&gt;&amp;&quot;&#x27;&#x2f;!\"\n\n );\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 63, "score": 93144.68704594688 }, { "content": "#[derive(Template)]\n\n#[template(path = \"hello.html\")]\n\nstruct EscapeTemplate<'a> {\n\n name: &'a str,\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 64, "score": 92978.43360437345 }, { "content": "struct FilterLetFilterTemplate {\n\n foo: String,\n\n baz: Baz,\n\n}\n\n\n", "file_path": "testing/tests/filters.rs", "rank": 65, "score": 91193.6670391591 }, { 
"content": "#[test]\n\nfn test_self_method() {\n\n let t = SelfMethodTemplate { s: \"foo\" };\n\n assert_eq!(t.render().unwrap(), \"foo\");\n\n}\n\n\n", "file_path": "testing/tests/methods.rs", "rank": 66, "score": 91161.2277676999 }, { "content": "#[test]\n\nfn test_let_decl() {\n\n let t = LetDeclTemplate {\n\n cond: false,\n\n s: \"bar\",\n\n };\n\n assert_eq!(t.render().unwrap(), \"bar\");\n\n}\n", "file_path": "testing/tests/vars.rs", "rank": 67, "score": 91121.15377274383 }, { "content": "#[derive(Template)]\n\n#[template(source = \"{{ self.get_s() }}\", ext = \"txt\")]\n\nstruct SelfMethodTemplate<'a> {\n\n s: &'a str,\n\n}\n\n\n\nimpl<'a> SelfMethodTemplate<'a> {\n\n fn get_s(&self) -> &str {\n\n self.s\n\n }\n\n}\n\n\n", "file_path": "testing/tests/methods.rs", "rank": 68, "score": 90977.35203556581 }, { "content": "#[derive(Template)]\n\n#[template(path = \"let-decl.html\")]\n\nstruct LetDeclTemplate<'a> {\n\n cond: bool,\n\n s: &'a str,\n\n}\n\n\n", "file_path": "testing/tests/vars.rs", "rank": 69, "score": 90937.36771716339 }, { "content": "fn enable_simd_optimizations() {\n\n if is_env_set(\"CARGO_CFG_ASKAMA_DISABLE_AUTO_SIMD\") {\n\n return;\n\n }\n\n if !is_min_version(\"1.27.0\")\n\n .map(|(yes, _)| yes)\n\n .unwrap_or(false)\n\n {\n\n return;\n\n }\n\n\n\n println!(\"cargo:rustc-cfg=askama_runtime_simd\");\n\n println!(\"cargo:rustc-cfg=askama_runtime_avx\");\n\n println!(\"cargo:rustc-cfg=askama_runtime_sse\");\n\n}\n\n\n", "file_path": "askama_escape/build.rs", "rank": 70, "score": 90683.03540794025 }, { "content": "#[test]\n\nfn test_variables_no_escape() {\n\n let s = VariablesTemplateNoEscape {\n\n strvar: \"foo\",\n\n num: 42,\n\n i18n: \"Iñtërnâtiônàlizætiøn\".to_string(),\n\n };\n\n assert_eq!(\n\n s.render().unwrap(),\n\n \"\\nhello world, foo\\n\\\n\n with number: 42\\n\\\n\n Iñtërnâtiônàlizætiøn is important\\n\\\n\n in vars too: Iñtërnâtiônàlizætiøn\"\n\n );\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 71, "score": 
90683.03540794025 }, { "content": "#[derive(Template)]\n\n#[template(path = \"simple-no-escape.txt\")]\n\nstruct VariablesTemplateNoEscape<'a> {\n\n strvar: &'a str,\n\n num: i64,\n\n i18n: String,\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 72, "score": 90499.20428919923 }, { "content": "/// Absolute value\n\npub fn abs<T>(number: T) -> Result<T>\n\nwhere\n\n T: Signed,\n\n{\n\n Ok(number.abs())\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 73, "score": 89854.2590797398 }, { "content": "/// Casts number to f64\n\npub fn into_f64<T>(number: T) -> Result<f64>\n\nwhere\n\n T: NumCast,\n\n{\n\n number.to_f64().ok_or(Fmt(fmt::Error))\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 74, "score": 89854.2590797398 }, { "content": "/// Casts number to isize\n\npub fn into_isize<T>(number: T) -> Result<isize>\n\nwhere\n\n T: NumCast,\n\n{\n\n number.to_isize().ok_or(Fmt(fmt::Error))\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 75, "score": 89854.2590797398 }, { "content": "#[test]\n\nfn test_use_base_directly() {\n\n let t = BaseTemplate { title: \"Foo\" };\n\n assert_eq!(t.render().unwrap(), \"Foo\\n\\nFoo\\nCopyright 2017\");\n\n}\n\n\n", "file_path": "testing/tests/inheritance.rs", "rank": 76, "score": 88875.54971736262 }, { "content": "#[test]\n\nfn test_filter_let_filter() {\n\n let t = FilterLetFilterTemplate {\n\n foo: \" bar \".to_owned(),\n\n baz: Baz {},\n\n };\n\n assert_eq!(t.render().unwrap(), \"BAR\");\n\n}\n\n\n\n#[derive(Template)]\n\n#[template(\n\n source = \"{{ foo|truncate(10) }}{{ foo|truncate(5) }}\",\n\n ext = \"txt\"\n\n)]\n", "file_path": "testing/tests/filters.rs", "rank": 77, "score": 88823.69769794725 }, { "content": "#[derive(Template)]\n\n#[template(source = \"{{ self.get_s() }} {{ t.get_s() }}\", ext = \"txt\")]\n\nstruct NestedSelfMethodTemplate<'a> {\n\n t: SelfMethodTemplate<'a>,\n\n}\n\n\n\nimpl<'a> NestedSelfMethodTemplate<'a> {\n\n fn get_s(&self) -> 
&str {\n\n \"bar\"\n\n }\n\n}\n\n\n", "file_path": "testing/tests/methods.rs", "rank": 78, "score": 88658.60301653275 }, { "content": "fn identifier(input: Input) -> Result<(Input, &str), nom::Err<Input>> {\n\n if !nom::is_alphabetic(input[0]) && input[0] != b'_' {\n\n return Err(nom::Err::Error(error_position!(\n\n input,\n\n nom::ErrorKind::Custom(0)\n\n )));\n\n }\n\n for (i, ch) in input.iter().enumerate() {\n\n if i == 0 || nom::is_alphanumeric(*ch) || *ch == b'_' {\n\n continue;\n\n }\n\n return Ok((Input(&input[i..]), str::from_utf8(&input[..i]).unwrap()));\n\n }\n\n Ok((Input(&input[1..]), str::from_utf8(&input[..1]).unwrap()))\n\n}\n\n\n\nnamed!(num_lit<Input, &str>, map!(nom::digit,\n\n |s| str::from_utf8(s.0).unwrap()\n\n));\n\n\n", "file_path": "askama_derive/src/parser.rs", "rank": 79, "score": 84816.94898872916 }, { "content": "/// Joins iterable into a string separated by provided argument\n\npub fn join<T, I, S>(input: I, separator: S) -> Result<String>\n\nwhere\n\n T: fmt::Display,\n\n I: Iterator<Item = T>,\n\n S: AsRef<str>,\n\n{\n\n let separator: &str = separator.as_ref();\n\n\n\n let mut rv = String::new();\n\n\n\n for (num, item) in input.enumerate() {\n\n if num > 0 {\n\n rv.push_str(separator);\n\n }\n\n\n\n rv.push_str(&format!(\"{}\", item));\n\n }\n\n\n\n Ok(rv)\n\n}\n\n\n", "file_path": "askama_shared/src/filters/mod.rs", "rank": 80, "score": 79365.3792915395 }, { "content": "#[derive(PartialEq, Eq)]\n\nenum Alphabet {\n\n Alpha,\n\n}\n\n\n\n#[derive(Template)]\n\n#[template(\n\n source = \"{% if x == Alphabet::Alpha %}true{% endif %}\",\n\n ext = \"txt\"\n\n)]\n", "file_path": "testing/tests/simple.rs", "rank": 81, "score": 67928.94088675501 }, { "content": "#[derive(Template)]\n\n#[template(path = \"teams.html\")]\n\nstruct Teams {\n\n year: u16,\n\n teams: Vec<Team>,\n\n}\n\n\n", "file_path": "testing/benches/all.rs", "rank": 82, "score": 65329.37747200836 }, { "content": "struct Team {\n\n name: String,\n\n score: u8,\n\n}\n", 
"file_path": "testing/benches/all.rs", "rank": 83, "score": 65329.37747200836 }, { "content": "enum ContentState {\n\n Any,\n\n Brace(usize),\n\n End(usize),\n\n}\n\n\n", "file_path": "askama_derive/src/parser.rs", "rank": 84, "score": 65283.942700847234 }, { "content": "#[derive(Clone, PartialEq)]\n\nenum AstLevel {\n\n Top,\n\n Block,\n\n Nested,\n\n}\n\n\n\nimpl Copy for AstLevel {}\n\n\n", "file_path": "askama_derive/src/generator.rs", "rank": 85, "score": 65283.942700847234 }, { "content": "struct Holder {\n\n a: usize,\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 86, "score": 63885.26029892218 }, { "content": "#[derive(Template)]\n\n#[template(path = \"if.html\")]\n\nstruct IfTemplate {\n\n cond: bool,\n\n}\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 87, "score": 63885.26029892218 }, { "content": "#[derive(Template)]\n\n#[template(path = \"big-table.html\")]\n\nstruct BigTable {\n\n table: Vec<Vec<usize>>,\n\n}\n\n\n", "file_path": "testing/benches/all.rs", "rank": 88, "score": 63885.26029892218 }, { "content": "#[derive(Template)]\n\n#[template(source = \"foo\", ext = \"txt\")]\n\nstruct Empty;\n\n\n", "file_path": "testing/tests/simple.rs", "rank": 89, "score": 63885.26029892218 }, { "content": "struct Baz {}\n\n\n\nimpl Baz {\n\n fn print(&self, s: &str) -> String {\n\n s.trim().to_owned()\n\n }\n\n}\n\n\n", "file_path": "testing/tests/filters.rs", "rank": 90, "score": 63885.26029892218 }, { "content": "#[derive(Debug)]\n\nenum Writable<'a> {\n\n Lit(&'a str),\n\n Expr(&'a Expr<'a>),\n\n}\n", "file_path": "askama_derive/src/generator.rs", "rank": 91, "score": 63728.55858651838 }, { "content": " trait AssertSendSyncStatic: Send + Sync + 'static {}\n\n impl AssertSendSyncStatic for Error {}\n\n}\n", "file_path": "askama_shared/src/error.rs", "rank": 92, "score": 63354.12981820315 }, { "content": "fn main() {\n\n askama::rerun_if_templates_changed();\n\n}\n", "file_path": "testing/build.rs", "rank": 93, "score": 62870.28686488975 
}, { "content": "/// Main `Template` trait; implementations are generally derived\n\npub trait Template {\n\n /// Helper method which allocates a new `String` and renders into it\n\n fn render(&self) -> Result<String> {\n\n let mut buf = String::new();\n\n self.render_into(&mut buf)?;\n\n Ok(buf)\n\n }\n\n /// Renders the template to the given `writer` buffer\n\n fn render_into(&self, writer: &mut std::fmt::Write) -> Result<()>;\n\n /// Helper function to inspect the template's extension\n\n fn extension() -> Option<&'static str>\n\n where\n\n Self: Sized;\n\n}\n\n\n\npub use askama_derive::*;\n\npub use shared::filters;\n\npub use shared::{read_config_file, Error, MarkupDisplay, Result};\n\n\n\n#[cfg(feature = \"with-iron\")]\n", "file_path": "askama/src/lib.rs", "rank": 94, "score": 62821.31702451084 }, { "content": "#[derive(Template)]\n\n#[template(path = \"filters.html\")]\n\nstruct TestTemplate {\n\n strvar: String,\n\n}\n\n\n", "file_path": "testing/tests/filters.rs", "rank": 95, "score": 62555.229917172066 }, { "content": "struct TruncateFilter {\n\n foo: String,\n\n}\n\n\n", "file_path": "testing/tests/filters.rs", "rank": 96, "score": 62555.229917172066 }, { "content": "#[derive(Template)]\n\n#[template(path = \"compare.html\")]\n\nstruct CompareTemplate {\n\n a: usize,\n\n b: usize,\n\n c: usize,\n\n}\n\n\n", "file_path": "testing/tests/operators.rs", "rank": 97, "score": 62555.229917172066 }, { "content": "#[derive(Template)]\n\n#[template(path = \"deep-nested-macro.html\")]\n\nstruct NestedTemplate;\n\n\n", "file_path": "testing/tests/macro.rs", "rank": 98, "score": 62555.229917172066 }, { "content": "struct Buffer {\n\n // The buffer to generate the code into\n\n buf: String,\n\n // The current level of indentation (in spaces)\n\n indent: u8,\n\n // Whether the output buffer is currently at the start of a line\n\n start: bool,\n\n}\n\n\n\nimpl Buffer {\n\n fn new(indent: u8) -> Self {\n\n Self {\n\n buf: String::new(),\n\n indent,\n\n start: true,\n\n 
}\n\n }\n\n\n\n fn writeln(&mut self, s: &str) {\n\n if s == \"}\" {\n", "file_path": "askama_derive/src/generator.rs", "rank": 99, "score": 62555.229917172066 } ]
Rust
bee-bundle/src/constants.rs
zesterer/bee-p
375357bdfe8f670e4d26b62a7683d97f339f056f
use common::constants::*; pub struct Offset { pub start: usize, pub length: usize, } pub struct Field { pub trit_offset: Offset, pub tryte_offset: Offset, } impl Field { pub fn byte_start(&self) -> usize { self.trit_offset.start / 5 } pub fn byte_length(&self) -> usize { if self.trit_offset.length % 5 == 0 { self.trit_offset.length / 5 } else { self.trit_offset.length / 5 + 1 } } } macro_rules! offsets_from_trits { ($start:expr, $length:expr) => { Field { trit_offset: Offset { start: $start, length: $length, }, tryte_offset: Offset { start: $start / 3, length: $length / 3, }, } }; } macro_rules! offsets_from_previous_field { ($prev:expr, $length:expr) => { Field { trit_offset: Offset { start: ($prev).trit_offset.start + ($prev).trit_offset.length, length: $length, }, tryte_offset: Offset { start: (($prev).trit_offset.start + ($prev).trit_offset.length) / 3, length: $length / 3, }, } }; } pub const PAYLOAD: Field = offsets_from_trits!(0, PAYLOAD_TRIT_LEN); pub const ADDRESS: Field = offsets_from_previous_field!(PAYLOAD, ADDRESS_TRIT_LEN); pub const VALUE: Field = offsets_from_previous_field!(ADDRESS, VALUE_TRIT_LEN); pub const OBSOLETE_TAG: Field = offsets_from_previous_field!(VALUE, TAG_TRIT_LEN); pub const TIMESTAMP: Field = offsets_from_previous_field!(OBSOLETE_TAG, TIMESTAMP_TRIT_LEN); pub const INDEX: Field = offsets_from_previous_field!(TIMESTAMP, INDEX_TRIT_LEN); pub const LAST_INDEX: Field = offsets_from_previous_field!(INDEX, INDEX_TRIT_LEN); pub const BUNDLE_HASH: Field = offsets_from_previous_field!(LAST_INDEX, HASH_TRIT_LEN); pub const TRUNK_HASH: Field = offsets_from_previous_field!(BUNDLE_HASH, HASH_TRIT_LEN); pub const BRANCH_HASH: Field = offsets_from_previous_field!(TRUNK_HASH, HASH_TRIT_LEN); pub const TAG: Field = offsets_from_previous_field!(BRANCH_HASH, TAG_TRIT_LEN); pub const ATTACHMENT_TS: Field = offsets_from_previous_field!(TAG, TIMESTAMP_TRIT_LEN); pub const ATTACHMENT_LBTS: Field = offsets_from_previous_field!(ATTACHMENT_TS, 
TIMESTAMP_TRIT_LEN); pub const ATTACHMENT_UBTS: Field = offsets_from_previous_field!(ATTACHMENT_LBTS, TIMESTAMP_TRIT_LEN); pub const NONCE: Field = offsets_from_previous_field!(ATTACHMENT_UBTS, NONCE_TRIT_LEN); #[cfg(test)] mod should { use super::*; use common::constants::*; #[test] fn add_up_to_transaction_trit_length() { let total_trit_length = PAYLOAD.trit_offset.length + ADDRESS.trit_offset.length + VALUE.trit_offset.length + OBSOLETE_TAG.trit_offset.length + TIMESTAMP.trit_offset.length + INDEX.trit_offset.length + LAST_INDEX.trit_offset.length + BUNDLE_HASH.trit_offset.length + TRUNK_HASH.trit_offset.length + BRANCH_HASH.trit_offset.length + TAG.trit_offset.length + ATTACHMENT_TS.trit_offset.length + ATTACHMENT_LBTS.trit_offset.length + ATTACHMENT_UBTS.trit_offset.length + NONCE.trit_offset.length; assert_eq!(total_trit_length, TRANSACTION_TRIT_LEN); } #[test] fn add_up_to_transaction_tryte_length() { let total_tryte_length = PAYLOAD.tryte_offset.length + ADDRESS.tryte_offset.length + VALUE.tryte_offset.length + OBSOLETE_TAG.tryte_offset.length + TIMESTAMP.tryte_offset.length + INDEX.tryte_offset.length + LAST_INDEX.tryte_offset.length + BUNDLE_HASH.tryte_offset.length + TRUNK_HASH.tryte_offset.length + BRANCH_HASH.tryte_offset.length + TAG.tryte_offset.length + ATTACHMENT_TS.tryte_offset.length + ATTACHMENT_LBTS.tryte_offset.length + ATTACHMENT_UBTS.tryte_offset.length + NONCE.tryte_offset.length; assert_eq!(total_tryte_length, TRANSACTION_TRYT_LEN); } }
use common::constants::*; pub struct Offset { pub start: usize, pub length: usize, } pub struct Field { pub trit_offset: Offset, pub tryte_offset: Offset, } impl Field { pub fn byte_start(&self) -> usize { self.trit_offset.start / 5 } pub fn byte_length(&self) -> usize { if self.trit_offset.length % 5 == 0 { self.trit_offset.length / 5 } else { self.trit_offset.length / 5 + 1 } } } macro_rules! offsets_from_trits { ($start:expr, $length:expr) => { Field { trit_offset: Offset { start: $start, length: $length, }, tryte_offset: Offset { start: $start / 3, length: $length / 3, }, } }; } macro_rules! offsets_from_previous_field { ($prev:expr, $length:exp
h + LAST_INDEX.trit_offset.length + BUNDLE_HASH.trit_offset.length + TRUNK_HASH.trit_offset.length + BRANCH_HASH.trit_offset.length + TAG.trit_offset.length + ATTACHMENT_TS.trit_offset.length + ATTACHMENT_LBTS.trit_offset.length + ATTACHMENT_UBTS.trit_offset.length + NONCE.trit_offset.length; assert_eq!(total_trit_length, TRANSACTION_TRIT_LEN); } #[test] fn add_up_to_transaction_tryte_length() { let total_tryte_length = PAYLOAD.tryte_offset.length + ADDRESS.tryte_offset.length + VALUE.tryte_offset.length + OBSOLETE_TAG.tryte_offset.length + TIMESTAMP.tryte_offset.length + INDEX.tryte_offset.length + LAST_INDEX.tryte_offset.length + BUNDLE_HASH.tryte_offset.length + TRUNK_HASH.tryte_offset.length + BRANCH_HASH.tryte_offset.length + TAG.tryte_offset.length + ATTACHMENT_TS.tryte_offset.length + ATTACHMENT_LBTS.tryte_offset.length + ATTACHMENT_UBTS.tryte_offset.length + NONCE.tryte_offset.length; assert_eq!(total_tryte_length, TRANSACTION_TRYT_LEN); } }
r) => { Field { trit_offset: Offset { start: ($prev).trit_offset.start + ($prev).trit_offset.length, length: $length, }, tryte_offset: Offset { start: (($prev).trit_offset.start + ($prev).trit_offset.length) / 3, length: $length / 3, }, } }; } pub const PAYLOAD: Field = offsets_from_trits!(0, PAYLOAD_TRIT_LEN); pub const ADDRESS: Field = offsets_from_previous_field!(PAYLOAD, ADDRESS_TRIT_LEN); pub const VALUE: Field = offsets_from_previous_field!(ADDRESS, VALUE_TRIT_LEN); pub const OBSOLETE_TAG: Field = offsets_from_previous_field!(VALUE, TAG_TRIT_LEN); pub const TIMESTAMP: Field = offsets_from_previous_field!(OBSOLETE_TAG, TIMESTAMP_TRIT_LEN); pub const INDEX: Field = offsets_from_previous_field!(TIMESTAMP, INDEX_TRIT_LEN); pub const LAST_INDEX: Field = offsets_from_previous_field!(INDEX, INDEX_TRIT_LEN); pub const BUNDLE_HASH: Field = offsets_from_previous_field!(LAST_INDEX, HASH_TRIT_LEN); pub const TRUNK_HASH: Field = offsets_from_previous_field!(BUNDLE_HASH, HASH_TRIT_LEN); pub const BRANCH_HASH: Field = offsets_from_previous_field!(TRUNK_HASH, HASH_TRIT_LEN); pub const TAG: Field = offsets_from_previous_field!(BRANCH_HASH, TAG_TRIT_LEN); pub const ATTACHMENT_TS: Field = offsets_from_previous_field!(TAG, TIMESTAMP_TRIT_LEN); pub const ATTACHMENT_LBTS: Field = offsets_from_previous_field!(ATTACHMENT_TS, TIMESTAMP_TRIT_LEN); pub const ATTACHMENT_UBTS: Field = offsets_from_previous_field!(ATTACHMENT_LBTS, TIMESTAMP_TRIT_LEN); pub const NONCE: Field = offsets_from_previous_field!(ATTACHMENT_UBTS, NONCE_TRIT_LEN); #[cfg(test)] mod should { use super::*; use common::constants::*; #[test] fn add_up_to_transaction_trit_length() { let total_trit_length = PAYLOAD.trit_offset.length + ADDRESS.trit_offset.length + VALUE.trit_offset.length + OBSOLETE_TAG.trit_offset.length + TIMESTAMP.trit_offset.length + INDEX.trit_offset.lengt
random
[ { "content": "fn trits_with_length(trits: &[Trit], length: usize) -> Vec<Trit> {\n\n if trits.len() < length {\n\n let mut result = vec![0; length];\n\n result[..trits.len()].copy_from_slice(&trits);\n\n result\n\n } else {\n\n trits[..length].to_vec()\n\n }\n\n}\n", "file_path": "iota-conversion/trinary.rs", "rank": 0, "score": 125516.62830532865 }, { "content": "/// Create a subseed\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `seed` - The generation seed\n\n/// * `index` - How many address permutations to iterate through\n\npub fn subseed(mode: HashMode, seed: &[i8], index: usize) -> Result<[i8; HASH_LENGTH]> {\n\n let mut subseed_preimage = seed.to_vec();\n\n for _ in 0..index {\n\n for trit in &mut subseed_preimage {\n\n *trit += 1;\n\n if *trit > iota_constants::MAX_TRIT_VALUE {\n\n *trit = iota_constants::MIN_TRIT_VALUE;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n let mut subseed = [0; HASH_LENGTH];\n\n hash_with_mode(mode, &subseed_preimage, &mut subseed)?;\n\n Ok(subseed)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 1, "score": 122604.13185828346 }, { "content": "/// Generate a digest\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `normalized_bundle_fragment` - Normalized bundle fragment to digest\n\n/// * `signature_fragment` - Signature fragment to use\n\npub fn digest(\n\n mode: HashMode,\n\n normalized_bundle_fragment: &[i8],\n\n signature_fragment: &[i8],\n\n) -> Result<[i8; HASH_LENGTH]> {\n\n ensure!(\n\n normalized_bundle_fragment.len() == HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS,\n\n \"Invalid normalized bundle fragment length: {}\",\n\n normalized_bundle_fragment.len()\n\n );\n\n ensure!(\n\n signature_fragment.len() == FRAGMENT_LENGTH,\n\n \"Invalid signature fragment length: {}\",\n\n signature_fragment.len()\n\n );\n\n let mut digest = [0; HASH_LENGTH];\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n digest_in_place(\n", 
"file_path": "iota-crypto/iss.rs", "rank": 2, "score": 107569.2938494212 }, { "content": "/// Generate a signature fragment\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `normalized_bundle_fragment` - Normalized bundle fragment to sign\n\n/// * `key_fragment` - Key fragment to use\n\npub fn signature_fragment(\n\n mode: HashMode,\n\n normalized_bundle_fragment: &[i8],\n\n key_fragment: &[i8],\n\n) -> Result<Vec<i8>> {\n\n ensure!(\n\n normalized_bundle_fragment.len() == NORMALIZED_FRAGMENT_LENGTH,\n\n \"Invalid normalized bundle fragment length: {}\",\n\n normalized_bundle_fragment.len()\n\n );\n\n ensure!(\n\n key_fragment.len() == FRAGMENT_LENGTH,\n\n \"Invalid key fragment length: {}\",\n\n key_fragment.len()\n\n );\n\n let mut signature_fragment = key_fragment.to_vec();\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n signature_fragment_helper(\n", "file_path": "iota-crypto/iss.rs", "rank": 3, "score": 105402.4156861218 }, { "content": "/// Hash digest in place\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `normalized_bundle_fragment` - Normalized bundle fragment to digest\n\n/// * `signature_fragment` - Signature fragment to use\n\n/// * `digest` - Destination slice to modify in place\n\npub fn digest_in_place(\n\n hash: &mut impl Sponge,\n\n normalized_bundle_fragment: &[i8],\n\n signature_fragment: &[i8],\n\n digest: &mut [i8],\n\n) -> Result<()> {\n\n let mut buffer = signature_fragment[0..FRAGMENT_LENGTH].to_vec();\n\n for (j, trit) in normalized_bundle_fragment\n\n .iter()\n\n .enumerate()\n\n .take(NUMBER_OF_FRAGMENT_CHUNKS)\n\n {\n\n for _ in 0..*trit - iota_constants::MIN_TRYTE_VALUE {\n\n hash.reset();\n\n let offset = j * HASH_LENGTH;\n\n hash.absorb(&buffer[offset..offset + HASH_LENGTH])?;\n\n hash.squeeze(&mut buffer[offset..offset + HASH_LENGTH])?;\n\n }\n\n }\n\n hash.reset();\n\n hash.absorb(&buffer)?;\n\n hash.squeeze(digest)?;\n\n Ok(())\n\n}\n\n\n", 
"file_path": "iota-crypto/iss.rs", "rank": 4, "score": 105402.2539735102 }, { "content": "/// Generate normalize bundle\n\n///\n\n/// * `bundle` - Bundle to normalize\n\npub fn normalized_bundle(bundle: &[i8]) -> Result<[i8; HASH_LENGTH / TRYTE_WIDTH]> {\n\n ensure!(\n\n bundle.len() == HASH_LENGTH,\n\n \"Invalid bundle length: {}\",\n\n bundle.len()\n\n );\n\n let mut normalized_bundle = [0; HASH_LENGTH / TRYTE_WIDTH];\n\n normalized_bundle_in_place(bundle, &mut normalized_bundle);\n\n Ok(normalized_bundle)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 5, "score": 105190.5328489554 }, { "content": "/// Retrieve the merkle root\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `hash` - Hash to absorb\n\n/// * `trits` - Trits to absorb\n\n/// * `offset` - Trit offset to start at\n\n/// * `index` - Used to alternate the order trits and hash are absorbed\n\n/// * `size` - Number of hash iterations\n\npub fn get_merkle_root(\n\n mode: HashMode,\n\n hash: &[i8],\n\n trits: &mut [i8],\n\n offset: usize,\n\n index: usize,\n\n size: usize,\n\n) -> Result<[i8; HASH_LENGTH]> {\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n get_merkle_root_helper(&mut curl, hash, trits, offset, index, size)\n\n }\n\n HashMode::Kerl => {\n\n let mut kerl = Kerl::default();\n\n get_merkle_root_helper(&mut kerl, hash, trits, offset, index, size)\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 6, "score": 103377.36984331078 }, { "content": "/// Generate address\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `digests` - Digests used to generate address\n\npub fn address(mode: HashMode, digests: &mut [i8]) -> Result<[i8; HASH_LENGTH]> {\n\n ensure!(\n\n !digests.is_empty() && digests.len() % HASH_LENGTH == 0,\n\n \"Invalid key length: {}\",\n\n digests.len()\n\n );\n\n let mut address = [0; HASH_LENGTH];\n\n hash_with_mode(mode, digests, &mut address)?;\n\n Ok(address)\n\n}\n\n\n", 
"file_path": "iota-crypto/iss.rs", "rank": 7, "score": 100571.0272613243 }, { "content": "/// Key a subseed\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `subseed` - Subseed used for key generation\n\n/// * `number_of_fragments` - Number of fragments to generate\n\npub fn key(mode: HashMode, subseed: &mut [i8], number_of_fragments: usize) -> Result<Vec<i8>> {\n\n ensure!(\n\n subseed.len() == HASH_LENGTH,\n\n \"Invalid subseed length: {}\",\n\n subseed.len()\n\n );\n\n\n\n let mut key = vec![0; FRAGMENT_LENGTH * number_of_fragments];\n\n hash_with_mode(mode, subseed, &mut key)?;\n\n\n\n Ok(key)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 8, "score": 94728.34946307002 }, { "content": "/// Converts a slice of trits into a numeric value\n\npub fn value(trits: &[i8]) -> i8 {\n\n trits.iter().rev().fold(0, |acc, trit| acc * 3 + *trit)\n\n}\n\n\n", "file_path": "iota-conversion/lib.rs", "rank": 9, "score": 93553.1179333853 }, { "content": "/// Converts a slice of trits into a numeric value in i64\n\npub fn long_value(trits: &[i8]) -> i64 {\n\n trits\n\n .iter()\n\n .rev()\n\n .fold(0, |acc, trit| acc * 3 + i64::from(*trit))\n\n}\n", "file_path": "iota-conversion/lib.rs", "rank": 10, "score": 91757.07764011352 }, { "content": "/// Converts a UTF-8 string containing ascii into a tryte-encoded string\n\npub fn to_trytes(input: &str) -> Result<String> {\n\n let mut trytes = String::new();\n\n let mut tmp_ascii = Vec::new();\n\n for c in input.chars() {\n\n if let Some(ascii) = CHAR_TO_ASCII_MAP.get(&c) {\n\n tmp_ascii.push(ascii);\n\n }\n\n }\n\n for byte in tmp_ascii {\n\n let mut ascii = *byte;\n\n if ascii > 255 {\n\n ascii = 32;\n\n }\n\n let first = ascii % 27;\n\n let second = (ascii - first) / 27;\n\n trytes.push(iota_constants::TRYTE_ALPHABET[first]);\n\n trytes.push(iota_constants::TRYTE_ALPHABET[second]);\n\n }\n\n Ok(trytes)\n\n}\n\n\n", "file_path": "iota-conversion/trytes_converter.rs", "rank": 11, "score": 88073.98767161914 }, { 
"content": "/// Converts a tryte-encoded string into a UTF-8 string containing ascii characters\n\npub fn to_string(input_trytes: &str) -> Result<String> {\n\n ensure!(\n\n input_trytes.len() % 2 == 0,\n\n iota_constants::INVALID_TRYTES_INPUT_ERROR\n\n );\n\n let mut tmp = String::new();\n\n let chars: Vec<char> = input_trytes.chars().collect();\n\n for letters in chars.chunks(2) {\n\n let first = match iota_constants::TRYTE_ALPHABET\n\n .iter()\n\n .position(|&x| x == letters[0])\n\n {\n\n Some(x) => x,\n\n None => {\n\n return Err(Error::from(TryteConverterError::StringNotAscii {\n\n string: input_trytes.to_string(),\n\n }))\n\n }\n\n };\n\n let second = match iota_constants::TRYTE_ALPHABET\n", "file_path": "iota-conversion/trytes_converter.rs", "rank": 12, "score": 86474.66011336973 }, { "content": "/// Finds the optimal unit for displaying an iota amount\n\n///\n\n/// * `amount` - Amount in base Iota unit\n\n///```\n\n/// extern crate iota_conversion;\n\n/// use iota_conversion::{iota_units::IotaUnits, unit_converter};\n\n///\n\n/// let unit = unit_converter::find_optimal_iota_unit_to_display(1000000);\n\n/// assert_eq!(unit, IotaUnits::MegaIota);\n\n///```\n\npub fn find_optimal_iota_unit_to_display(amount: u64) -> IotaUnits {\n\n let length = amount.to_string().len();\n\n\n\n if length >= 1 && length <= 3 {\n\n IotaUnits::Iota\n\n } else if length > 3 && length <= 6 {\n\n IotaUnits::KiloIota\n\n } else if length > 6 && length <= 9 {\n\n IotaUnits::MegaIota\n\n } else if length > 9 && length <= 12 {\n\n IotaUnits::GigaIota\n\n } else if length > 12 && length <= 15 {\n\n IotaUnits::TeraIota\n\n } else if length > 15 && length <= 18 {\n\n IotaUnits::PetaIota\n\n } else {\n\n panic!(\"Invalid number\")\n\n }\n\n}\n\n\n", "file_path": "iota-conversion/unit_converter.rs", "rank": 13, "score": 84160.85776015381 }, { "content": "// TODO: remove\n\n// TODO: documentation\n\npub fn slice_eq(xs: &[i8], ys: &[i8]) -> bool {\n\n for (x, y) in xs.iter().zip(ys.iter()) 
{\n\n if x != y {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n", "file_path": "bee-signing/src/lib.rs", "rank": 14, "score": 83201.74183437793 }, { "content": "/// Converts an amount of iota to a unit\n\n///\n\n/// * `amount` - Amount in base Iota unit\n\n/// * `target` - Target IotaUnit\n\n///```\n\n/// extern crate iota_conversion;\n\n/// use iota_conversion::{iota_units::IotaUnits, unit_converter};\n\n///\n\n/// let unit = unit_converter::convert_amount_to(1000000, IotaUnits::GigaIota);\n\n/// assert_eq!(unit, 0.001);\n\n///```\n\npub fn convert_amount_to(amount: u64, target: IotaUnits) -> f64 {\n\n amount as f64 / 10_u64.pow(u32::from(target.value())) as f64\n\n}\n\n\n", "file_path": "iota-conversion/unit_converter.rs", "rank": 15, "score": 81770.90657997449 }, { "content": "/// Normalize a bundle in place\n\n///\n\n/// * `bundle` - Bundle to normalize\n\n/// * `normalized_bundle` - Destination slice to modify in place\n\npub fn normalized_bundle_in_place(bundle: &[i8], normalized_bundle: &mut [i8]) {\n\n for i in 0..NUMBER_OF_SECURITY_LEVELS {\n\n let mut sum = 0;\n\n let offset = HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS;\n\n for j in i * offset..(i + 1) * offset {\n\n normalized_bundle[j] = bundle[j * TRYTE_WIDTH]\n\n + bundle[j * TRYTE_WIDTH + 1] * 3\n\n + bundle[j * TRYTE_WIDTH + 2] * 9;\n\n sum += normalized_bundle[j];\n\n }\n\n if sum > 0 {\n\n while sum > 0 {\n\n for trit in normalized_bundle\n\n .iter_mut()\n\n .skip(i * offset)\n\n .take((i + 1) * offset)\n\n {\n\n if *trit > iota_constants::MIN_TRYTE_VALUE {\n\n *trit -= 1;\n\n break;\n", "file_path": "iota-crypto/iss.rs", "rank": 16, "score": 81768.49239163936 }, { "content": "/// Generate digests\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `key` - kKey slice used to generate digests\n\npub fn digests(mode: HashMode, key: &[i8]) -> Result<Vec<i8>> {\n\n ensure!(\n\n !key.is_empty() && key.len() % FRAGMENT_LENGTH == 0,\n\n \"Invalid key length: {}\",\n\n key.len()\n\n 
);\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode)?;\n\n Ok(digests_helper(&mut curl, key)?)\n\n }\n\n HashMode::Kerl => {\n\n let mut kerl = Kerl::default();\n\n Ok(digests_helper(&mut kerl, key)?)\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 17, "score": 78798.91716731488 }, { "content": "fn digests_helper(hash: &mut impl Sponge, key: &[i8]) -> Result<Vec<i8>> {\n\n let mut digests = vec![0; key.len() / FRAGMENT_LENGTH * HASH_LENGTH];\n\n for i in 0..key.len() / FRAGMENT_LENGTH {\n\n let mut buffer = key[i * FRAGMENT_LENGTH..(i + 1) * FRAGMENT_LENGTH].to_vec();\n\n for j in 0..NUMBER_OF_FRAGMENT_CHUNKS {\n\n for _ in 0..iota_constants::MAX_TRYTE_VALUE - iota_constants::MIN_TRYTE_VALUE {\n\n hash.reset();\n\n let offset = j * HASH_LENGTH;\n\n hash.absorb(&buffer[offset..offset + HASH_LENGTH])?;\n\n hash.squeeze(&mut buffer[offset..offset + HASH_LENGTH])?;\n\n }\n\n }\n\n hash.reset();\n\n hash.absorb(&buffer)?;\n\n let offset = i * HASH_LENGTH;\n\n hash.squeeze(&mut digests[offset..offset + HASH_LENGTH])?;\n\n }\n\n Ok(digests)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 18, "score": 78415.82430443392 }, { "content": "/// Converts an iota amount into the optimal unit for display\n\n///\n\n/// * `amount` - amount in base Iota unit\n\n/// * `extended` - Whether to use two significant digests, or 15\n\n///```\n\n/// extern crate iota_conversion;\n\n/// use iota_conversion::{iota_units::IotaUnits, unit_converter};\n\n///\n\n/// let s = unit_converter::convert_raw_iota_amount_to_display_text(1000000, false);\n\n/// assert_eq!(s, \"1.00 Mi\");\n\n///\n\n/// let extended_s = unit_converter::convert_raw_iota_amount_to_display_text(1900000000000002, true);\n\n/// assert_eq!(extended_s, \"1.900000000000002 Pi\");\n\n///```\n\npub fn convert_raw_iota_amount_to_display_text(amount: u64, extended: bool) -> String {\n\n let unit = find_optimal_iota_unit_to_display(amount);\n\n let 
amount_in_display_unit = convert_amount_to(amount, unit);\n\n create_amount_with_unit_display_text(amount_in_display_unit, unit, extended)\n\n}\n\n\n", "file_path": "iota-conversion/unit_converter.rs", "rank": 19, "score": 77891.16146789545 }, { "content": "/// Converts an amount of iotas to a new unit\n\n///\n\n/// * `amount` - Amount to convert\n\n/// * `from` - IotaUnit that `amount` is in\n\n/// * `to` - Target IotaUnit\n\n///```\n\n/// extern crate iota_conversion;\n\n/// use iota_conversion::{iota_units::IotaUnits, unit_converter};\n\n///\n\n/// let amount_in_new_unit = unit_converter::convert_units(1000, IotaUnits::TeraIota, IotaUnits::PetaIota);\n\n/// assert_eq!(amount_in_new_unit, 1);\n\n///```\n\npub fn convert_units(amount: u64, from: IotaUnits, to: IotaUnits) -> u64 {\n\n let amount_in_source = amount * 10_u64.pow(u32::from(from.value()));\n\n convert_units_helper(amount_in_source, to)\n\n}\n\n\n", "file_path": "iota-conversion/unit_converter.rs", "rank": 20, "score": 77437.21290901191 }, { "content": "/// Allows you to hash `trits` into `out` using the `mode` of your choosing\n\n///```rust\n\n/// use iota_crypto::{self, HashMode};\n\n///\n\n/// let input = [0; 243];\n\n/// let mut out = [0; 243];\n\n/// iota_crypto::hash_with_mode(HashMode::Kerl, &input, &mut out);\n\n///```\n\npub fn hash_with_mode(mode: HashMode, trits: &[i8], out: &mut [i8]) -> Result<()> {\n\n ensure!(\n\n out.len() % 243 == 0,\n\n \"Output slice length isn't a multiple of 243: {}\",\n\n out.len()\n\n );\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n curl.absorb(trits)?;\n\n curl.squeeze(out)?;\n\n }\n\n HashMode::Kerl => {\n\n let mut kerl = Kerl::default();\n\n kerl.absorb(trits)?;\n\n kerl.squeeze(out)?;\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "iota-crypto/lib.rs", "rank": 21, "score": 76171.25167946826 }, { "content": "/// Increments a trit slice in place, only considering trits until index `size`\n\nfn 
increment(trit_array: &mut [Trit], size: usize) {\n\n for trit in trit_array.iter_mut().take(size) {\n\n *trit += 1;\n\n if *trit > iota_constants::MAX_TRIT_VALUE {\n\n *trit = iota_constants::MIN_TRIT_VALUE;\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 22, "score": 71924.47022351048 }, { "content": "fn setout(src: &[u8], dst: &mut [u8], len: usize) {\n\n dst[..len].copy_from_slice(&src[..len]);\n\n}\n\n\n", "file_path": "iota-crypto/keccak.rs", "rank": 23, "score": 68790.34383827135 }, { "content": "/// Extracts the nonce from the final Curl state and the given slot index.\n\nfn extract_nonce(state: &Curl64State, slot: usize) -> NonceTrits {\n\n let mut nonce = [0; NONCE_LEN];\n\n let mut offset = 0;\n\n let slotmask = 1 << slot;\n\n\n\n for i in CHUNK_NONCE_START..HASH_LEN {\n\n let (hi, lo) = state.get(i);\n\n\n\n match (hi & slotmask, lo & slotmask) {\n\n (1, 0) => nonce[offset] = 1,\n\n (0, 1) => nonce[offset] = -1,\n\n (_, _) => (),\n\n }\n\n offset += 1;\n\n }\n\n\n\n NonceTrits(nonce)\n\n}\n\n\n\nimpl Default for PearlDiver {\n\n fn default() -> Self {\n\n Self {\n\n cores: Cores::default(),\n\n difficulty: Difficulty::default(),\n\n state: Arc::new(RwLock::new(PearlDiverState::Created)),\n\n }\n\n }\n\n}\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 24, "score": 67348.90710126732 }, { "content": "/// Responsible for constructing a `Bundle` from scratch to be sent to the IOTA network. 
This includes siging its transactions, calculating the bundle hash, and setting other releveant fields depending on context.\n\nstruct OutgoingBundleBuilder;\n", "file_path": "bee-bundle/src/bundle_builder.rs", "rank": 25, "score": 57752.190616855805 }, { "content": "struct SignedBundleBuilder;\n\n\n", "file_path": "bee-bundle/src/bundle_builder.rs", "rank": 26, "score": 57748.90063915062 }, { "content": "struct AttachedBundleBuilder;\n", "file_path": "bee-bundle/src/bundle_builder.rs", "rank": 27, "score": 57748.90063915062 }, { "content": "struct ValidatedBundleBuilder;\n\n\n\nimpl TransactionBuilders {\n\n pub fn push(&mut self, transaction_builder: TransactionBuilder) {\n\n self.0.push(transaction_builder);\n\n }\n\n}\n", "file_path": "bee-bundle/src/bundle_builder.rs", "rank": 28, "score": 57748.90063915062 }, { "content": "/// Concerned with constructing and verifying complete messages coming in externally.\n\nstruct IncomingBundleBuilder {\n\n transaction_builders: TransactionBuilders,\n\n}\n\n\n\nimpl IncomingBundleBuilder {\n\n /// Pushes a new transaction coming over the wire into the bundle builder.\n\n pub fn push(&mut self, transaction_builder: TransactionBuilder) -> &mut Self {\n\n self.transaction_builders.push(transaction_builder);\n\n self\n\n }\n\n}\n\n\n", "file_path": "bee-bundle/src/bundle_builder.rs", "rank": 29, "score": 57748.90063915062 }, { "content": "struct SealedBundleBuilder;\n", "file_path": "bee-bundle/src/bundle_builder.rs", "rank": 30, "score": 57748.90063915062 }, { "content": "/// Trait used to enable conversion to trinary types\n\npub trait Trinary {\n\n /// Provides the trit vector representation of the value\n\n fn trits(&self) -> Vec<Trit>;\n\n /// Provides the trit vector representation of the value with given length, padding with `0` if required\n\n fn trits_with_length(&self, length: usize) -> Vec<Trit>;\n\n /// Provides the tryte string representation of the value\n\n fn trytes(&self) -> Result<Trytes>;\n\n}\n\n\n\n/// 
Type alias for `i8`\n\npub type Trit = i8;\n\n/// Type alias for `String`\n\npub type Trytes = String;\n\n\n\nimpl Trinary for i64 {\n\n fn trits(&self) -> Vec<Trit> {\n\n let mut trits = Vec::new();\n\n let mut abs = self.abs();\n\n while abs > 0 {\n\n let mut remainder = (abs % i64::from(TRINARY_RADIX as i8)) as i8;\n", "file_path": "iota-conversion/trinary.rs", "rank": 31, "score": 55671.82792278037 }, { "content": "/// The sponge trait specifys the main functionality of all\n\n/// sponges used throughout IOTA\n\npub trait Sponge\n\nwhere\n\n Self: Default + Clone + Send + 'static,\n\n{\n\n /// Absorb trits into the sponge\n\n ///\n\n /// * `trits` - A slice of trits whose length is a multiple of 243\n\n fn absorb(&mut self, trits: &[i8]) -> Result<()>;\n\n /// Squeeze trits out of the sponge and copy them into `out`\n\n ///\n\n /// * `out` - A slice of trits whose length is a multiple of 243\n\n fn squeeze(&mut self, out: &mut [i8]) -> Result<()>;\n\n /// Reset the sponge to initial state\n\n fn reset(&mut self);\n\n}\n\n\n", "file_path": "iota-crypto/lib.rs", "rank": 32, "score": 55671.652484544495 }, { "content": "// TODO: documentation\n\npub trait Signature {\n\n // TODO: documentation\n\n fn size(&self) -> usize;\n\n\n\n // TODO: documentation\n\n fn from_bytes(bytes: &[i8]) -> Self;\n\n\n\n // TODO: documentation\n\n fn to_bytes(&self) -> &[i8];\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 33, "score": 54578.600809215226 }, { "content": "pub trait IsTryte {\n\n fn is_tryte(&self) -> bool;\n\n}\n\n\n\nimpl IsTryte for char {\n\n fn is_tryte(&self) -> bool {\n\n *self == '9' || (*self >= 'A' && *self <= 'Z')\n\n }\n\n}\n", "file_path": "bee-ternary/src/tryte.rs", "rank": 34, "score": 54578.600809215226 }, { "content": "// TODO: documentation\n\npub trait RecoverableSignature {\n\n // TODO: documentation\n\n type PublicKey;\n\n\n\n // TODO: documentation\n\n fn recover_public_key(&self, message: &[i8]) -> Self::PublicKey;\n\n}\n\n\n", 
"file_path": "bee-signing/src/lib.rs", "rank": 35, "score": 53557.19516980993 }, { "content": "// TODO: documentation\n\npub trait PrivateKey {\n\n /// The type of the matching public key\n\n type PublicKey;\n\n /// The type of the generated signatures\n\n type Signature;\n\n\n\n /// Returns the public counterpart of a private key\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use iota_crypto::Kerl;\n\n /// # use signing::PrivateKeyGenerator;\n\n /// # use signing::seed::Seed;\n\n /// # use signing::wots::WotsPrivateKeyGeneratorBuilder;\n\n /// use signing::PrivateKey;\n\n ///\n\n /// # let seed = Seed::new();\n\n /// # let private_key_generator = WotsPrivateKeyGeneratorBuilder::<Kerl>::default().security_level(2).build().unwrap();\n\n /// # let private_key = private_key_generator.generate(&seed, 0);\n", "file_path": "bee-signing/src/lib.rs", "rank": 36, "score": 53557.19516980993 }, { "content": "pub trait RawEncoding {\n\n /// Get the number of trits in this buffer\n\n fn len(&self) -> usize;\n\n\n\n /// Get the trit at the given index\n\n unsafe fn get_unchecked(&self, index: usize) -> Trit;\n\n\n\n /// Set the trit at the given index\n\n unsafe fn set_unchecked(&mut self, index: usize, trit: Trit);\n\n\n\n /// Get a slice of this slice\n\n unsafe fn slice_unchecked(&self, range: Range<usize>) -> &Self;\n\n\n\n /// Get a mutable slice of this slice\n\n unsafe fn slice_unchecked_mut(&mut self, range: Range<usize>) -> &mut Self;\n\n}\n\n\n", "file_path": "bee-ternary/src/raw.rs", "rank": 37, "score": 53557.19516980993 }, { "content": "// TODO: documentation\n\npub trait PublicKey {\n\n // TODO: documentation\n\n type Signature;\n\n\n\n // TODO: documentation\n\n fn verify(&self, message: &[i8], signature: &Self::Signature) -> bool;\n\n\n\n // TODO: documentation\n\n fn from_bytes(bytes: &[i8]) -> Self;\n\n\n\n // TODO: documentation\n\n fn to_bytes(&self) -> &[i8];\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 38, "score": 53557.19516980993 
}, { "content": "fn main() {\n\n env::set_var(ENV_VAR, DEBUG);\n\n\n\n let mut prototype = Prototype::from_config(CONFIG);\n\n\n\n assert!(prototype.run().is_ok());\n\n\n\n env::remove_var(ENV_VAR);\n\n}\n", "file_path": "bee-main/src/main.rs", "rank": 39, "score": 53530.299243752015 }, { "content": "// TODO: documentation\n\npub trait PrivateKeyGenerator {\n\n /// The type of the generated private keys\n\n type PrivateKey;\n\n\n\n /// Deterministically generates and returns a private key\n\n ///\n\n /// # Parameters\n\n ///\n\n /// * `seed` A seed to deterministically derive a private key from\n\n /// * `index` An index to deterministically derive a private key from\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use iota_crypto::Kerl;\n\n /// use signing::PrivateKeyGenerator;\n\n /// use signing::seed::Seed;\n\n /// use signing::wots::WotsPrivateKeyGeneratorBuilder;\n\n ///\n\n /// let seed = Seed::new();\n\n /// let private_key_generator = WotsPrivateKeyGeneratorBuilder::<Kerl>::default().security_level(2).build().unwrap();\n\n /// let private_key = private_key_generator.generate(&seed, 0);\n\n /// ```\n\n fn generate(&self, seed: &Seed, index: u64) -> Self::PrivateKey;\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 40, "score": 52597.69414316265 }, { "content": "pub trait RawEncodingBuf {\n\n type Slice: RawEncoding + ?Sized;\n\n\n\n /// Create a new empty buffer\n\n fn new() -> Self where Self: Sized;\n\n\n\n /// Create a new buffer containing the given trits\n\n fn from_trits<T: Into<Trit> + Clone>(trits: &[T]) -> Self where Self: Sized {\n\n let mut this = Self::new();\n\n for trit in trits {\n\n this.push(trit.clone().into());\n\n }\n\n this\n\n }\n\n\n\n /// Push a trit to the back of this buffer\n\n fn push(&mut self, trit: Trit);\n\n\n\n /// View the trits in this buffer as a slice\n\n fn as_slice(&self) -> &Self::Slice;\n", "file_path": "bee-ternary/src/raw.rs", "rank": 41, "score": 52597.69414316265 }, { "content": "fn 
signature_fragment_helper(\n\n hash: &mut impl Sponge,\n\n normalized_bundle_fragment: &[i8],\n\n out: &mut [i8],\n\n) -> Result<()> {\n\n for (j, trit) in normalized_bundle_fragment\n\n .iter()\n\n .enumerate()\n\n .take(NUMBER_OF_FRAGMENT_CHUNKS)\n\n {\n\n for _ in 0..iota_constants::MAX_TRYTE_VALUE - *trit {\n\n hash.reset();\n\n let offset = j * HASH_LENGTH;\n\n hash.absorb(&out[offset..offset + HASH_LENGTH])?;\n\n hash.squeeze(&mut out[offset..offset + HASH_LENGTH])?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 42, "score": 52452.955963105414 }, { "content": "fn get_merkle_root_helper(\n\n curl: &mut impl Sponge,\n\n hash: &[i8],\n\n trits: &[i8],\n\n offset: usize,\n\n index: usize,\n\n size: usize,\n\n) -> Result<[i8; HASH_LENGTH]> {\n\n let empty = [0; HASH_LENGTH];\n\n let mut index = index;\n\n let mut tmp = [0; HASH_LENGTH];\n\n for i in 0..size {\n\n curl.reset();\n\n if (index & 1) == 0 {\n\n curl.absorb(hash)?;\n\n let offset = offset + i * HASH_LENGTH;\n\n curl.absorb(&trits[offset..offset + HASH_LENGTH])?;\n\n } else {\n\n let offset = offset + i * HASH_LENGTH;\n\n curl.absorb(&trits[offset..offset + HASH_LENGTH])?;\n", "file_path": "iota-crypto/iss.rs", "rank": 43, "score": 51442.948029185456 }, { "content": "fn bigint_not(base: &mut [u32]) {\n\n for i in base.iter_mut() {\n\n *i = !*i;\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 44, "score": 47227.625312910415 }, { "content": "fn is_null(base: &[u32]) -> bool {\n\n for b in base.iter() {\n\n if *b != 0 {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 45, "score": 47227.625312910415 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut trits = [0; HASH_TRINARY_SIZE];\n\n for trit in trits.iter_mut() {\n\n *trit = rng.gen_range(-1, 2);\n\n }\n\n c.bench_function(\"Kerl on 243 trits\", move |b| b.iter(|| 
basic_kerl(trits)));\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "iota-crypto/benches/kerl_benchmark.rs", "rank": 46, "score": 45385.849606459466 }, { "content": "fn trytes(trits: &[Trit]) -> Result<Trytes> {\n\n ensure!(trits.len() % 3 == 0, \"Invalid trit length.\");\n\n\n\n trits\n\n .chunks(iota_constants::TRITS_PER_TRYTE)\n\n .map(trits_to_char)\n\n .collect()\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 47, "score": 45240.38414387329 }, { "content": "fn outer_increment(prestate: &mut Curl64State) {\n\n for i in OUTER_INCR_START..INNER_INCR_START {\n\n let with_carry = prestate.bit_add(i);\n\n if !with_carry {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 48, "score": 44543.89648938227 }, { "content": "fn char_to_trits(tryte: char) -> &'static [Trit] {\n\n match iota_constants::TRYTE_ALPHABET\n\n .iter()\n\n .position(|&x| x == tryte)\n\n {\n\n Some(p) => &TRYTE_TO_TRITS_MAPPINGS[p],\n\n None => &TRYTE_TO_TRITS_MAPPINGS[0],\n\n }\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 49, "score": 44347.402542458156 }, { "content": "fn trits_to_char(trits: &[Trit]) -> Result<char> {\n\n ensure!(\n\n trits.len() <= iota_constants::TRITS_PER_TRYTE,\n\n \"Provided trit slice is too long: {:?}\",\n\n trits\n\n );\n\n Ok(\n\n match TRYTE_TO_TRITS_MAPPINGS.iter().position(|&x| x == trits) {\n\n Some(p) => iota_constants::TRYTE_ALPHABET[p],\n\n None => '-',\n\n },\n\n )\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 50, "score": 44347.402542458156 }, { "content": "fn make_prestate(input: &InputTrits) -> Curl64State {\n\n let mut prestate = Curl64State::new(BITS_1);\n\n let mut tmpstate = Curl64State::new(BITS_1);\n\n\n\n let mut offset = 0;\n\n\n\n for _ in 0..NUM_FULL_CHUNKS_FOR_PRESTATE {\n\n for i in 0..HASH_LEN {\n\n match (*input)[offset] {\n\n 1 => prestate.set(i, BITS_1, BITS_0),\n\n -1 => prestate.set(i, BITS_0, 
BITS_1),\n\n _ => (),\n\n }\n\n offset += 1;\n\n }\n\n\n\n unsafe {\n\n transform(&mut prestate, &mut tmpstate);\n\n }\n\n }\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 51, "score": 43748.71940482406 }, { "content": "fn xorin(dst: &mut [u8], src: &[u8]) {\n\n assert!(dst.len() <= src.len());\n\n let len = dst.len();\n\n let mut dst_ptr = dst.as_mut_ptr();\n\n let mut src_ptr = src.as_ptr();\n\n for _ in 0..len {\n\n unsafe {\n\n *dst_ptr ^= *src_ptr;\n\n src_ptr = src_ptr.offset(1);\n\n dst_ptr = dst_ptr.offset(1);\n\n }\n\n }\n\n}\n\n\n\n/// Total number of lanes.\n\nconst PLEN: usize = 25;\n\n\n\n/// This structure should be used to create keccak/sha3 hash.\n\n#[derive(Clone, Copy)]\n\npub(crate) struct Keccak {\n", "file_path": "iota-crypto/keccak.rs", "rank": 52, "score": 43436.14831419503 }, { "content": "fn basic_kerl(trits: [i8; HASH_TRINARY_SIZE]) {\n\n let mut kerl = Kerl::default();\n\n kerl.absorb(&trits).unwrap();\n\n let mut bytes = vec![0; HASH_TRINARY_SIZE];\n\n kerl.squeeze(&mut bytes).unwrap();\n\n}\n\n\n", "file_path": "iota-crypto/benches/kerl_benchmark.rs", "rank": 53, "score": 42996.525633704616 }, { "content": "fn inner_increment(prestate: &mut Curl64State) -> Exhausted {\n\n // we have not exhausted the search space until each add\n\n // operation produces a carry\n\n for i in INNER_INCR_START..HASH_LEN {\n\n if {\n\n let with_carry = prestate.bit_add(i);\n\n !with_carry\n\n } {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 54, "score": 42710.27234082275 }, { "content": "fn bigint_sub(base: &mut [u32], rh: &[u32]) {\n\n let mut noborrow = true;\n\n for (a, b) in base.iter_mut().zip(rh) {\n\n let (v, c) = full_add(*a, !*b, noborrow);\n\n *a = v;\n\n noborrow = c;\n\n }\n\n assert!(noborrow);\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 55, "score": 42594.19519711783 }, { "content": "fn bigint_add(base: &mut [u32], rh: &[u32]) {\n\n let mut carry = false;\n\n\n\n for 
(a, b) in base.iter_mut().zip(rh.iter()) {\n\n let (v, c) = full_add(*a, *b, carry);\n\n *a = v;\n\n carry = c;\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 56, "score": 42594.19519711783 }, { "content": "fn bigint_cmp(lh: &[u32], rh: &[u32]) -> i8 {\n\n for (a, b) in lh.iter().rev().zip(rh.iter().rev()) {\n\n if a < b {\n\n return -1;\n\n } else if a > b {\n\n return 1;\n\n }\n\n }\n\n 0\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 57, "score": 42594.19519711783 }, { "content": "fn convert_units_helper(amount: u64, to: IotaUnits) -> u64 {\n\n amount / 10_u64.pow(u32::from(to.value()))\n\n}\n\n\n", "file_path": "iota-conversion/unit_converter.rs", "rank": 58, "score": 41046.824341440166 }, { "content": "/// Converts trits to bytes\n\nfn trits_to_bytes(trits: &[i8], bytes: &mut [u8]) -> Result<()> {\n\n ensure!(\n\n trits.len() == HASH_LENGTH,\n\n \"Trit slice should have length {}, but had length: {}\",\n\n HASH_LENGTH,\n\n trits.len()\n\n );\n\n ensure!(\n\n bytes.len() == BYTE_LENGTH,\n\n \"Byte slice should have length {}, but had length: {}\",\n\n BYTE_LENGTH,\n\n bytes.len()\n\n );\n\n\n\n let mut base = [0; INT_LENGTH];\n\n\n\n let mut size = 1;\n\n let mut all_minus_1 = true;\n\n\n\n for t in trits[0..HASH_LENGTH - 1].iter() {\n", "file_path": "iota-crypto/kerl.rs", "rank": 59, "score": 40992.93669397046 }, { "content": "fn bigint_add_base(base: &mut [u32], rh: u32) -> u32 {\n\n let mut res = full_add(base[0], rh, false);\n\n base[0] = res.0;\n\n let mut j = 0;\n\n while res.1 {\n\n res = full_add(base[j], 0, true);\n\n base[j] = res.0;\n\n j += 1;\n\n }\n\n j as u32\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 60, "score": 40240.742922851015 }, { "content": "/// Converts bytes to trits\n\nfn bytes_to_trits(bytes: &mut [u8], trits: &mut [i8]) -> Result<()> {\n\n ensure!(\n\n trits.len() == HASH_LENGTH,\n\n \"Trit slice should have length {}, but had length: {}\",\n\n HASH_LENGTH,\n\n trits.len()\n\n );\n\n 
ensure!(\n\n bytes.len() == BYTE_LENGTH,\n\n \"Byte slice should have length {}, but had length: {}\",\n\n BYTE_LENGTH,\n\n bytes.len()\n\n );\n\n\n\n let mut base = vec![0; INT_LENGTH];\n\n trits[HASH_LENGTH - 1] = 0;\n\n\n\n for i in 0..INT_LENGTH {\n\n base[INT_LENGTH - 1 - i] = u32::from(bytes[i * 4]) << 24;\n\n base[INT_LENGTH - 1 - i] |= u32::from(bytes[i * 4 + 1]) << 16;\n", "file_path": "iota-crypto/kerl.rs", "rank": 61, "score": 39522.619870725735 }, { "content": "fn find_nonce(state: &Curl64State, difficulty: &Difficulty) -> Option<NonceTrits> {\n\n let mut nonce_test = BITS_1;\n\n\n\n for i in (HASH_LEN - difficulty.0)..HASH_LEN {\n\n nonce_test &= state.bit_equal(i);\n\n\n\n // If 'nonce_test' ever becomes 0, then this means that none of the current nonce candidates satisfied\n\n // the difficulty setting\n\n if nonce_test == 0 {\n\n return None;\n\n }\n\n }\n\n\n\n for slot in 0..BATCH_SIZE {\n\n if (nonce_test >> slot) & 1 != 0 {\n\n return Some(extract_nonce(&state, slot));\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 62, "score": 38852.07946173537 }, { "content": "fn full_add(ia: u32, ib: u32, carry: bool) -> (u32, bool) {\n\n let a = u64::from(ia);\n\n let b = u64::from(ib);\n\n\n\n let mut v = a + b;\n\n let mut l = v >> 32;\n\n let mut r = v & 0xFFFF_FFFF;\n\n\n\n let carry1 = l != 0;\n\n\n\n if carry {\n\n v = r + 1;\n\n }\n\n l = (v >> 32) & 0xFFFF_FFFF;\n\n r = v & 0xFFFF_FFFF;\n\n let carry2 = l != 0;\n\n (r as u32, carry1 || carry2)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "iota-crypto/kerl.rs", "rank": 63, "score": 38166.19926764934 }, { "content": "fn create_amount_with_unit_display_text(amount: f64, unit: IotaUnits, extended: bool) -> String {\n\n if unit == IotaUnits::Iota {\n\n format!(\"{} {}\", amount, unit.unit())\n\n } else if extended {\n\n format!(\"{:.15} {}\", amount, unit.unit())\n\n } else {\n\n format!(\"{:.2} {}\", amount, unit.unit())\n\n }\n\n}\n\n\n", "file_path": 
"iota-conversion/unit_converter.rs", "rank": 64, "score": 36298.94074093635 }, { "content": "use common::Tryte;\n\nuse common::constants::*;\n\nuse common::Result;\n\nuse common::Error;\n\n\n\nuse ternary::IsTryte;\n\n\n\nuse crate::constants::*;\n\n\n\npub struct Payload(pub [Tryte; PAYLOAD.tryte_offset.length]);\n\npub struct Address(pub [Tryte; ADDRESS.tryte_offset.length]);\n\n#[derive(Default, Debug)]\n\npub struct Value(pub i64);\n\npub struct Tag(pub [Tryte; TAG.tryte_offset.length]);\n\n#[derive(Default, Debug)]\n\npub struct Timestamp(pub u64);\n\n#[derive(Default, Debug)]\n\npub struct Index(pub usize);\n\npub struct Hash(pub [Tryte; BUNDLE_HASH.tryte_offset.length]);\n\npub struct Nonce(pub [Tryte; NONCE.tryte_offset.length]);\n", "file_path": "bee-bundle/src/transaction.rs", "rank": 67, "score": 17.868056445539292 }, { "content": "use std::ops::Range;\n\nuse crate::{\n\n Trit,\n\n RawEncoding,\n\n RawEncodingBuf,\n\n};\n\n\n\n#[repr(transparent)]\n\npub struct T1B1([()]);\n\n\n\nimpl T1B1 {\n\n unsafe fn make(ptr: *const u8, offset: usize, len: usize) -> *const Self {\n\n std::mem::transmute((ptr.offset(offset as isize), len))\n\n }\n\n\n\n unsafe fn ptr(&self, index: usize) -> *const u8 {\n\n (self.0.as_ptr() as *const u8).offset(index as isize)\n\n }\n\n}\n\n\n", "file_path": "bee-ternary/src/t1b1.rs", "rank": 70, "score": 15.40654273503145 }, { "content": "use std::ops::Range;\n\nuse crate::{Trit, RawEncoding, RawEncodingBuf};\n\n\n\n#[repr(transparent)]\n\npub struct T4B1([()]);\n\n\n\nimpl T4B1 {\n\n unsafe fn make(ptr: *const u8, offset: usize, len: usize) -> *const Self {\n\n let len = (len << 2) | (offset % 4);\n\n std::mem::transmute((ptr.offset((offset / 4) as isize), len))\n\n }\n\n\n\n unsafe fn ptr(&self, index: usize) -> *const u8 {\n\n let byte_offset = index / 4;\n\n (self.0.as_ptr() as *const u8).offset(byte_offset as isize)\n\n }\n\n\n\n fn len_offset(&self) -> (usize, usize) {\n\n (self.0.len() >> 2, self.0.len() & 0b11)\n\n }\n", 
"file_path": "bee-ternary/src/t4b1.rs", "rank": 71, "score": 15.372482050656341 }, { "content": "use iota_constants;\n\n\n\nuse crate::Result;\n\n\n\nuse super::curl::Curl;\n\nuse super::kerl::Kerl;\n\nuse super::{hash_with_mode, HashMode, Sponge};\n\nuse iota_constants::HASH_TRINARY_SIZE as HASH_LENGTH;\n\n\n\n/// Number of fragment chunks\n\npub const NUMBER_OF_FRAGMENT_CHUNKS: usize = 27;\n\n/// Length of a fragment\n\npub const FRAGMENT_LENGTH: usize = HASH_LENGTH * NUMBER_OF_FRAGMENT_CHUNKS;\n\n/// The amount of valid security levels\n\npub const NUMBER_OF_SECURITY_LEVELS: usize = 3;\n\n/// The width of tryte\n\npub const TRYTE_WIDTH: usize = 3;\n\n/// Normalized fragment length\n\npub const NORMALIZED_FRAGMENT_LENGTH: usize = HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS;\n\n\n\n/// Create a subseed\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `seed` - The generation seed\n\n/// * `index` - How many address permutations to iterate through\n", "file_path": "iota-crypto/iss.rs", "rank": 72, "score": 15.125157151436925 }, { "content": " }\n\n }\n\n\n\n pub fn absorb(&mut self, trits: &[i8], mut offset: usize, mut length: usize) {\n\n loop {\n\n let chunk_length = {\n\n if length < HASH_LEN {\n\n length\n\n } else {\n\n HASH_LEN\n\n }\n\n };\n\n\n\n self.state[0..chunk_length].copy_from_slice(&trits[offset..offset + chunk_length]);\n\n\n\n self.transform();\n\n\n\n offset += chunk_length;\n\n\n\n if length > chunk_length {\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 73, "score": 14.998152401661104 }, { "content": " length -= chunk_length;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n pub fn squeeze(&mut self, trits: &mut [i8], mut offset: usize, mut length: usize) {\n\n loop {\n\n let chunk_length = {\n\n if length < HASH_LEN {\n\n length\n\n } else {\n\n HASH_LEN\n\n }\n\n };\n\n\n\n trits[offset..offset + chunk_length].copy_from_slice(&self.state[0..chunk_length]);\n\n\n\n self.transform();\n", "file_path": 
"bee-crypto/src/curl/mod.rs", "rank": 74, "score": 14.711292343413366 }, { "content": "/// The number of trits in a tryte\n\npub const TRITS_PER_TRYTE: usize = 3;\n\n/// The maximum allowed seed length\n\npub const SEED_LENGTH_MAX: usize = 81;\n\n/// The length of an address without a checksum\n\npub const ADDRESS_LENGTH_WITHOUT_CHECKSUM: usize = 81;\n\n/// The length of an address with a checksum\n\npub const ADDRESS_LENGTH_WITH_CHECKSUM: usize = 90;\n\n/// The mandatory length of a message segment\n\npub const MESSAGE_LENGTH: usize = 2187;\n\n/// The mandatory length of a tag segment\n\npub const TAG_LENGTH: usize = 27;\n\n\n\n/// Size of signature message fragment in trtis\n\npub const SIGNATURE_MESSAGE_FRAGMENT_TRINARY_SIZE: usize = 6561;\n\n/// Size of address in trtis\n\npub const ADDRESS_TRINARY_SIZE: usize = 243;\n\n/// Size of value in trtis\n\npub const VALUE_SIZE_TRINARY: usize = 81;\n\n/// Size of obselte tag in trtis\n", "file_path": "iota-constants/lib.rs", "rank": 75, "score": 12.965895549045506 }, { "content": "}\n\n\n\nimpl Tag {\n\n pub fn from_str(tag: &str) -> Self {\n\n assert!(tag.len() <= TAG.tryte_offset.length);\n\n assert!(tag.chars().all(|c| c.is_tryte()));\n\n\n\n let mut trytes = [TRYTE_ZERO; TAG.tryte_offset.length];\n\n\n\n for (i, c) in tag.chars().enumerate() {\n\n trytes[i] = c;\n\n }\n\n\n\n Self(trytes)\n\n }\n\n}\n\n\n\nimpl Default for Hash {\n\n fn default() -> Self {\n\n Self([TRYTE_ZERO; BUNDLE_HASH.tryte_offset.length])\n", "file_path": "bee-bundle/src/transaction.rs", "rank": 76, "score": 12.96444455771817 }, { "content": " }\n\n}\n\n\n\nmacro_rules! 
forward_sponge_impl {\n\n ($($t:ty),+) => {\n\n\n\n $(\n\n impl $t {\n\n /// Return the number of rounds used in this `CurlP` instacnce.\n\n pub fn rounds(&self) -> usize {\n\n self.0.rounds\n\n }\n\n }\n\n\n\n impl Sponge for $t {\n\n const HASH_LEN: usize = 243;\n\n\n\n fn absorb(&mut self, input: &Trits) {\n\n self.0.absorb(input)\n\n }\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 77, "score": 12.938904098938277 }, { "content": "//! This module currently uses a custom implementation of bigint\n\n//! because num-bigint is significantly slower, the plan is to\n\n//! use this until ApInt or ramp are good enough, then use those\n\n//! instead.\n\n\n\nuse std::fmt;\n\n\n\nuse crate::keccak::Keccak;\n\nuse crate::Result;\n\n\n\nuse super::Sponge;\n\nuse iota_constants::HASH_TRINARY_SIZE as HASH_LENGTH;\n\n\n\nconst BIT_HASH_LENGTH: usize = 384;\n\nconst BYTE_HASH_LENGTH: usize = BIT_HASH_LENGTH / 8;\n\n\n\nconst RADIX: i32 = 3;\n\n\n\nconst BYTE_LENGTH: usize = 48;\n\nconst INT_LENGTH: usize = BYTE_LENGTH / 4;\n", "file_path": "iota-crypto/kerl.rs", "rank": 78, "score": 12.681427087218536 }, { "content": "#[derive(Clone)]\n\npub struct Difficulty(pub usize);\n\n\n\nuse common::constants::{HASH_TRIT_LEN, NETWORK_DIFFICULTY};\n\n\n\nimpl Default for Difficulty {\n\n fn default() -> Self {\n\n Self(NETWORK_DIFFICULTY)\n\n }\n\n}\n\n\n\nimpl From<usize> for Difficulty {\n\n fn from(difficulty: usize) -> Self {\n\n let max_difficulty = HASH_TRIT_LEN;\n\n if difficulty > max_difficulty {\n\n Self(max_difficulty)\n\n } else {\n\n Self(difficulty)\n\n }\n\n }\n", "file_path": "bee-pow/src/difficulty.rs", "rank": 79, "score": 12.580555192296632 }, { "content": " pub(crate) fn xof(mut self) -> XofReader {\n\n self.pad();\n\n\n\n keccakf(&mut self.a);\n\n\n\n XofReader {\n\n keccak: self,\n\n offset: 0,\n\n }\n\n }\n\n}\n\n\n\npub(crate) struct XofReader {\n\n keccak: Keccak,\n\n offset: usize,\n\n}\n\n\n\nimpl XofReader {\n\n pub(crate) fn squeeze(&mut self, 
output: &mut [u8]) {\n\n // second foldp\n", "file_path": "iota-crypto/keccak.rs", "rank": 80, "score": 12.561271618306737 }, { "content": "\n\nimpl Nonce {\n\n pub fn from_str(nonce: &str) -> Self {\n\n assert!(nonce.len() <= NONCE.tryte_offset.length);\n\n assert!(nonce.chars().all(|c| c.is_tryte()));\n\n\n\n let mut trytes = [TRYTE_ZERO; NONCE.tryte_offset.length];\n\n\n\n for (i, c) in nonce.chars().enumerate() {\n\n trytes[i] = c;\n\n }\n\n\n\n Self(trytes)\n\n }\n\n}\n\n\n\nimpl Transaction {\n\n pub fn from_tryte_str(tx_trytes: &str) -> Self {\n\n assert_eq!(TRANSACTION_TRYT_LEN, tx_trytes.len());\n\n\n", "file_path": "bee-bundle/src/transaction.rs", "rank": 81, "score": 12.457259882353826 }, { "content": "pub mod constants;\n\n\n\nuse common::Trit;\n\n\n\nuse self::constants::CURL_HASH_TRIT_LEN as HASH_LEN;\n\nuse self::constants::CURL_P_81 as NUM_ROUNDS;\n\nuse self::constants::CURL_STAT_TRIT_LEN as STATE_LEN;\n\nuse self::constants::TRUTH_TABLE;\n\n\n\npub struct Curl {\n\n num_rounds: usize,\n\n state: [Trit; STATE_LEN],\n\n scratchpad: [Trit; STATE_LEN],\n\n}\n\n\n\nimpl Curl {\n\n pub fn new(num_rounds: usize) -> Self {\n\n Self {\n\n num_rounds,\n\n ..Self::default()\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 82, "score": 12.269116503669217 }, { "content": " }\n\n}\n\n\n\nimpl Default for Address {\n\n fn default() -> Self {\n\n Self([TRYTE_ZERO; ADDRESS.tryte_offset.length])\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Address {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0.iter().collect::<String>())\n\n }\n\n}\n\n\n\nimpl Address {\n\n pub fn from_str(address: &str) -> Self {\n\n assert!(address.len() <= ADDRESS.tryte_offset.length);\n\n assert!(address.chars().all(|c| c.is_tryte()));\n\n\n", "file_path": "bee-bundle/src/transaction.rs", "rank": 83, "score": 12.251543337590478 }, { "content": "pub const H3: u64 = 0x003FFFFFFFFFFFFF;\n\n\n\npub const OUTER_INCR_START: usize = 
HASH_TRIT_LEN - NONCE_TRIT_LEN + 4;\n\npub const INNER_INCR_START: usize = OUTER_INCR_START + 27;\n\n\n\npub const INDICES: [isize; 730] = [\n\n 0, 364, 728, 363, 727, 362, 726, 361, 725, 360, 724, 359, 723, 358, 722, 357, 721, 356, 720,\n\n 355, 719, 354, 718, 353, 717, 352, 716, 351, 715, 350, 714, 349, 713, 348, 712, 347, 711, 346,\n\n 710, 345, 709, 344, 708, 343, 707, 342, 706, 341, 705, 340, 704, 339, 703, 338, 702, 337, 701,\n\n 336, 700, 335, 699, 334, 698, 333, 697, 332, 696, 331, 695, 330, 694, 329, 693, 328, 692, 327,\n\n 691, 326, 690, 325, 689, 324, 688, 323, 687, 322, 686, 321, 685, 320, 684, 319, 683, 318, 682,\n\n 317, 681, 316, 680, 315, 679, 314, 678, 313, 677, 312, 676, 311, 675, 310, 674, 309, 673, 308,\n\n 672, 307, 671, 306, 670, 305, 669, 304, 668, 303, 667, 302, 666, 301, 665, 300, 664, 299, 663,\n\n 298, 662, 297, 661, 296, 660, 295, 659, 294, 658, 293, 657, 292, 656, 291, 655, 290, 654, 289,\n\n 653, 288, 652, 287, 651, 286, 650, 285, 649, 284, 648, 283, 647, 282, 646, 281, 645, 280, 644,\n\n 279, 643, 278, 642, 277, 641, 276, 640, 275, 639, 274, 638, 273, 637, 272, 636, 271, 635, 270,\n\n 634, 269, 633, 268, 632, 267, 631, 266, 630, 265, 629, 264, 628, 263, 627, 262, 626, 261, 625,\n\n 260, 624, 259, 623, 258, 622, 257, 621, 256, 620, 255, 619, 254, 618, 253, 617, 252, 616, 251,\n\n 615, 250, 614, 249, 613, 248, 612, 247, 611, 246, 610, 245, 609, 244, 608, 243, 607, 242, 606,\n\n 241, 605, 240, 604, 239, 603, 238, 602, 237, 601, 236, 600, 235, 599, 234, 598, 233, 597, 232,\n", "file_path": "bee-pow/src/constants.rs", "rank": 84, "score": 12.173011053665668 }, { "content": "use common::constants::*;\n\n\n\npub const NUM_FULL_CHUNKS_FOR_PRESTATE: usize =\n\n (TRANSACTION_TRIT_LEN - HASH_TRIT_LEN) / HASH_TRIT_LEN; // 32\n\n\n\npub const TRANS_NONCE_START: usize = TRANSACTION_TRIT_LEN - NONCE_TRIT_LEN; // 7938\n\npub const CHUNK_NONCE_START: usize = HASH_TRIT_LEN - NONCE_TRIT_LEN; // 162\n\n\n\npub const BATCH_SIZE: usize = 64;\n\n\n\npub 
const BITS_1: u64 = 0xFFFFFFFFFFFFFFFF;\n\npub const BITS_0: u64 = 0x0000000000000000;\n\n\n\npub const L0: u64 = 0xDB6DB6DB6DB6DB6D;\n\npub const H0: u64 = 0xB6DB6DB6DB6DB6DB;\n\npub const L1: u64 = 0xF1F8FC7E3F1F8FC7;\n\npub const H1: u64 = 0x8FC7E3F1F8FC7E3F;\n\npub const L2: u64 = 0x7FFFE00FFFFC01FF;\n\npub const H2: u64 = 0xFFC01FFFF803FFFF;\n\npub const L3: u64 = 0xFFC0000007FFFFFF;\n", "file_path": "bee-pow/src/constants.rs", "rank": 85, "score": 12.047131811171074 }, { "content": "/// curl.absorb(&input);\n\n/// curl.squeeze(&mut out);\n\n///```\n\n#[derive(Clone, Copy)]\n\npub struct Curl {\n\n number_of_rounds: usize,\n\n scratchpad: [i8; STATE_LENGTH],\n\n state: [i8; STATE_LENGTH],\n\n}\n\n\n\nimpl Default for Curl {\n\n fn default() -> Curl {\n\n Curl {\n\n number_of_rounds: 81,\n\n scratchpad: [0; STATE_LENGTH],\n\n state: [0; STATE_LENGTH],\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/curl.rs", "rank": 86, "score": 11.682112106598884 }, { "content": "};\n\n\n\n// ONLY TEMPORARY\n\n// re-export iota-conversion\n\npub use iota_conversion;\n\n\n\n#[repr(transparent)]\n\npub struct TritSlice<T: RawEncoding + ?Sized = T1B1>(T);\n\n\n\nimpl<T: RawEncoding + ?Sized> TritSlice<T> {\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n pub fn get(&self, index: usize) -> Option<Trit> {\n\n if index < self.0.len() {\n\n unsafe { Some(self.0.get_unchecked(index).into()) }\n\n } else {\n\n None\n\n }\n", "file_path": "bee-ternary/src/lib.rs", "rank": 87, "score": 11.576244240102318 }, { "content": "}\n\n\n\nimpl Trinary for Vec<Trit> {\n\n fn trits(&self) -> Vec<Trit> {\n\n self.to_vec()\n\n }\n\n fn trits_with_length(&self, length: usize) -> Vec<Trit> {\n\n trits_with_length(&self.trits(), length)\n\n }\n\n fn trytes(&self) -> Result<Trytes> {\n\n trytes(self)\n\n }\n\n}\n\n\n\nimpl Trinary for &[Trit] {\n\n fn trits(&self) -> Vec<Trit> {\n\n self.to_vec()\n\n }\n\n fn trits_with_length(&self, length: usize) -> Vec<Trit> {\n\n 
trits_with_length(&self.trits(), length)\n", "file_path": "iota-conversion/trinary.rs", "rank": 88, "score": 11.491389495714913 }, { "content": "\n\npub struct FromU8Error;\n\npub struct FromI8Error;\n\n\n\n/// Similar impls for `TritsMut` and `TritsBuf`\n\nimpl<'a> Trits<'a> {\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n /// Create a `Trits` from a `&[i8]` slice without verifying that its bytes are\n\n /// correctly binary-coded balanced trits (-1, 0, and +1).\n\n ///\n\n /// This function is intended to be used in hot loops and relies on the user making sure that\n\n /// the bytes are set correctly.\n\n ///\n\n /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits\n\n /// correctly before creating `Trits`.\n\n ///\n\n /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 89, "score": 11.460250751301054 }, { "content": " }\n\n\n\n pub fn set(&mut self, index: usize, trit: Trit) {\n\n if index < self.0.len() {\n\n unsafe { self.0.set_unchecked(index, trit.into()) };\n\n }\n\n }\n\n\n\n pub fn iter(&self) -> impl Iterator<Item=Trit> + '_ {\n\n (0..self.0.len()).map(move |idx| unsafe { self.0.get_unchecked(idx).into() })\n\n }\n\n\n\n pub fn slice(&self, range: Range<usize>) -> &Self {\n\n assert!(range.end >= range.start && range.end <= self.len());\n\n unsafe { &*(self.0.slice_unchecked(range) as *const _ as *const Self) }\n\n }\n\n\n\n pub fn slice_mut(&mut self, range: Range<usize>) -> &mut Self {\n\n assert!(range.end >= range.start && range.end <= self.len());\n\n unsafe { &mut *(self.0.slice_unchecked_mut(range) as *mut _ as *mut Self) }\n", "file_path": "bee-ternary/src/lib.rs", "rank": 90, "score": 11.456586403554692 }, { "content": " /// most secure\n\n High = 3,\n\n}\n\n\n\n/// Number of Iota Signature Scheme fragments\n\npub const ISS_FRAGMENTS: usize = 27;\n\n/// Length of Iota Signature Scheme 
key\n\npub const ISS_KEY_LENGTH: usize = HASH_TRINARY_SIZE * ISS_FRAGMENTS;\n\n/// Lenght of Iota Signature Scheme chunk\n\npub const ISS_CHUNK_LENGTH: usize = HASH_TRINARY_SIZE / TRINARY_RADIX;\n", "file_path": "iota-constants/lib.rs", "rank": 91, "score": 11.318208308365367 }, { "content": " unsafe fn slice_unchecked(&self, range: Range<usize>) -> &Self {\n\n &*Self::make(self.ptr(0), range.start, range.end - range.start)\n\n }\n\n\n\n unsafe fn slice_unchecked_mut(&mut self, range: Range<usize>) -> &mut Self {\n\n &mut *(Self::make(self.ptr(0), range.start, range.end - range.start) as *mut Self)\n\n }\n\n}\n\n\n\npub struct T4B1Buf(Vec<u8>, usize);\n\n\n\nimpl RawEncodingBuf for T4B1Buf {\n\n type Slice = T4B1;\n\n\n\n fn new() -> Self {\n\n Self(Vec::new(), 0)\n\n }\n\n\n\n fn push(&mut self, trit: Trit) {\n\n let b = trit.into_u8();\n", "file_path": "bee-ternary/src/t4b1.rs", "rank": 92, "score": 11.127071157456509 }, { "content": " a: [u64; PLEN],\n\n offset: usize,\n\n rate: usize,\n\n delim: u8,\n\n}\n\n\n\nmacro_rules! 
impl_constructor {\n\n ($name:ident, $alias:ident, $bits:expr, $delim:expr) => {\n\n pub(crate) fn $name() -> Keccak {\n\n Keccak::new(200 - $bits / 4, $delim)\n\n }\n\n\n\n pub(crate) fn $alias(data: &[u8], result: &mut [u8]) {\n\n let mut keccak = Keccak::$name();\n\n keccak.update(data);\n\n keccak.finalize(result);\n\n }\n\n };\n\n}\n\n\n", "file_path": "iota-crypto/keccak.rs", "rank": 93, "score": 10.798788875627436 }, { "content": "#[derive(Clone)]\n\npub struct Cores(pub(self) usize);\n\n\n\nimpl Default for Cores {\n\n fn default() -> Self {\n\n Self(num_cpus::get())\n\n }\n\n}\n\n\n\nimpl From<usize> for Cores {\n\n fn from(num_cores: usize) -> Self {\n\n let max_cores = num_cpus::get();\n\n if num_cores > max_cores {\n\n Self(max_cores)\n\n } else {\n\n Self(num_cores)\n\n }\n\n }\n\n}\n\n\n\nimpl std::ops::Deref for Cores {\n\n type Target = usize;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n", "file_path": "bee-pow/src/cores.rs", "rank": 94, "score": 10.65021804658335 }, { "content": "}\n\n\n\nimpl Trinary for Trytes {\n\n fn trits(&self) -> Vec<Trit> {\n\n self.chars().flat_map(char_to_trits).cloned().collect()\n\n }\n\n fn trits_with_length(&self, length: usize) -> Vec<Trit> {\n\n trits_with_length(&self.trits(), length)\n\n }\n\n fn trytes(&self) -> Result<Trytes> {\n\n Ok(self.clone())\n\n }\n\n}\n\n\n\nimpl Trinary for &str {\n\n fn trits(&self) -> Vec<Trit> {\n\n self.chars().flat_map(char_to_trits).cloned().collect()\n\n }\n\n fn trits_with_length(&self, length: usize) -> Vec<Trit> {\n\n trits_with_length(&self.trits(), length)\n\n }\n\n fn trytes(&self) -> Result<Trytes> {\n\n Ok(self.to_string())\n\n }\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 95, "score": 10.546003375108437 }, { "content": "use std::fmt;\n\n\n\nuse super::Result;\n\nuse super::{HashMode, Sponge};\n\nuse iota_constants::HASH_TRINARY_SIZE as HASH_LENGTH;\n\n\n\n/// The length of the internal state\n\nconst STATE_LENGTH: usize = 3 * 
HASH_LENGTH;\n\n\n\nconst TRUTH_TABLE: [i8; 11] = [1, 0, -1, 2, 1, -1, 0, 2, -1, 1, 0];\n\n\n\n/// The Curl struct is a Sponge that uses the Curl\n\n/// hashing algorithm.\n\n///```\n\n/// use iota_crypto::{Sponge, Curl};\n\n/// // Create an array of 243 1s\n\n/// let input = [1; 243];\n\n/// // Create an array of 243 0s\n\n/// let mut out = [0; 243];\n\n/// let mut curl = Curl::default();\n", "file_path": "iota-crypto/curl.rs", "rank": 96, "score": 10.488839429249172 }, { "content": "use common::constants::NONCE_TRIT_LEN as NONCE_LEN;\n\nuse common::Trit;\n\n\n\n#[derive(Copy)]\n\npub struct NonceTrits(pub(crate) [Trit; NONCE_LEN]);\n\n\n\nimpl NonceTrits {\n\n pub fn to_vec(&self) -> Vec<i8> {\n\n self.0.to_vec()\n\n }\n\n\n\n pub fn as_slice(&self) -> &[i8] {\n\n &self.0[..]\n\n }\n\n}\n\n\n\nimpl Default for NonceTrits {\n\n fn default() -> Self {\n\n Self([0i8; NONCE_LEN])\n\n }\n", "file_path": "bee-pow/src/nonce.rs", "rank": 97, "score": 10.359829539981144 }, { "content": "\n\n offset += chunk_length;\n\n\n\n if length > chunk_length {\n\n length -= chunk_length;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.state.iter_mut().for_each(|t| *t = 0);\n\n }\n\n\n\n fn transform(&mut self) {\n\n let mut scratchpad_index = 0;\n\n\n\n for _ in 0..self.num_rounds {\n\n self.scratchpad.copy_from_slice(&self.state);\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 98, "score": 10.178467026135978 }, { "content": "\n\nimpl Keccak {\n\n pub(crate) fn new(rate: usize, delim: u8) -> Keccak {\n\n Keccak {\n\n a: [0; PLEN],\n\n offset: 0,\n\n rate,\n\n delim,\n\n }\n\n }\n\n\n\n impl_constructor!(new_shake128, shake128, 128, 0x1f);\n\n impl_constructor!(new_shake256, shake256, 256, 0x1f);\n\n impl_constructor!(new_keccak224, keccak224, 224, 0x01);\n\n impl_constructor!(new_keccak256, keccak256, 256, 0x01);\n\n impl_constructor!(new_keccak384, keccak384, 384, 0x01);\n\n impl_constructor!(new_keccak512, keccak512, 512, 0x01);\n\n 
impl_constructor!(new_sha3_224, sha3_224, 224, 0x06);\n\n impl_constructor!(new_sha3_256, sha3_256, 256, 0x06);\n\n impl_constructor!(new_sha3_384, sha3_384, 384, 0x06);\n", "file_path": "iota-crypto/keccak.rs", "rank": 99, "score": 9.96288523649043 } ]
Rust
src/client/market.rs
zeta1999/huobi_future_async
e5202c50b15cd1fd22ccb696534bb4c513af1b49
use super::HuobiFuture; use crate::{ models::*, }; use failure::Fallible; use futures::prelude::*; use std::{collections::BTreeMap}; impl HuobiFuture { pub fn get_contract_info<S1, S2, S3>( &self, symbol: S1, contract_type: S2, contract_code: S3 ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Symbol>>>>> where S1: Into<Option<String>>, S2: Into<Option<String>>, S3: Into<Option<String>>, { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); if let Some(sy) = symbol.into() { parameters.insert("symbol".into(), format!{"{}", sy});} if let Some(cc) = contract_code.into() { parameters.insert("contract_code".into(), format!("{}", cc));} if let Some(ct) = contract_type.into() { parameters.insert("contract_type".into(), format!("{}", ct));} Ok(self .transport .get("/api/v1/contract_contract_info", Some(parameters))?) } pub fn get_all_book_tickers<S1, S2>( &self, contract_code: S1, orderbook_type: S2, ) -> Fallible<impl Future<Output = Fallible<APIResponse<OrderBook>>>> where S1: Into<String>, S2: Into<String>, { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); parameters.insert("symbol".into(), contract_code.into()); parameters.insert("type".into(), orderbook_type.into()); Ok(self .transport .get("/market/depth", Some(parameters))?) 
} pub fn get_klines<S1, S2, S3, S4, S5>( &self, contract_code: S1, period: S2, size: S3, from: S4, to: S5, ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Kline>>>>> where S1: Into<String>, S2: Into<String>, S3: Into<Option<u32>>, S4: Into<Option<u32>>, S5: Into<Option<u32>>, { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); parameters.insert("symbol".into(), contract_code.into()); parameters.insert("period".into(), period.into()); if let Some(lt) = size.into() { parameters.insert("size".into(), format!{"{}", lt});} if let Some(st) = from.into() { parameters.insert("from".into(), format!("{}", st));} if let Some(et) = to.into() { parameters.insert("to".into(), format!("{}", et));} Ok(self .transport .get("/market/history/kline", Some(parameters))?) } pub fn get_index_klines<S1, S2>( &self, contract_code: S1, period: S2, size: u32, ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Kline>>>>> where S1: Into<String>, S2: Into<String>, { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); parameters.insert("symbol".into(), contract_code.into()); parameters.insert("period".into(), period.into()); parameters.insert("size".into(), format!{"{}", size}); Ok(self .transport .get("/index/market/history/index", Some(parameters))?) } pub fn get_basis<S1, S2, S3>( &self, contract_code: S1, period: S2, basis_price_type: S3, size: u32, ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Basis>>>>> where S1: Into<String>, S2: Into<String>, S3: Into<Option<String>>, { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); parameters.insert("symbol".into(), contract_code.into()); parameters.insert("period".into(), period.into()); parameters.insert("size".into(), format!{"{}", size}); if let Some(bs) = basis_price_type.into() { parameters.insert("basis_price_type".into(), bs);} Ok(self .transport .get("/index/market/history/basis", Some(parameters))?) 
} pub fn get_merged_data<S1>( &self, symbol: S1, ) -> Fallible<impl Future<Output = Fallible<APIResponse<Merged>>>> where S1: Into<String> { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); parameters.insert("symbol".into(), symbol.into()); Ok(self .transport .get("/market/detail/merged", Some(parameters))? ) } pub fn get_price_limit<S1, S2, S3>( &self, symbol: S1, contract_type: S2, contract_code: S3 ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<PriceLimit>>>>> where S1: Into<Option<String>>, S2: Into<Option<String>>, S3: Into<Option<String>> { let mut parameters: BTreeMap<String, String> = BTreeMap::new(); if let Some(sym) = symbol.into() { parameters.insert("symbol".into(), sym); } if let Some(ctype) = contract_type.into() { parameters.insert("contract_type".into(), ctype); } if let Some(code) = contract_code.into() { parameters.insert("contract_code".into(), code); } Ok(self .transport .get("/api/v1/contract_price_limit", Some(parameters))? ) } }
use super::HuobiFuture;
use crate::models::*;
use failure::Fallible;
use futures::prelude::*;
use std::collections::BTreeMap;

impl HuobiFuture {
    /// Queries contract metadata (contract size, tick size, status, ...).
    ///
    /// Calls `GET /api/v1/contract_contract_info`. All filters are optional;
    /// omitted ones are not sent as query parameters.
    pub fn get_contract_info<S1, S2, S3>(
        &self,
        symbol: S1,
        contract_type: S2,
        contract_code: S3,
    ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Symbol>>>>>
    where
        S1: Into<Option<String>>,
        S2: Into<Option<String>>,
        S3: Into<Option<String>>,
    {
        let mut parameters: BTreeMap<String, String> = BTreeMap::new();
        // The values are already Strings, so insert them directly instead of
        // re-formatting them.
        if let Some(symbol) = symbol.into() {
            parameters.insert("symbol".into(), symbol);
        }
        if let Some(contract_code) = contract_code.into() {
            parameters.insert("contract_code".into(), contract_code);
        }
        if let Some(contract_type) = contract_type.into() {
            parameters.insert("contract_type".into(), contract_type);
        }
        Ok(self
            .transport
            .get("/api/v1/contract_contract_info", Some(parameters))?)
    }

    /// Fetches the order book for a contract.
    ///
    /// Calls `GET /market/depth`.
    ///
    /// * `contract_code` - contract symbol.
    /// * `orderbook_type` - depth aggregation type (exchange-defined,
    ///   presumably "step0"... — TODO confirm against the exchange API docs).
    pub fn get_all_book_tickers<S1, S2>(
        &self,
        contract_code: S1,
        orderbook_type: S2,
    ) -> Fallible<impl Future<Output = Fallible<APIResponse<OrderBook>>>>
    where
        S1: Into<String>,
        S2: Into<String>,
    {
        let mut parameters: BTreeMap<String, String> = BTreeMap::new();
        parameters.insert("symbol".into(), contract_code.into());
        parameters.insert("type".into(), orderbook_type.into());
        Ok(self
            .transport
            .get("/market/depth", Some(parameters))?)
}

    /// Fetches candlestick (kline) data for a contract.
    ///
    /// Calls `GET /market/history/kline`.
    ///
    /// * `contract_code` - contract symbol, e.g. "BTC_CQ".
    /// * `period` - kline interval, e.g. "1min", "60min", "1day".
    /// * `size` - optional maximum number of klines to return.
    /// * `from` - optional start timestamp.
    /// * `to` - optional end timestamp.
    pub fn get_klines<S1, S2, S3, S4, S5>(
        &self,
        contract_code: S1,
        period: S2,
        size: S3,
        from: S4,
        to: S5,
    ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Kline>>>>>
    where
        S1: Into<String>,
        S2: Into<String>,
        S3: Into<Option<u32>>,
        S4: Into<Option<u32>>,
        S5: Into<Option<u32>>,
    {
        let mut parameters: BTreeMap<String, String> = BTreeMap::new();
        parameters.insert("symbol".into(), contract_code.into());
        parameters.insert("period".into(), period.into());
        // Optional query parameters are only sent when the caller supplied them.
        if let Some(size) = size.into() {
            parameters.insert("size".into(), size.to_string());
        }
        if let Some(from) = from.into() {
            parameters.insert("from".into(), from.to_string());
        }
        if let Some(to) = to.into() {
            parameters.insert("to".into(), to.to_string());
        }
        Ok(self
            .transport
            .get("/market/history/kline", Some(parameters))?)
    }

    /// Fetches index kline data.
    ///
    /// Calls `GET /index/market/history/index`.
    ///
    /// * `contract_code` - index symbol.
    /// * `period` - kline interval.
    /// * `size` - number of klines to return.
    pub fn get_index_klines<S1, S2>(
        &self,
        contract_code: S1,
        period: S2,
        size: u32,
    ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Kline>>>>>
    where
        S1: Into<String>,
        S2: Into<String>,
    {
        let mut parameters: BTreeMap<String, String> = BTreeMap::new();
        parameters.insert("symbol".into(), contract_code.into());
        parameters.insert("period".into(), period.into());
        parameters.insert("size".into(), size.to_string());
        Ok(self
            .transport
            .get("/index/market/history/index", Some(parameters))?)
    }

    /// Fetches basis data (contract price versus index price).
    ///
    /// Calls `GET /index/market/history/basis`.
    ///
    /// * `contract_code` - contract symbol.
    /// * `period` - kline interval.
    /// * `basis_price_type` - optional price type used for the basis
    ///   calculation (exchange-defined — TODO confirm valid values).
    /// * `size` - number of records to return.
    pub fn get_basis<S1, S2, S3>(
        &self,
        contract_code: S1,
        period: S2,
        basis_price_type: S3,
        size: u32,
    ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Basis>>>>>
    where
        S1: Into<String>,
        S2: Into<String>,
        S3: Into<Option<String>>,
    {
        let mut parameters: BTreeMap<String, String> = BTreeMap::new();
        parameters.insert("symbol".into(), contract_code.into());
        parameters.insert("period".into(), period.into());
        parameters.insert("size".into(), size.to_string());
        if let Some(price_type) = basis_price_type.into() {
            parameters.insert("basis_price_type".into(), price_type);
        }
        Ok(self
            .transport
            .get("/index/market/history/basis", Some(parameters))?)
    }

    /// Fetches the merged (aggregated) market detail for a symbol.
    ///
    /// Calls `GET /market/detail/merged`.
    pub fn get_merged_data<S1>(
        &self,
        symbol: S1,
    ) -> Fallible<impl Future<Output = Fallible<APIResponse<Merged>>>>
    where
        S1: Into<String>,
    {
        let mut parameters: BTreeMap<String, String> = BTreeMap::new();
        parameters.insert("symbol".into(), symbol.into());
        Ok(self
            .transport
            .get("/market/detail/merged", Some(parameters))?)
    }
}
/// Fetches price limits for contracts.
///
/// Calls `GET /api/v1/contract_price_limit`. Every filter is optional;
/// a filter that resolves to `None` is left out of the query string.
pub fn get_price_limit<S1, S2, S3>(
    &self,
    symbol: S1,
    contract_type: S2,
    contract_code: S3,
) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<PriceLimit>>>>>
where
    S1: Into<Option<String>>,
    S2: Into<Option<String>>,
    S3: Into<Option<String>>,
{
    let mut parameters: BTreeMap<String, String> = BTreeMap::new();
    // Table-driven insertion: pair each query key with its optional value
    // and keep only the ones that were actually provided.
    let candidates = vec![
        ("symbol", symbol.into()),
        ("contract_type", contract_type.into()),
        ("contract_code", contract_code.into()),
    ];
    for (key, value) in candidates {
        if let Some(value) = value {
            parameters.insert(key.into(), value);
        }
    }
    Ok(self
        .transport
        .get("/api/v1/contract_price_limit", Some(parameters))?)
}
function_block-full_function
[ { "content": "pub fn build_query_string(parameters: &[(String,String)]) -> String \n\n{\n\n parameters\n\n .iter()\n\n .map(|(key, value)| format!(\"{}={}\", key, percent_encode(&value.clone())))\n\n .collect::<Vec<String>>()\n\n .join(\"&\")\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 0, "score": 128638.69199921926 }, { "content": "pub fn get_timestamp() -> String {\n\n let utc_time = chrono::Utc::now();\n\n utc_time.format(\"%Y-%m-%dT%H:%M:%S\").to_string()\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 1, "score": 112561.276350218 }, { "content": "pub fn build_query_string(parameters: BTreeMap<String, String>) -> String {\n\n parameters\n\n .into_iter()\n\n .map(|(key, value)| format!(\"{}={}\", key, percent_encode(&value)))\n\n .collect::<Vec<String>>()\n\n .join(\"&\")\n\n}\n\n\n", "file_path": "src/client/subscription.rs", "rank": 2, "score": 99391.8248324211 }, { "content": "pub fn percent_encode(source: &str) -> String {\n\n use percent_encoding::{define_encode_set, utf8_percent_encode, USERINFO_ENCODE_SET};\n\n define_encode_set! {\n\n pub CUSTOM_ENCODE_SET = [USERINFO_ENCODE_SET] | { '+', ',' }\n\n }\n\n utf8_percent_encode(source, CUSTOM_ENCODE_SET).to_string()\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 3, "score": 96723.5125357779 }, { "content": "pub fn percent_encode(source: &str) -> String {\n\n use percent_encoding::{define_encode_set, utf8_percent_encode, USERINFO_ENCODE_SET};\n\n define_encode_set! 
{\n\n pub CUSTOM_ENCODE_SET = [USERINFO_ENCODE_SET] | { '+', ',' }\n\n }\n\n utf8_percent_encode(source, CUSTOM_ENCODE_SET).to_string()\n\n}", "file_path": "src/client/subscription.rs", "rank": 4, "score": 76152.64187456068 }, { "content": "pub fn sign_hmac_sha256_base64(secret: &str, digest: &str) -> Fallible<String> {\n\n\n\n let signed_key = hmac::SigningKey::new(&digest::SHA256, secret.as_bytes());\n\n let signature = hmac::sign(&signed_key, digest.as_bytes());\n\n let b64_encoded_sig = BASE64.encode(signature.as_ref());\n\n\n\n Ok(b64_encoded_sig)\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 5, "score": 75645.91105814284 }, { "content": "pub fn sign_hmac_sha256_base64(secret: &str, digest: &str) -> String {\n\n use data_encoding::BASE64;\n\n\n\n let signed_key = hmac::SigningKey::new(&digest::SHA256, secret.as_bytes());\n\n let signature = hmac::sign(&signed_key, digest.as_bytes());\n\n BASE64.encode(signature.as_ref())\n\n}\n\n\n", "file_path": "src/client/subscription.rs", "rank": 6, "score": 63057.52261489919 }, { "content": "fn string_as_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_any(F64Visitor)\n\n}\n\n\n", "file_path": "src/models.rs", "rank": 7, "score": 49848.45009634436 }, { "content": "fn from_str<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: FromStr,\n\n T::Err: Display,\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n T::from_str(&s).map_err(de::Error::custom)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct APIResponse<R> {\n\n pub status: Option<String>,\n\n pub ts: Option<u64>,\n\n pub data: Option<R>,\n\n pub tick: Option<R>,\n\n pub ch: Option<String>,\n\n pub err_code: Option<ErrCodeEnum>,\n\n pub err_msg: Option<String>,\n\n #[serde(rename = \"err-code\")]\n", "file_path": "src/models.rs", "rank": 8, "score": 34291.33506721583 }, { "content": " client: 
reqwest::Client,\n\n}\n\n\n\nimpl Default for Transport {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Transport {\n\n pub fn new() -> Self {\n\n Self {\n\n credential: None,\n\n client: reqwest::Client::builder().build().unwrap()\n\n }\n\n }\n\n\n\n pub fn with_credential(api_key: &str, api_secret: &str) -> Self {\n\n Self {\n\n client: reqwest::Client::builder().build().unwrap(),\n", "file_path": "src/transport.rs", "rank": 9, "score": 25784.24534636573 }, { "content": "use crate::error::{HuobiResponse, Error};\n\nuse failure::Fallible;\n\nuse futures::prelude::*;\n\nuse ring::{digest, hmac};\n\nuse http::Method;\n\nuse reqwest_ext::*;\n\nuse serde::{de::DeserializeOwned, Serialize};\n\nuse serde_json::{to_string, to_value, Value};\n\nuse data_encoding::BASE64;\n\nuse tracing::*;\n\nuse url::Url;\n\n\n\nconst BASE: &str = \"https://api.hbdm.vn\";\n\nconst SPOT_BASE: &str = \"https://api.huobi.pro\";\n\nstatic API_HOST: &str = \"api.hbdm.vn\";\n\nstatic SPOT_API_HOST: &str = \"api.huobi.pro\";\n\n\n\n#[derive(Clone)]\n\npub struct Transport {\n\n credential: Option<(String, String)>,\n", "file_path": "src/transport.rs", "rank": 10, "score": 25782.742961158456 }, { "content": "\n\n pub fn signed_request<O, Q, D>(\n\n &self,\n\n method: Method,\n\n endpoint: &str,\n\n params: Option<Q>,\n\n data: Option<D>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n\n D: Serialize,\n\n {\n\n let (key, secret) = self.check_key()?;\n\n let mut query = params.map_or_else(Vec::new, |q| q.to_url_query());\n\n query.push((\"AccessKeyId\".to_string(), key.to_string()));\n\n query.push((\"SignatureMethod\".to_string(), \"HmacSHA256\".to_string()));\n\n query.push((\"SignatureVersion\".to_string(), \"2\".to_string()));\n\n let utctime = get_timestamp();\n\n query.push((\"Timestamp\".to_string(), utctime));\n", "file_path": "src/transport.rs", "rank": 11, "score": 25779.501895096484 }, { "content": " pub fn 
delete<O, Q>(\n\n &self,\n\n endpoint: &str,\n\n params: Option<Q>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n\n {\n\n self.request::<_, _, ()>(Method::DELETE, endpoint, params, None)\n\n }\n\n\n\n pub fn signed_get<O, Q>(\n\n &self,\n\n endpoint: &str,\n\n params: Option<Q>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n", "file_path": "src/transport.rs", "rank": 12, "score": 25777.091016590806 }, { "content": " pub fn request<O, Q, D>(\n\n &self,\n\n method: Method,\n\n endpoint: &str,\n\n params: Option<Q>,\n\n data: Option<D>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n\n D: Serialize,\n\n {\n\n let url = format!(\"{}{}\", BASE, endpoint);\n\n let url = match params {\n\n Some(p) => Url::parse_with_params(&url, p.to_url_query())?,\n\n None => Url::parse(&url)?,\n\n };\n\n\n\n let body = match data {\n\n Some(data) => data.to_url_query_string(),\n", "file_path": "src/transport.rs", "rank": 13, "score": 25777.02430760059 }, { "content": " ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n D: Serialize,\n\n {\n\n self.request::<_, (), _>(Method::POST, endpoint, None, data)\n\n }\n\n\n\n pub fn put<O, D>(\n\n &self,\n\n endpoint: &str,\n\n data: Option<D>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n D: Serialize,\n\n {\n\n self.request::<_, (), _>(Method::PUT, endpoint, None, data)\n\n }\n\n\n", "file_path": "src/transport.rs", "rank": 14, "score": 25776.57635386787 }, { "content": " credential: Some((api_key.into(), api_secret.into())),\n\n }\n\n }\n\n\n\n pub fn get<O, Q>(\n\n &self,\n\n endpoint: &str,\n\n params: Option<Q>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n\n {\n\n self.request::<_, _, ()>(Method::GET, endpoint, 
params, None)\n\n }\n\n\n\n pub fn post<O, D>(\n\n &self,\n\n endpoint: &str,\n\n data: Option<D>,\n", "file_path": "src/transport.rs", "rank": 15, "score": 25776.571658680994 }, { "content": " ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n\n {\n\n self.signed_request::<_, _, ()>(Method::PUT, endpoint, params, None)\n\n }\n\n\n\n pub fn signed_delete<O, Q>(\n\n &self,\n\n endpoint: &str,\n\n params: Option<Q>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n Q: Serialize,\n\n {\n\n self.signed_request::<_, _, ()>(Method::DELETE, endpoint, params, None)\n\n }\n\n\n", "file_path": "src/transport.rs", "rank": 16, "score": 25776.476471359532 }, { "content": " {\n\n self.signed_request::<_, _, ()>(Method::GET, endpoint, params, None)\n\n }\n\n\n\n pub fn signed_post<O, D>(\n\n &self,\n\n endpoint: &str,\n\n data: Option<D>,\n\n ) -> Fallible<impl Future<Output = Fallible<O>>>\n\n where\n\n O: DeserializeOwned,\n\n D: Serialize,\n\n {\n\n self.signed_request::<_, (), _>(Method::POST, endpoint, None, data)\n\n }\n\n\n\n pub fn signed_put<O, Q>(\n\n &self,\n\n endpoint: &str,\n\n params: Option<Q>,\n", "file_path": "src/transport.rs", "rank": 17, "score": 25776.398977995836 }, { "content": "\n\n let paramss = build_query_string(&query);\n\n\n\n let api_host = if endpoint == \"/v1/futures/transfer\" { SPOT_API_HOST } else { API_HOST };\n\n let api_url = if endpoint == \"/v1/futures/transfer\" { SPOT_BASE } else { BASE };\n\n\n\n let signature = sign_hmac_sha256_base64(\n\n secret,\n\n &format!(\"{}\\n{}\\n{}\\n{}\", \"POST\", api_host, endpoint, paramss,),\n\n )?;\n\n\n\n trace!(\"Sign message: {}\", signature);\n\n\n\n let url = format!(\"{}{}\", api_url, endpoint);\n\n let mut url = Url::parse_with_params(&url, &query)?;\n\n url.query_pairs_mut()\n\n .append_pair(\"Signature\", &signature);\n\n\n\n let req = self\n\n .client\n", "file_path": "src/transport.rs", "rank": 18, 
"score": 25775.89846794985 }, { "content": " None => \"\".to_string(),\n\n };\n\n\n\n let req = self\n\n .client\n\n .request(method, url.as_str())\n\n .typed_header(headers::UserAgent::from_static(\"alphaquant\"))\n\n .typed_header(headers::ContentType::form_url_encoded());\n\n\n\n let req = req.body(body);\n\n\n\n Ok(async move {\n\n Ok(req\n\n .send()\n\n .await?\n\n .json::<HuobiResponse<_>>()\n\n .await?\n\n .into_result()?)\n\n })\n\n }\n", "file_path": "src/transport.rs", "rank": 19, "score": 25773.814707825033 }, { "content": " .request(method, url.as_str())\n\n .typed_header(headers::UserAgent::from_static(\"alphaquant\"))\n\n .typed_header(headers::ContentType::json())\n\n .json(&data);\n\n\n\n Ok(async move {\n\n Ok(req\n\n .send()\n\n .await?\n\n .json::<HuobiResponse<_>>()\n\n .await?\n\n .into_result()?)\n\n })\n\n }\n\n\n\n fn check_key(&self) -> Fallible<(&str, &str)> {\n\n match self.credential.as_ref() {\n\n None => Err(Error::NoApiKeySet.into()),\n\n Some((k, s)) => Ok((k, s)),\n\n }\n\n }\n\n\n\n\n\n\n\n}\n\n\n\n\n", "file_path": "src/transport.rs", "rank": 20, "score": 25772.03254851654 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct MatchResults {\n\n pub trades: Vec<MatchTradeItem>,\n\n pub total_page: u32,\n\n pub current_page: u32,\n\n pub total_size: u32,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct MatchTradeItem {\n\n pub match_id: u64,\n\n pub id: String,\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub symbol: String,\n\n pub order_source: String,\n\n pub contract_code: String,\n\n pub direction: String,\n\n pub offset: String,\n\n pub trade_volume: u32,\n", "file_path": "src/models.rs", "rank": 21, "score": 24603.671483721537 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Symbol {\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub contract_type: String,\n\n pub contract_size: f64,\n\n pub price_tick: f64,\n\n pub 
delivery_date: String,\n\n pub create_date: String,\n\n pub contract_status: u32,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct AccountPosition {\n\n pub symbol: String,\n\n pub margin_balance: f64,\n\n pub margin_static: f64,\n\n pub margin_position: f64,\n\n pub margin_frozen: f64,\n\n pub margin_available: f64,\n", "file_path": "src/models.rs", "rank": 22, "score": 24603.395375291442 }, { "content": "pub struct OrderWSResponse {\n\n pub op: String,\n\n pub topic: String,\n\n pub uid: String,\n\n pub ts: u64,\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub contract_type: String,\n\n pub volume: u32,\n\n pub price: f64,\n\n pub order_price_type: String,\n\n pub direction: String,\n\n pub offset: String,\n\n pub status: u32,\n\n pub lever_rate: u32,\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub client_order_id: Option<u64>,\n\n pub order_source: String,\n\n pub order_type: u32,\n", "file_path": "src/models.rs", "rank": 23, "score": 24602.222728223387 }, { "content": " pub volume: f64,\n\n pub price: f64,\n\n pub created_at: u64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct ContractInfo {\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub contract_type: String,\n\n pub contract_size: f64,\n\n pub price_tick: f64,\n\n pub delivery_date: String,\n\n pub create_date: String,\n\n pub contract_status: u32,\n\n}\n\n\n\n\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct BBO {\n\n pub bid: (f64, f64),\n\n pub ask: (f64, f64),\n\n pub id: u64,\n\n pub ts: u64,\n\n pub version: u64,\n\n pub ch: String,\n\n pub mrid: u64,\n\n}", "file_path": "src/models.rs", "rank": 24, "score": 24601.904905007756 }, { "content": " pub event: String,\n\n pub data: T,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(untagged)]\n\npub enum ErrCodeEnum {\n\n S(String),\n\n U(u32),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct 
Page<T> {\n\n pub orders: Option<Vec<T>>,\n\n pub trades: Option<Vec<T>>,\n\n pub total_page: u32,\n\n pub current_page: u32,\n\n pub total_size: u32\n\n}\n\n\n", "file_path": "src/models.rs", "rank": 25, "score": 24601.832833392196 }, { "content": " pub created_at: u64,\n\n pub canceled_at: u64,\n\n pub trade_volume: u32,\n\n pub trade_turnover: f64,\n\n pub fee: f64,\n\n pub fee_asset: String,\n\n pub trade_avg_price: Option<f64>,\n\n pub margin_frozen: f64,\n\n pub profit: f64,\n\n pub status: u32,\n\n pub order_type: u32,\n\n pub order_source: String,\n\n pub liquidation_type: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TriggerOpenOrder {\n\n symbol: String,\n\n contract_code: String,\n\n contract_type: String,\n", "file_path": "src/models.rs", "rank": 26, "score": 24601.777417125737 }, { "content": "pub struct Cancel {\n\n pub errors: Vec<CancelError>,\n\n pub successes: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct CancelError {\n\n pub order_id: String,\n\n pub err_code: u32,\n\n pub err_msg: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct HisOrder {\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub lever_rate: u32,\n\n pub direction: String,\n", "file_path": "src/models.rs", "rank": 27, "score": 24601.666982860774 }, { "content": " fail_code: Option<u32>,\n\n fail_reason: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct OrderDetail {\n\n pub symbol: String,\n\n pub contract_type: String,\n\n pub contract_code: String,\n\n pub lever_rate: u32,\n\n pub direction: String,\n\n pub offset: String,\n\n pub volume: f64,\n\n pub price: f64,\n\n pub created_at: u64,\n\n pub canceled_at: u64,\n\n pub order_source: String,\n\n pub order_price_type: String,\n\n pub margin_frozen: f64,\n\n pub profit: f64,\n", "file_path": "src/models.rs", "rank": 28, 
"score": 24601.567216034677 }, { "content": " pub total_page: u32,\n\n pub current_page: u32,\n\n pub total_size: u32,\n\n pub instrument_price: f64,\n\n pub final_interest: f64,\n\n pub adjust_value: f64,\n\n pub fee: f64,\n\n pub fee_asset: String,\n\n pub liquidation_type: String,\n\n pub trades: Vec<TradeItem>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TradeItem {\n\n pub trade_id: u64,\n\n pub id: String,\n\n pub trade_price: f64,\n\n pub trade_volume: f64,\n\n pub trade_fee: f64,\n\n pub fee_asset: String,\n", "file_path": "src/models.rs", "rank": 29, "score": 24601.50763978911 }, { "content": " pub role: String,\n\n pub created_at: u64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct OpenOrder {\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub contract_type: String,\n\n pub volume: f64,\n\n pub price: f64,\n\n pub order_price_type: String,\n\n pub order_type: u32,\n\n pub direction: String,\n\n pub offset: String,\n\n pub lever_rate: u32,\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub client_order_id: Option<u64>,\n\n pub created_at: u64,\n", "file_path": "src/models.rs", "rank": 30, "score": 24601.498876145073 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct BatchOrderRequest {\n\n pub orders_data: Vec<OrderRequest>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct OrderRequest {\n\n pub contract_code: Option<String>,\n\n pub symbol: Option<String>,\n\n pub contract_type: Option<String>,\n\n pub client_order_id: Option<u64>,\n\n pub price: Option<f64>,\n\n pub volume: u32, \n\n pub direction: String,\n\n pub offset: String,\n\n pub lever_rate: u32,\n\n pub order_price_type: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n", "file_path": "src/models.rs", "rank": 31, "score": 24601.16961571313 }, { "content": " trigger_type: String,\n\n volume: f64,\n\n order_type: u32,\n\n direction: String,\n\n 
offset: String,\n\n lever_rate: u32,\n\n order_id: u32,\n\n order_id_str: String,\n\n order_source: String,\n\n trigger_price: f64,\n\n order_price: f64,\n\n created_at: u64,\n\n order_price_type: String,\n\n status: u32,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TriggerHisOrder {\n\n symbol: String,\n\n contract_type: String,\n", "file_path": "src/models.rs", "rank": 32, "score": 24600.890558032374 }, { "content": " pub created_at: u64,\n\n pub trade_volume: u32,\n\n pub trade_turnover: f64,\n\n pub fee: f64,\n\n pub trade_avg_price: f64,\n\n pub margin_frozen: f64,\n\n pub profit: f64,\n\n pub liquidation_type: String,\n\n pub trade: Vec<TradeSubItem>,\n\n} \n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct MatchOrderWSResponse {\n\n pub op: String,\n\n pub topic: String,\n\n pub uid: String,\n\n pub ts: u64,\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub contract_type: String,\n", "file_path": "src/models.rs", "rank": 33, "score": 24600.634806121332 }, { "content": " pub liquidation_price: Option<f64>,\n\n pub withdraw_available: f64,\n\n pub lever_rate: f64,\n\n pub adjust_factor: f64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Order {\n\n pub symbol: String,\n\n pub contract_type: String,\n\n pub contract_code: String,\n\n pub volume: f64,\n\n pub price: f64,\n\n pub order_price_type: String,\n\n pub direction: String,\n\n pub offset: String,\n\n pub lever_rate: u32,\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub client_order_id: u64,\n", "file_path": "src/models.rs", "rank": 34, "score": 24600.600230231248 }, { "content": " pub offset: String,\n\n pub volume: u32,\n\n pub price: f64,\n\n pub create_date: u64,\n\n pub order_source: String,\n\n pub order_price_type: u32,\n\n pub margin_frozen: f64,\n\n pub profit: f64,\n\n pub trade_volume: u32,\n\n pub trade_turnover: f64,\n\n pub fee: f64,\n\n pub fee_asset: String,\n\n pub trade_avg_price: 
Option<f64>,\n\n pub status: u32,\n\n pub order_type: u32,\n\n pub liquidation_type: String\n\n}\n\n\n\n\n\n\n", "file_path": "src/models.rs", "rank": 35, "score": 24600.507807974736 }, { "content": " pub status: u32,\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub client_order_id: Option<u64>,\n\n pub order_type: u32,\n\n pub volume: u32,\n\n pub trade_volume: u32,\n\n pub trade: Vec<TradeSubItem>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TradeSubItem {\n\n pub trade_id: u64,\n\n pub id: String,\n\n pub trade_volume: u32,\n\n pub trade_price: f64,\n\n pub trade_fee: Option<f64>,\n\n pub fee_asset: Option<String>,\n\n pub trade_turnover: f64,\n\n pub created_at: u64,\n", "file_path": "src/models.rs", "rank": 36, "score": 24599.948934688317 }, { "content": " pub trade_price: f64,\n\n pub trade_turnover: u32,\n\n pub create_date: u64,\n\n pub offset_profitloss: f64,\n\n pub trade_fee: f64,\n\n pub fee_asset: String,\n\n pub role: String,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct TransferLimit {\n\n pub symbol: String,\n\n pub transfer_in_max_each: f64,\n\n pub transfer_in_min_each: f64,\n\n pub transfer_out_max_each: f64,\n\n pub transfer_out_min_each: f64,\n\n pub transfer_in_max_daily: f64,\n\n pub transfer_out_max_daily: f64,\n\n pub net_transfer_in_max_daily: f64,\n\n pub net_transfer_out_max_daily: f64,\n", "file_path": "src/models.rs", "rank": 37, "score": 24599.780576694797 }, { "content": " pub profit_unreal: f64,\n\n pub profit_rate: f64,\n\n pub profit: f64,\n\n pub position_margin: f64,\n\n pub lever_rate: u32,\n\n pub direction: String,\n\n pub last_price: f64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Account {\n\n pub symbol: String,\n\n pub margin_balance: f64,\n\n pub margin_static: f64,\n\n pub margin_position: f64,\n\n pub margin_frozen: f64,\n\n pub margin_available: f64,\n\n pub profit_real: f64,\n\n pub profit_unreal: f64,\n\n pub 
risk_rate: Option<f64>,\n", "file_path": "src/models.rs", "rank": 38, "score": 24599.682838437544 }, { "content": "}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Index {\n\n pub id: u64,\n\n pub vol: String,\n\n pub count: f64,\n\n pub open: String,\n\n pub close: String,\n\n pub low: String,\n\n pub high: String,\n\n pub amount: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\npub struct Liquidation {\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub direction: String,\n\n pub offset: String,\n", "file_path": "src/models.rs", "rank": 39, "score": 24599.5267684938 }, { "content": " pub errors: Vec<BatchOrderErrors>,\n\n pub success: Vec<BatchOrderSuccess>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct BatchOrderErrors {\n\n pub index: u32,\n\n pub err_code: u32,\n\n pub err_msg: String, \n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct BatchOrderSuccess {\n\n pub index: u32,\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub client_order_id: Option<u64>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n", "file_path": "src/models.rs", "rank": 40, "score": 24599.26096873818 }, { "content": "pub struct OpStatus {\n\n pub op: String,\n\n #[serde(rename = \"type\")]\n\n pub otype: Option<String>,\n\n pub ts: Ts,\n\n #[serde(rename = \"err-code\")]\n\n pub err_code: Option<u32>,\n\n #[serde(rename = \"err-msg\")]\n\n pub err_msg: Option<String>,\n\n pub cid: Option<String>,\n\n pub topic: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Basis {\n\n pub id: u64,\n\n pub contract_price: String,\n\n pub index_price: String,\n\n pub basis: String,\n\n pub basis_rate: String,\n", "file_path": "src/models.rs", "rank": 41, "score": 24599.17527587814 }, { "content": " pub ask: (f64, f64),\n\n pub bid: (f64, f64),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct PriceLimit {\n\n pub 
symbol: String,\n\n pub high_limit: f64,\n\n pub low_limit: f64,\n\n pub contract_code: String,\n\n pub contract_type: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Trade {\n\n pub ch: String,\n\n pub ts: u64,\n\n pub tick: TradeDetail,\n\n pub status: Option<String>,\n\n}\n", "file_path": "src/models.rs", "rank": 42, "score": 24599.024000612648 }, { "content": " #[serde(rename = \"err-code\")]\n\n pub err_code: Option<String>,\n\n #[serde(rename = \"err-msg\")]\n\n pub err_msg: Option<String>,\n\n}\n\n\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct MarketPing {\n\n pub ping: u64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(untagged)]\n\npub enum Ts {\n\n St(String),\n\n It(u64),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n", "file_path": "src/models.rs", "rank": 43, "score": 24598.78308453631 }, { "content": " pub trade_volume: f64,\n\n pub trade_turnover: f64,\n\n pub fee: f64,\n\n pub fee_asset: String,\n\n pub trade_avg_price: Option<f64>,\n\n pub margin_frozen: f64,\n\n pub profit: f64,\n\n pub status: u32,\n\n pub order_source: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct OrderId {\n\n pub order_id: u64,\n\n pub order_id_str: String,\n\n pub client_order_id: Option<u64>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct BatchOrder {\n", "file_path": "src/models.rs", "rank": 44, "score": 24598.457397406484 }, { "content": " pub profit_real: f64,\n\n pub profit_unreal: f64,\n\n pub risk_rate: Option<f64>,\n\n pub liquidation_price: Option<f64>,\n\n pub withdraw_available: f64,\n\n pub lever_rate: f64,\n\n pub adjust_factor: f64,\n\n pub positions: Option<Vec<Position>>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Position {\n\n pub symbol: String,\n\n pub contract_code: String,\n\n pub contract_type: String,\n\n pub volume: f64,\n\n pub available: f64,\n\n pub frozen: 
f64,\n\n pub cost_open: f64,\n\n pub cost_hold: f64,\n", "file_path": "src/models.rs", "rank": 45, "score": 24598.35969203983 }, { "content": "\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TradeDetail {\n\n pub id: u64,\n\n pub ts: u64,\n\n pub data: Vec<TradeDetailItem>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TradeDetailItem {\n\n pub amount: u32,\n\n pub ts: u64,\n\n pub id: u64,\n\n pub price: f64,\n\n pub direction: String,\n\n}\n\n\n", "file_path": "src/models.rs", "rank": 46, "score": 24598.104781174967 }, { "content": " pub volume: f64,\n\n pub count: f64,\n\n pub open: f64,\n\n pub close: f64,\n\n pub low: f64,\n\n pub high: f64,\n\n pub amount: f64,\n\n pub mrid: Option<u64>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Merged {\n\n pub id: u64,\n\n pub vol: String,\n\n pub count: f64,\n\n pub open: String,\n\n pub close: String,\n\n pub low: String,\n\n pub high: String,\n\n pub amount: String,\n", "file_path": "src/models.rs", "rank": 47, "score": 24597.447559588156 }, { "content": " pub role: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct PositionSubs\n\n{\n\n pub op: String,\n\n pub topic: String,\n\n pub ts: u64,\n\n pub event: String,\n\n pub data: Vec<Position>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct HistoryTrade {\n\n pub ch: String,\n\n pub status: String,\n\n pub ts: u64,\n\n pub data: Vec<HistoryTradeItem>,\n\n}\n", "file_path": "src/models.rs", "rank": 48, "score": 24597.417308564694 }, { "content": " contract_code: String,\n\n trigger_type: String,\n\n volume: f64,\n\n order_type: u32,\n\n direction: String,\n\n offset: String,\n\n lever_rate: u32,\n\n order_id: u32,\n\n order_id_str: String,\n\n relation_order_id: String,\n\n order_price_type: String,\n\n status: u32,\n\n order_source: String,\n\n trigger_price: f64,\n\n triggered_price: Option<f64>,\n\n order_price: f64,\n\n created_at: 
u64,\n\n triggered_at: Option<u64>,\n\n order_insert_at: u64,\n\n canceled_at: u64,\n", "file_path": "src/models.rs", "rank": 49, "score": 24597.339955553507 }, { "content": " pub err_code1: Option<ErrCodeEnum>,\n\n #[serde(rename = \"err-msg\")]\n\n pub err_msg1: Option<String>,\n\n \n\n}\n\n\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct WSMarketResponse<T> {\n\n pub ch: String,\n\n pub ts: u64,\n\n pub tick: T,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct WSAccountResponse<T> {\n\n pub op: String,\n\n pub topic: String,\n\n pub ts: u64,\n\n pub uid: Option<String>,\n", "file_path": "src/models.rs", "rank": 50, "score": 24597.169426222623 }, { "content": " pub ch: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct IncrementalOrderBook {\n\n pub bids: Vec<(f64, f64)>,\n\n pub asks: Vec<(f64, f64)>,\n\n pub mrid: u64,\n\n pub id: u64,\n\n pub ts: u64,\n\n pub version: u64,\n\n pub ch: String,\n\n pub event: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Kline {\n\n #[serde(rename = \"id\")]\n\n pub timestamp: u64,\n\n #[serde(rename = \"vol\")]\n", "file_path": "src/models.rs", "rank": 51, "score": 24596.867652394623 }, { "content": "\n\n //Index\n\n Basis(WSMarketResponse<Basis>),\n\n Index(WSMarketResponse<Index>),\n\n\n\n OpStatus(OpStatus),\n\n\n\n //Other\n\n Ping,\n\n Pong,\n\n Binary(Vec<u8>), // Unexpected, unparsed\n\n Text(String),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct SubStatus {\n\n pub id: String,\n\n pub subbed: Option<String>,\n\n pub ts: u64,\n\n pub status: String,\n", "file_path": "src/models.rs", "rank": 52, "score": 24596.86224513868 }, { "content": "}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct TransferResponse {\n\n pub status: String,\n\n pub data: Option<u64>,\n\n #[serde(rename = \"err-code\")]\n\n pub err_code: Option<String>,\n\n #[serde(rename = \"err-msg\")]\n\n 
pub err_msg: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct OrderBook {\n\n pub bids: Vec<(f64, f64)>,\n\n pub asks: Vec<(f64, f64)>,\n\n pub mrid: u64,\n\n pub id: u64,\n\n pub ts: u64,\n\n pub version: u64,\n", "file_path": "src/models.rs", "rank": 53, "score": 24596.756912785644 }, { "content": "#![allow(dead_code)]\n\n#![allow(unused_variables)]\n\n\n\nuse serde::de::{self, Unexpected, Visitor};\n\nuse serde::{Deserialize, Deserializer, Serialize};\n\nuse std::fmt::{self, Display};\n\nuse std::str::FromStr;\n\n\n", "file_path": "src/models.rs", "rank": 54, "score": 24594.955603902992 }, { "content": "\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct HistoryTradeItem {\n\n pub data: Vec<TradeDetailItem>,\n\n pub id: u64,\n\n pub ts: u64,\n\n}\n\n\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Subscription {\n\n Market, // market\n\n Account, // private account\n\n Index, // index\n\n}\n\n\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(untagged)]\n\n#[allow(clippy::large_enum_variant)]\n", "file_path": "src/models.rs", "rank": 55, "score": 24594.059222278236 }, { "content": "pub enum WebsocketEvent {\n\n //Ping,Sub,Op\n\n MarketPing(MarketPing),\n\n SubStatus(SubStatus),\n\n\n\n //Market\n\n IncrementalOrderBook(WSMarketResponse<IncrementalOrderBook>),\n\n OrderBook(WSMarketResponse<OrderBook>),\n\n BBO(WSMarketResponse<BBO>),\n\n Kline(WSMarketResponse<Kline>),\n\n TradeDetail(WSMarketResponse<TradeDetail>),\n\n\n\n //Account\n\n Account(WSAccountResponse<Vec<Account>>),\n\n Order(OrderWSResponse),\n\n MatchOrder(MatchOrderWSResponse),\n\n Position(WSAccountResponse<Vec<Position>>),\n\n Liquidation(WSAccountResponse<Vec<Liquidation>>),\n\n ContractInfo(WSAccountResponse<Vec<ContractInfo>>),\n\n TriggerOrder(WSAccountResponse<Vec<TriggerHisOrder>>),\n", "file_path": "src/models.rs", "rank": 56, "score": 24592.232097658263 }, { "content": "fn parse_message(msg: Message) 
-> Fallible<WebsocketEvent> {\n\n let bin = match msg {\n\n Message::Text(msg) => return Ok(WebsocketEvent::Text(msg)),\n\n Message::Binary(b) => b,\n\n Message::Pong(b) => b,\n\n Message::Ping(b) => b,\n\n Message::Close(..) => return Err(failure::format_err!(\"Socket closed\")),\n\n };\n\n\n\n let mut d = GzDecoder::new(&*bin);\n\n let mut s = String::new();\n\n d.read_to_string(&mut s).unwrap();\n\n\n\n trace!(\"Incoming websocket message {:?}\", s);\n\n \n\n let message: WebsocketEvent = from_str(&s)?;\n\n\n\n Ok(message)\n\n}\n\n\n", "file_path": "src/client/websocket.rs", "rank": 57, "score": 24437.589578338353 }, { "content": "struct F64Visitor;\n\nimpl<'de> Visitor<'de> for F64Visitor {\n\n type Value = f64;\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a string representation of a f64\")\n\n }\n\n fn visit_str<E>(self, value: &str) -> Result<f64, E>\n\n where\n\n E: de::Error,\n\n {\n\n if let Ok(integer) = value.parse::<i32>() {\n\n Ok(integer as f64)\n\n } else {\n\n value.parse::<f64>().map_err(|err| {\n\n E::invalid_value(Unexpected::Str(value), &\"a string representation of a f64\")\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/models.rs", "rank": 58, "score": 21464.478902029095 }, { "content": "trait ToUrlQuery: Serialize {\n\n fn to_url_query_string(&self) -> String {\n\n let vec = self.to_url_query();\n\n\n\n vec.into_iter()\n\n .map(|(k, v)| format!(\"{}={}\", k, v))\n\n .collect::<Vec<_>>()\n\n .join(\"&\")\n\n }\n\n\n\n fn to_url_query(&self) -> Vec<(String, String)> {\n\n let v = to_value(self).unwrap();\n\n let v = v.as_object().unwrap();\n\n let mut vec = vec![];\n\n\n\n for (key, value) in v {\n\n match value {\n\n Value::Null => continue,\n\n Value::String(s) => vec.push((key.clone(), s.clone())),\n\n other => vec.push((key.clone(), to_string(other).unwrap())),\n\n }\n\n }\n\n\n\n vec\n\n }\n\n}\n\n\n\nimpl<S: Serialize> ToUrlQuery for S {}", "file_path": "src/transport.rs", "rank": 
59, "score": 21150.739850489197 }, { "content": " )\n\n\n\n }\n\n\n\n // query contract trigger open orders\n\n pub fn get_trigger_open_orders<S1, S2, S3, S4>(\n\n &self,\n\n symbol: S1,\n\n contract_type: S2,\n\n page_index: S3,\n\n page_size: S4,\n\n )-> Fallible<impl Future<Output = Fallible<APIResponse<Page<TriggerOpenOrder>>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<Option<String>>,\n\n S3: Into<Option<u32>>,\n\n S4: Into<Option<u32>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n", "file_path": "src/client/account.rs", "rank": 65, "score": 26.22944223301986 }, { "content": " } \n\n\n\n\n\n // lightning close\n\n pub fn lightning_close<S1, S2, S3, S4, S5, S6>(\n\n &self,\n\n symbol: S1,\n\n contract_type: S2,\n\n contract_code: S3,\n\n volume: u32,\n\n direction: S4,\n\n client_order_id: S5,\n\n order_price_type: S6\n\n )-> Fallible<impl Future<Output = Fallible<APIResponse<OrderId>>>>\n\n where\n\n S1: Into<Option<String>>,\n\n S2: Into<Option<String>>,\n\n S3: Into<Option<String>>,\n\n S4: Into<String>,\n\n S5: Into<Option<u64>>,\n", "file_path": "src/client/account.rs", "rank": 67, "score": 25.516054177493277 }, { "content": " Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_order_detail\", Some(params))?\n\n\n\n )\n\n }\n\n\n\n // get open orders\n\n pub fn get_open_orders<S1, S2, S3>(\n\n &self,\n\n symbol: S1,\n\n page_index: S2,\n\n page_size: S3\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Page<OpenOrder>>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<Option<u32>>,\n\n S3: Into<Option<u32>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n", "file_path": "src/client/account.rs", "rank": 69, "score": 24.859280688481114 }, { "content": " page_index: S4,\n\n page_size: S5\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<OrderDetail>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<Option<u64>>,\n\n S3: Into<Option<u32>>,\n\n S4: Into<Option<u32>>,\n\n S5: 
Into<Option<u32>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n params.insert(\"symbol\".into(), symbol.into());\n\n params.insert(\"order_id\".into(), format!(\"{}\", order_id));\n\n\n\n if let Some(ct) = created_at.into() { params.insert(\"created_at\".into(), format!(\"{}\", ct)); }\n\n if let Some(otype) = order_type.into() { params.insert(\"order_type\".into(), format!(\"{}\", otype)); }\n\n if let Some(offset) = page_index.into() { params.insert(\"page_index\".into(), format!(\"{}\", offset)); }\n\n if let Some(limit) = page_size.into() { params.insert(\"page_size\".into(), format!(\"{}\", limit)); }\n\n\n", "file_path": "src/client/account.rs", "rank": 71, "score": 24.233968522924858 }, { "content": " params.insert(\"symbol\".into(), symbol.into());\n\n if let Some(ctype) = contract_type.into() { params.insert(\"contract_type\".into(), ctype); }\n\n if let Some(index) = page_index.into() { params.insert(\"page_index\".into(), format!(\"{}\",index)); }\n\n if let Some(size) = page_size.into() { params.insert(\"page_size\".into(), format!(\"{}\",size)); }\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_trigger_openorders\", Some(params))?\n\n )\n\n\n\n }\n\n\n\n // query history open orders\n\n pub fn get_trigger_his_orders<S1, S2, S3, S4, S5> (\n\n &self,\n\n symbol: S1,\n\n contract_code: S2,\n\n trade_type: u32,\n\n status: S3,\n\n create_date: u32,\n", "file_path": "src/client/account.rs", "rank": 72, "score": 22.763979617437855 }, { "content": "use super::HuobiFuture;\n\nuse crate::{\n\n models::*, \n\n};\n\nuse failure::Fallible;\n\nuse futures::prelude::*;\n\nuse std::{collections::BTreeMap};\n\n\n\nimpl HuobiFuture {\n\n // Account Information\n\n pub fn get_account_info<S1>(\n\n &self,\n\n symbol: S1,\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<AccountPosition>>>>>\n\n where\n\n S1: Into<Option<String>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n 
// Add three optional parameters\n", "file_path": "src/client/account.rs", "rank": 73, "score": 22.25439390629751 }, { "content": " page_index: S4,\n\n page_size: S5\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Page<TriggerHisOrder>>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<Option<String>>,\n\n S3: Into<String>,\n\n S4: Into<Option<u32>>,\n\n S5: Into<Option<u32>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n \n\n params.insert(\"symbol\".into(), symbol.into());\n\n params.insert(\"trade_type\".into(), format!(\"{}\", trade_type));\n\n params.insert(\"status\".into(), status.into());\n\n params.insert(\"create_date\".into(), format!(\"{}\", create_date));\n\n \n\n if let Some(code) = contract_code.into() { params.insert(\"contract_code\".into(), code); }\n\n if let Some(index) = page_index.into() { params.insert(\"page_index\".into(), format!(\"{}\", index)); }\n\n if let Some(size) = page_size.into() { params.insert(\"page_size\".into(), format!(\"{}\", size)); }\n", "file_path": "src/client/account.rs", "rank": 74, "score": 22.025494082377858 }, { "content": "\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_account_position_info\", Some(params))?\n\n )\n\n }\n\n\n\n // place an order\n\n pub fn place_order<S1, S2, S3, S4, S5, S6, S7, S8>(\n\n &self, \n\n symbol: S1, \n\n contract_type: S2, \n\n contract_code: S3, \n\n client_order_id: S4, \n\n price: S5, \n\n volume: u32,\n\n direction: S6, \n\n offset: S7, \n\n lever_rate: u32, \n\n order_price_type: S8\n", "file_path": "src/client/account.rs", "rank": 75, "score": 21.921565544440448 }, { "content": "\n\n params.insert(\"symbol\".into(), symbol.into());\n\n \n\n if let Some(offset) = page_index.into() { params.insert(\"page_index\".into(), format!(\"{}\", offset)); }\n\n if let Some(limit) = page_size.into() { params.insert(\"page_size\".into(), format!(\"{}\", limit)); }\n\n\n\n Ok(self\n\n .transport\n\n 
.signed_post(\"/api/v1/contract_openorders\", Some(params))?\n\n )\n\n }\n\n\n\n // place trigger order\n\n pub fn place_trigger_order<S1, S2, S3, S4, S5, S6, S7>(\n\n &self,\n\n symbol: S1,\n\n contract_type: S2,\n\n contract_code: S3,\n\n trigger_type: S4,\n\n trigger_price: f64,\n", "file_path": "src/client/account.rs", "rank": 76, "score": 21.256737721477762 }, { "content": "\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_cancelall\", Some(params))?\n\n )\n\n }\n\n\n\n // get order info\n\n pub fn get_order_info<S1, S2, S3>(\n\n &self,\n\n symbol: S1,\n\n order_id: S2,\n\n client_order_id: S3,\n\n )-> Fallible<impl Future<Output = Fallible<APIResponse<Vec<Order>>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<Option<String>>,\n\n S3: Into<Option<String>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n", "file_path": "src/client/account.rs", "rank": 77, "score": 20.879904350816187 }, { "content": " order_price: f64,\n\n order_price_type: S5,\n\n volume: u32,\n\n direction: S6,\n\n offset: S7,\n\n lever_rate: u32\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<OrderId>>>>\n\n where\n\n S1: Into<Option<String>>,\n\n S2: Into<Option<String>>,\n\n S3: Into<Option<String>>,\n\n S4: Into<String>,\n\n S5: Into<Option<String>>,\n\n S6: Into<String>,\n\n S7: Into<String>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n if let Some(sym) = symbol.into() { params.insert(\"symbol\".into(), sym); }\n\n if let Some(ctype) = contract_type.into() { params.insert(\"contract_type\".into(), ctype); }\n", "file_path": "src/client/account.rs", "rank": 78, "score": 20.52609479778542 }, { "content": " pub fn cancel_all_trigger_orders<S1, S2, S3>(\n\n &self,\n\n symbol: S1,\n\n contract_code: S2,\n\n contract_type: S3,\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Cancel>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<Option<String>>,\n\n S3: Into<Option<String>>\n\n {\n\n let mut params: 
BTreeMap<String, String> = BTreeMap::new();\n\n\n\n params.insert(\"symbol\".into(), symbol.into());\n\n if let Some(code) = contract_code.into() { params.insert(\"contract_code\".into(), code); }\n\n if let Some(ctype) = contract_type.into() { params.insert(\"contract_type\".into(), ctype); }\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_trigger_cancelall\", Some(params))?\n", "file_path": "src/client/account.rs", "rank": 79, "score": 20.232594774186 }, { "content": "\n\n params.insert(\"symbol\".into(), symbol.into());\n\n\n\n if let Some(oid) = order_id.into() { params.insert(\"order_id\".into(), oid);}\n\n if let Some(cid) = client_order_id.into() { params.insert(\"client_order_id\".into(), cid);}\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_order_info\", Some(params))?\n\n )\n\n\n\n }\n\n\n\n // get order detail\n\n pub fn get_order_detail<S1, S2, S3, S4, S5>(\n\n &self,\n\n symbol: S1,\n\n order_id: u64,\n\n created_at: S2,\n\n order_type: S3,\n", "file_path": "src/client/account.rs", "rank": 81, "score": 18.39659847153366 }, { "content": "mod account;\n\nmod market;\n\nmod subscription;\n\npub mod websocket;\n\n\n\nuse crate::transport::Transport;\n\n\n\n#[derive(Clone, Default)]\n\npub struct HuobiFuture {\n\n pub transport: Transport,\n\n}\n\n\n\nimpl HuobiFuture {\n\n #[must_use]\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n #[must_use]\n\n pub fn with_credential(api_key: &str, api_secret: &str) -> Self {\n\n Self {\n\n transport: Transport::with_credential(api_key, api_secret),\n\n }\n\n }\n\n}\n", "file_path": "src/client/mod.rs", "rank": 82, "score": 17.43320266630663 }, { "content": " pub fn cancel_orders<S1, S2>(\n\n &self,\n\n symbol: String,\n\n order_id: S1,\n\n client_order_id: S2,\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Cancel>>>>\n\n where\n\n S1: Into<Option<String>>,\n\n S2: Into<Option<String>>\n\n { \n\n let mut params: BTreeMap<String, String> = 
BTreeMap::new();\n\n \n\n if let Some(oid) = order_id.into() { params.insert(\"order_id\".into(), format!(\"{}\", oid));}\n\n if let Some(cid) = client_order_id.into() { params.insert(\"client_order_id\".into(), format!(\"{}\", cid));}\n\n\n\n params.insert(\"symbol\".into(), symbol);\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_cancel\", Some(params))?\n", "file_path": "src/client/account.rs", "rank": 83, "score": 17.31914074081432 }, { "content": " Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_transfer_limit\", Some(params))?\n\n )\n\n }\n\n\n\n\n\n // transfer between spot and future\n\n pub fn transfer<S1, S2> (\n\n &self,\n\n currency: S1,\n\n amount: f64,\n\n ttype: S2\n\n ) -> Fallible<impl Future<Output = Fallible<TransferResponse>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<String>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n", "file_path": "src/client/account.rs", "rank": 84, "score": 16.985879178267012 }, { "content": " &self,\n\n symbol: S1,\n\n order_id: S2,\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Cancel>>>>\n\n where\n\n S1: Into<String>,\n\n S2: Into<String>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n params.insert(\"symbol\".into(), symbol.into());\n\n params.insert(\"order_id\".into(), order_id.into());\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_trigger_cancel\", Some(params))?\n\n )\n\n }\n\n\n\n // cancel all trigger orders\n", "file_path": "src/client/account.rs", "rank": 85, "score": 16.85358894027153 }, { "content": " )\n\n }\n\n\n\n // cancel all orders\n\n pub fn cancel_allorders<S1, S2>(\n\n &self,\n\n symbol: String,\n\n contract_code: S1,\n\n contract_type: S2\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Cancel>>>>\n\n where\n\n S1: Into<Option<String>>,\n\n S2: Into<Option<String>>\n\n { \n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n \n\n if let Some(code) 
= contract_code.into() { params.insert(\"contract_code\".into(), code);}\n\n if let Some(ctype) = contract_type.into() { params.insert(\"contract_type\".into(), ctype);}\n\n\n\n params.insert(\"symbol\".into(), symbol);\n", "file_path": "src/client/account.rs", "rank": 86, "score": 16.66529792350114 }, { "content": " if let Some(sym) = symbol.into() {\n\n params.insert(\"symbol\".into(), sym);\n\n }\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_account_info\", Some(params))?)\n\n }\n\n\n\n // Account and Position Information\n\n pub fn get_account_position_info<S1>(\n\n &self,\n\n symbol: S1,\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<AccountPosition>>>>>\n\n where\n\n S1: Into<String>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n params.insert(\"symbol\".into(), symbol.into());\n", "file_path": "src/client/account.rs", "rank": 87, "score": 16.075915615046313 }, { "content": "\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_trigger_hisorders\", Some(params))?\n\n )\n\n\n\n }\n\n\n\n // get transfer limit\n\n pub fn get_transfer_limit<S1> (\n\n &self,\n\n symbol: S1,\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<Vec<TransferLimit>>>>>\n\n where\n\n S1: Into<Option<String>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n if let Some(sym) = symbol.into() { params.insert(\"symbol\".into(), sym);}\n\n\n", "file_path": "src/client/account.rs", "rank": 88, "score": 16.060315085424932 }, { "content": "#![warn(clippy::all, clippy::pedantic, clippy::nursery)]\n\n#![allow(clippy::missing_errors_doc)]\n\nmod client;\n\npub mod error;\n\npub mod models;\n\nmod transport;\n\n\n\npub use crate::models::*;\n\npub use crate::error::*;\n\n\n\npub use crate::client::{websocket::HuobiWebsocket, HuobiFuture};\n", "file_path": "src/lib.rs", "rank": 89, "score": 15.324959021514266 }, { "content": " ) -> Fallible<impl Future<Output = 
Fallible<APIResponse<OrderId>>>>\n\n where \n\n S1: Into<Option<String>>, \n\n S2: Into<Option<String>>, \n\n S3: Into<Option<String>>, \n\n S4: Into<Option<u32>>,\n\n S5: Into<Option<f64>>, \n\n S6: Into<String>, \n\n S7: Into<String>, \n\n S8: Into<String>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n params.insert(\"volume\".into(), format!(\"{}\", volume));\n\n params.insert(\"direction\".into(), direction.into());\n\n params.insert(\"offset\".into(), offset.into());\n\n params.insert(\"lever_rate\".into(), lever_rate.to_string());\n\n params.insert(\"order_price_type\".into(), order_price_type.into());\n\n\n\n if let Some(client_id) = client_order_id.into() { params.insert(\"client_order_id\".into(), format!(\"{}\", client_id)); }\n", "file_path": "src/client/account.rs", "rank": 90, "score": 15.254931594860961 }, { "content": "use crate::{\n\n models::*, \n\n client::websocket::HuobiWebsocket,\n\n client::websocket::WS_HOST,\n\n};\n\nuse std::{\n\n collections::HashMap,\n\n};\n\nuse failure::Fallible;\n\nuse futures::prelude::*;\n\nuse serde_json::{json};\n\nuse std::{collections::BTreeMap};\n\nuse ring::{digest, hmac};\n\n\n\nimpl HuobiWebsocket {\n\n\n\n pub async fn connect(\n\n &mut self,\n\n subs: HashMap<Subscription, Vec<&str>>,\n\n ) -> Fallible<()> {\n", "file_path": "src/client/subscription.rs", "rank": 91, "score": 13.495048318484567 }, { "content": "use crate::huobi_future::{models::Subscription, models::WebsocketEvent, HuobiWebsocket};\n\nuse huobi_future_async as huobi_future;\n\nuse failure::Fallible;\n\nuse std::{\n\n collections::HashMap,\n\n};\n\nextern crate simple_logger;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Fallible<()> {\n\n // simple_logger::init().unwrap();\n\n\n\n let access_key = \"\";\n\n let secret_key = \"\";\n\n\n\n let mut ws: HuobiWebsocket = HuobiWebsocket::new(access_key, secret_key, |event: WebsocketEvent| {\n\n match event {\n\n \n\n WebsocketEvent::OrderBook(orderbook) => 
println!(\"orderbook:{:?}\", orderbook), \n\n WebsocketEvent::Kline(kline) => println!(\"kline:{:?}\", kline),\n", "file_path": "examples/websocket.rs", "rank": 92, "score": 9.832964689788874 }, { "content": " self.subscriptions\n\n .get(subscription)\n\n .and_then(|token| StreamUnordered::take(streams, *token))\n\n }\n\n\n\n\n\n pub fn check_key(&self) -> Fallible<(&str, &str)> {\n\n match self.credential.as_ref() {\n\n None => Err(Error::NoApiKeySet.into()),\n\n Some((k, s)) => Ok((k, s)),\n\n }\n\n }\n\n\n\n}\n\n\n\nimpl Stream for HuobiWebsocket {\n\n type Item = Fallible<WebsocketEvent>;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n match Pin::new(&mut self.as_mut().get_mut().streams).poll_next(cx) {\n", "file_path": "src/client/websocket.rs", "rank": 93, "score": 9.643937322063604 }, { "content": "\n\nuse huobi_future_async as huobi_future;\n\nuse crate::huobi_future::HuobiFuture;\n\nuse crate::huobi_future::models::*;\n\nuse failure::Fallible;\n\nuse std::env::var;\n\nuse tracing::{info, Level};\n\nextern crate simple_logger;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Fallible<()> {\n\n tracing::subscriber::set_global_default(tracing_subscriber::FmtSubscriber::new()).unwrap();\n\n // simple_logger::init().unwrap();\n\n let access_key = \"\";\n\n let secret_key = \"\";\n\n\n\n let hb = HuobiFuture::with_credential(&access_key, &secret_key);\n\n\n\n // get contract info\n\n match hb.get_contract_info(\"BTC\".to_string(), None, None)?.await {\n", "file_path": "examples/endpoints.rs", "rank": 94, "score": 9.453587612910493 }, { "content": "use crate::{\n\n error::Error,\n\n models::*,\n\n};\n\nuse failure::Fallible;\n\nuse futures::{prelude::*, stream::SplitStream, stream::SplitSink};\n\nuse serde_json::from_str;\n\nuse std::{\n\n collections::HashMap,\n\n pin::Pin,\n\n task::{Context, Poll},\n\n};\n\nuse streamunordered::{StreamUnordered, StreamYield};\n\nuse tokio::net::TcpStream;\n\nuse 
tokio_tungstenite::{connect_async, MaybeTlsStream, WebSocketStream};\n\nuse tracing::*;\n\nuse tungstenite::Message;\n\nuse url::Url;\n\nuse flate2::read::GzDecoder;\n\nuse std::io::Read;\n\n\n\n\n\npub const WS_URL: &str = \"wss://api.hbdm.vn\";\n\npub const WS_HOST: &str = \"api.hbdm.vn\";\n\n\n", "file_path": "src/client/websocket.rs", "rank": 95, "score": 9.070036639476573 }, { "content": " pub code: i64,\n\n pub msg: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\n#[serde(untagged)]\n\npub enum HuobiResponse<T> {\n\n Success(T),\n\n Error(HuobiErrorData),\n\n}\n\n\n\nimpl<T: for<'a> Deserialize<'a>> HuobiResponse<T> {\n\n pub fn into_result(self) -> Result<T, Error> {\n\n match self {\n\n Self::Success(t) => Result::Ok(t),\n\n Self::Error(HuobiErrorData { code, msg }) => {\n\n Result::Err(Error::HuobiError { code, msg })\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 96, "score": 9.030878512982808 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse snafu::*;\n\n\n\n#[allow(clippy::pub_enum_variant_names)]\n\n#[derive(Deserialize, Serialize, Debug, Clone, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"Huobi Future error: {}: {}\", code, msg))]\n\n HuobiError { code: i64, msg: String },\n\n #[snafu(display(\"Assets not found\"))]\n\n AssetsNotFound,\n\n #[snafu(display(\"Symbol not found\"))]\n\n SymbolNotFound,\n\n #[snafu(display(\"No Api key set for private api\"))]\n\n NoApiKeySet,\n\n #[snafu(display(\"No stream is subscribed\"))]\n\n NoStreamSubscribed,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\npub struct HuobiErrorData {\n", "file_path": "src/error.rs", "rank": 97, "score": 8.622002171415396 }, { "content": " if let Some(p) = price.into() { params.insert(\"price\".into(), format!(\"{}\", p)); }\n\n if let Some(code) = contract_code.into() { params.insert(\"contract_code\".into(), code); }\n\n if let Some(ctype) = contract_type.into() { params.insert(\"contract_type\".into(), ctype); 
}\n\n if let Some(sym) = symbol.into() { params.insert(\"symbol\".into(), sym); }\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_order\", Some(params))?) \n\n\n\n }\n\n\n\n // place batch order\n\n pub fn place_orders(\n\n &self, \n\n orders_data: BatchOrderRequest\n\n ) -> Fallible<impl Future<Output = Fallible<APIResponse<BatchOrder>>>>\n\n {\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/contract_batchorder\", Some(orders_data))?) \n", "file_path": "src/client/account.rs", "rank": 98, "score": 7.981441732993247 }, { "content": " S6: Into<Option<String>>\n\n {\n\n let mut params: BTreeMap<String, String> = BTreeMap::new();\n\n\n\n params.insert(\"volume\".into(), format!(\"{}\", volume));\n\n params.insert(\"direction\".into(), format!(\"{}\", direction.into()));\n\n if let Some(code) = contract_code.into() { params.insert(\"contract_code\".into(), code); }\n\n if let Some(ctype) = contract_type.into() { params.insert(\"contract_type\".into(), ctype); }\n\n if let Some(sym) = symbol.into() { params.insert(\"symbol\".into(), sym); }\n\n if let Some(otype) = order_price_type.into() { params.insert(\"order_price_type\".into(), otype); }\n\n if let Some(client_id) = client_order_id.into() { params.insert(\"client_order_id\".into(), format!(\"{}\", client_id)); }\n\n\n\n Ok(self\n\n .transport\n\n .signed_post(\"/api/v1/lightning_close_position\", Some(params))?\n\n )\n\n\n\n }\n\n\n\n // cancel orders\n", "file_path": "src/client/account.rs", "rank": 99, "score": 7.419589314373431 } ]
Rust
src/cigar.rs
Daniel-Liu-c0deb0t/block-aligner
b54c09e0210605bd56c85e950aa9cd1cbf1c1f31
use std::fmt; #[derive(Debug, PartialEq, Copy, Clone)] #[repr(u8)] pub enum Operation { Sentinel = 0u8, M = 1u8, I = 2u8, D = 3u8 } #[derive(Debug, Copy, Clone)] #[repr(C)] pub struct OpLen { pub op: Operation, pub len: usize } pub struct Cigar { s: Vec<OpLen>, idx: usize } impl Cigar { pub fn new(query_len: usize, reference_len: usize) -> Self { let s = vec![OpLen { op: Operation::Sentinel, len: 0 }; query_len + reference_len + 5]; let idx = 1; Cigar { s, idx } } #[allow(dead_code)] pub(crate) fn clear(&mut self, query_len: usize, reference_len: usize) { self.s[..query_len + reference_len + 5].fill(OpLen { op: Operation::Sentinel, len: 0 }); self.idx = 1; } #[allow(dead_code)] pub(crate) unsafe fn add(&mut self, op: Operation) { debug_assert!(self.idx < self.s.len()); let add = (op != (*self.s.as_ptr().add(self.idx - 1)).op) as usize; self.idx += add; (*self.s.as_mut_ptr().add(self.idx - 1)).op = op; (*self.s.as_mut_ptr().add(self.idx - 1)).len += 1; } pub fn len(&self) -> usize { self.idx - 1 } pub fn get(&self, i: usize) -> OpLen { self.s[self.idx - 1 - i] } pub fn format(&self, q: &[u8], r: &[u8]) -> (String, String) { let mut a = String::with_capacity(self.idx); let mut b = String::with_capacity(self.idx); let mut i = 0; let mut j = 0; for &op_len in self.s[1..self.idx].iter().rev() { match op_len.op { Operation::M => { for _k in 0..op_len.len { a.push(q[i] as char); b.push(r[j] as char); i += 1; j += 1; } }, Operation::I => { for _k in 0..op_len.len { a.push(q[i] as char); b.push('-'); i += 1; } }, Operation::D => { for _k in 0..op_len.len { a.push('-'); b.push(r[j] as char); j += 1; } }, _ => continue } } (a, b) } pub fn to_vec(&self) -> Vec<OpLen> { self.s[1..self.idx] .iter() .rev() .map(|&op_len| op_len) .collect::<Vec<OpLen>>() } } impl fmt::Display for Cigar { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for &op_len in self.s[1..self.idx].iter().rev() { let c = match op_len.op { Operation::M => 'M', Operation::I => 'I', Operation::D => 
'D', _ => continue }; write!(f, "{}{}", op_len.len, c)?; } Ok(()) } }
use std::fmt; #[derive(Debug, PartialEq, Copy, Clone)] #[repr(u8)] pub enum Operation { Sentinel = 0u8, M = 1u8, I = 2u8, D = 3u8 } #[derive(Debug, Copy, Clone)] #[repr(C)] pub struct OpLen { pub op: Operation, pub len: usize } pub struct Cigar { s: Vec<OpLen>, idx: usize } impl Cigar { pub fn new(query_len: usize, reference_len: usize) -> Self { let s = vec![OpLen { op: Operation::Sentinel, len: 0 }; query_len + reference_len + 5]; let idx = 1; Cigar { s, idx } } #[allow(dead_code)] pub(crate) fn clear(&mut self, query_len: usize, reference_len: usize) { self.s[..query_len + reference_len + 5].fill(OpLen { op: Operation::Sentinel, len: 0 }); self.idx = 1; } #[allow(dead_code)] pub(crate) unsafe fn add(&mut self, op: Operation) { debug_assert!(self.idx < self.s.len()); let add = (op != (*self.s.as_ptr().add(self.idx - 1)).op) as usize; self.idx += add; (*self.s.as_mut_ptr().add(self.idx - 1)).op = op; (*self.s.as_mut_ptr().add(self.idx - 1)).len += 1; } pub fn len(&self) -> usize { self.idx - 1 } pub fn get(&self, i: usize) -> OpLen { self.s[self.idx - 1 - i] } pub fn format(&self, q: &[u8], r: &[u8]) -> (String, String) { let mut a = String::with_capacity(self.idx); let mut b = String::with_capacity(self.idx); let mut i = 0; let mut j = 0; for &op_len in self.s[1..self.idx].iter().rev() { match op_len.op { Operation::M => { for _k in 0..op_len.len { a.push(q[i] as char); b.push(r[j] as char); i += 1; j += 1; } }, Operation::I => { for _k in 0..op_len.len { a.push(q[i] as char); b.push('-'); i += 1; } }, Operation::D => { for _k in 0..op_len.len { a.push('-'); b.push(r[j] as char); j += 1; } }, _ => continue } } (a, b) } pub fn to_vec(&self) -> Vec<OpLen> { self.s[1..self.idx] .iter() .rev() .map(|&op_len| op_len) .collect::<Vec<OpLen>>() } } impl fmt::Display for Cigar { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for &op_len in self.s[1..self.idx].iter().rev() { let c =
; write!(f, "{}{}", op_len.len, c)?; } Ok(()) } }
match op_len.op { Operation::M => 'M', Operation::I => 'I', Operation::D => 'D', _ => continue }
if_condition
[ { "content": "/// Given an input byte string, create a randomly mutated copy and\n\n/// add random suffixes to both strings.\n\npub fn rand_mutate_suffix<R: Rng>(a: &mut Vec<u8>, k: usize, alpha: &[u8], suffix_len: usize, rng: &mut R) -> Vec<u8> {\n\n let mut b = rand_mutate(a, k, alpha, rng);\n\n let a_suffix = rand_str(suffix_len, alpha, rng);\n\n let b_suffix = rand_str(suffix_len, alpha, rng);\n\n a.extend_from_slice(&a_suffix);\n\n b.extend_from_slice(&b_suffix);\n\n b\n\n}\n\n\n", "file_path": "src/simulate.rs", "rank": 0, "score": 236036.40189064917 }, { "content": "/// Given an input byte string, create a randomly mutated copy with\n\n/// a single long random insert.\n\npub fn rand_mutate_insert<R: Rng>(a: &[u8], k: usize, alpha: &[u8], insert_len: usize, rng: &mut R) -> Vec<u8> {\n\n let b = rand_mutate(a, k, alpha, rng);\n\n let insert = rand_str(insert_len, alpha, rng);\n\n let idx = rng.gen_range(1..b.len());\n\n let mut res = Vec::with_capacity(b.len() + insert_len);\n\n // insert the long insert string\n\n res.extend_from_slice(&b[..idx]);\n\n res.extend_from_slice(&insert);\n\n res.extend_from_slice(&b[idx..]);\n\n res\n\n}\n\n\n", "file_path": "src/simulate.rs", "rank": 1, "score": 231796.769566838 }, { "content": "/// Given an input byte string, craete a randomly mutated copy.\n\npub fn rand_mutate<R: Rng>(a: &[u8], k: usize, alpha: &[u8], rng: &mut R) -> Vec<u8> {\n\n let mut edits = vec![0u8; a.len()];\n\n let curr_k: usize = rng.gen_range(k * 3 / 4..k + 1);\n\n let mut idx: Vec<usize> = (0usize..a.len()).collect();\n\n idx.shuffle(rng);\n\n\n\n // generate edit types\n\n for i in 0..curr_k {\n\n edits[idx[i]] = rng.gen_range(1u8..4u8);\n\n }\n\n\n\n let mut b = vec![];\n\n\n\n for i in 0..a.len() {\n\n match edits[i] {\n\n 0u8 => { // same\n\n b.push(a[i]);\n\n },\n\n 1u8 => { // diff\n\n let mut iter = alpha.choose_multiple(rng, 2);\n", "file_path": "src/simulate.rs", "rank": 2, "score": 224553.529037324 }, { "content": "/// Generate a random 
string of a certain length, with a certain\n\n/// alphabet.\n\npub fn rand_str<R: Rng>(length: usize, alpha: &[u8], rng: &mut R) -> Vec<u8> {\n\n let mut res = vec![0u8; length];\n\n\n\n for i in 0..length {\n\n res[i] = *alpha.choose(rng).unwrap();\n\n }\n\n\n\n res\n\n}\n", "file_path": "src/simulate.rs", "rank": 3, "score": 219671.5246606614 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn calc_diag(q: &[u8], r: &[u8], D: &mut [i32], R: &mut [i32], C: &mut [i32], start_i: usize, start_j: usize, block_size: usize, gap_open: i32, gap_extend: i32) -> i32 {\n\n let idx = |i: usize, j: usize| { i + j * (q.len() + 1 + block_size) };\n\n let mut max = i32::MIN;\n\n\n\n for off in 0..block_size {\n\n let i = start_i + block_size - 1 - off;\n\n let j = start_j + off;\n\n\n\n if D[idx(i, j)] != i32::MIN {\n\n max = cmp::max(max, D[idx(i, j)]);\n\n continue;\n\n }\n\n\n\n R[idx(i, j)] = if i == 0 { i32::MIN } else { cmp::max(\n\n R[idx(i - 1, j)].saturating_add(gap_extend),\n\n D[idx(i - 1, j)].saturating_add(gap_open)\n\n ) };\n\n C[idx(i, j)] = if j == 0 { i32::MIN } else { cmp::max(\n\n C[idx(i, j - 1)].saturating_add(gap_extend),\n\n D[idx(i, j - 1)].saturating_add(gap_open)\n", "file_path": "examples/compare.rs", "rank": 4, "score": 195946.31038319052 }, { "content": "#[allow(non_snake_case)]\n\nfn calc_block(q: &[u8], r: &[u8], D: &mut [i32], R: &mut [i32], C: &mut [i32], start_i: usize, start_j: usize, block_width: usize, block_height: usize, max_size: usize, gap_open: i32, gap_extend: i32) -> i32 {\n\n let idx = |i: usize, j: usize| { i + j * (q.len() + 1 + max_size) };\n\n let mut max = i32::MIN;\n\n\n\n for i in start_i..start_i + block_height {\n\n for j in start_j..start_j + block_width {\n\n if D[idx(i, j)] != i32::MIN {\n\n continue;\n\n }\n\n\n\n R[idx(i, j)] = if i == 0 { i32::MIN } else { cmp::max(\n\n R[idx(i - 1, j)].saturating_add(gap_extend),\n\n D[idx(i - 1, j)].saturating_add(gap_open)\n\n ) };\n\n C[idx(i, j)] = if j == 0 { 
i32::MIN } else { cmp::max(\n\n C[idx(i, j - 1)].saturating_add(gap_extend),\n\n D[idx(i, j - 1)].saturating_add(gap_open)\n\n ) };\n\n D[idx(i, j)] = cmp::max(\n\n if i == 0 || j == 0 || i > q.len() || j > r.len() { i32::MIN } else {\n", "file_path": "examples/accuracy.rs", "rank": 5, "score": 184574.42303070205 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn calc_block(q: &[u8], r: &[u8], D: &mut [i32], R: &mut [i32], C: &mut [i32], start_i: usize, start_j: usize, block_width: usize, block_height: usize, block_size: usize, gap_open: i32, gap_extend: i32) -> i32 {\n\n let idx = |i: usize, j: usize| { i + j * (q.len() + 1 + block_size) };\n\n let mut max = i32::MIN;\n\n\n\n for i in start_i..start_i + block_height {\n\n for j in start_j..start_j + block_width {\n\n if D[idx(i, j)] != i32::MIN {\n\n continue;\n\n }\n\n\n\n R[idx(i, j)] = if i == 0 { i32::MIN } else { cmp::max(\n\n R[idx(i - 1, j)].saturating_add(gap_extend),\n\n D[idx(i - 1, j)].saturating_add(gap_open)\n\n ) };\n\n C[idx(i, j)] = if j == 0 { i32::MIN } else { cmp::max(\n\n C[idx(i, j - 1)].saturating_add(gap_extend),\n\n D[idx(i, j - 1)].saturating_add(gap_open)\n\n ) };\n\n D[idx(i, j)] = cmp::max(\n\n if i == 0 || j == 0 || i > q.len() || j > r.len() { i32::MIN } else {\n", "file_path": "examples/compare.rs", "rank": 6, "score": 184574.42303070205 }, { "content": "fn test(iter: usize, len: usize, k: usize, insert_len: Option<usize>) -> usize {\n\n let mut wrong = 0usize;\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n\n\n for _i in 0..iter {\n\n let r = rand_str(len, &AMINO_ACIDS, &mut rng);\n\n let q = match insert_len {\n\n Some(len) => rand_mutate_insert(&r, k, &AMINO_ACIDS, len, &mut rng),\n\n None => rand_mutate(&r, k, &AMINO_ACIDS, &mut rng)\n\n };\n\n\n\n let r_padded = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q_padded = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let run_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n let mut block_aligner = 
Block::<true, false>::new(q.len(), r.len(), 2048);\n\n block_aligner.align(&q_padded, &r_padded, &BLOSUM62, run_gaps, 32..=2048, 0);\n\n let scan_score = block_aligner.res().score;\n\n let mut scan_cigar = Cigar::new(q.len(), r.len());\n\n block_aligner.trace().cigar(q.len(), r.len(), &mut scan_cigar);\n", "file_path": "examples/verify_trace.rs", "rank": 7, "score": 159713.7273866739 }, { "content": "#[allow(non_snake_case)]\n\nfn slow_align(q: &[u8], r: &[u8], x_drop: i32) -> (i32, usize, usize) {\n\n let gap_open = -11;\n\n let gap_extend = -1;\n\n let idx = |i: usize, j: usize| { i + j * (q.len() + 1) };\n\n\n\n let mut D = vec![i32::MIN; (q.len() + 1) * (r.len() + 1)];\n\n let mut R = vec![i32::MIN; (q.len() + 1) * (r.len() + 1)];\n\n let mut C = vec![i32::MIN; (q.len() + 1) * (r.len() + 1)];\n\n D[idx(0, 0)] = 0;\n\n\n\n let mut best_max = i32::MIN;\n\n let mut best_i = 0;\n\n let mut best_j = 0;\n\n\n\n for i in 0..=q.len() {\n\n let mut max = i32::MIN;\n\n let mut max_j = 0;\n\n for j in 0..=r.len() {\n\n if D[idx(i, j)] != i32::MIN {\n\n continue;\n", "file_path": "examples/x_drop_accuracy.rs", "rank": 8, "score": 157939.3436009229 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\nfn bench_parasailors_aa_core<const K: usize>(b: &mut Bencher, len: usize) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &AMINO_ACIDS, &mut rng));\n\n let q = black_box(rand_mutate(&r, K, &AMINO_ACIDS, &mut rng));\n\n let matrix = Matrix::new(MatrixType::Blosum62);\n\n let profile = Profile::new(&q, &matrix);\n\n\n\n b.iter(|| {\n\n global_alignment_score(&profile, &r, 11, 1)\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 9, "score": 157316.01172405324 }, { "content": "fn bench_scan_nuc_core<const K: usize>(b: &mut Bencher, len: usize) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &NUC, &mut rng));\n\n let q = black_box(rand_mutate(&r, K, &NUC, &mut rng));\n\n let r = 
PaddedBytes::from_bytes::<NucMatrix>(&r, 2048);\n\n let q = PaddedBytes::from_bytes::<NucMatrix>(&q, 2048);\n\n let bench_gaps = Gaps { open: -2, extend: -1 };\n\n\n\n b.iter(|| {\n\n let mut a = Block::<false, false>::new(q.len(), r.len(), 2048);\n\n a.align(&q, &r, &NW1, bench_gaps, 32..=2048, 0);\n\n a.res()\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 10, "score": 157316.01172405324 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\nfn bench_triple_accel_core<const K: usize>(b: &mut Bencher, len: usize) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &NUC, &mut rng));\n\n let q = black_box(rand_mutate(&r, K, &NUC, &mut rng));\n\n\n\n b.iter(|| {\n\n bounded_levenshtein(&q, &r, K as u32)\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 11, "score": 157316.01172405324 }, { "content": "fn bench_rustbio_aa_core<const K: usize>(b: &mut Bencher, len: usize) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &AMINO_ACIDS, &mut rng));\n\n let q = black_box(rand_mutate(&r, K, &AMINO_ACIDS, &mut rng));\n\n\n\n b.iter(|| {\n\n let mut bio_aligner = Aligner::with_capacity(q.len(), r.len(), -10, -1, &blosum62);\n\n bio_aligner.global(&q, &r).score\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 12, "score": 157316.01172405324 }, { "content": "fn consistent(i: usize, j: usize, cigar: &Cigar) -> bool {\n\n let mut curr_i = 0;\n\n let mut curr_j = 0;\n\n\n\n for i in 0..cigar.len() {\n\n let op_len = cigar.get(i);\n\n match op_len.op {\n\n Operation::M => {\n\n curr_i += op_len.len;\n\n curr_j += op_len.len;\n\n },\n\n Operation::I => {\n\n curr_i += op_len.len;\n\n },\n\n _ => {\n\n curr_j += op_len.len;\n\n }\n\n }\n\n }\n\n\n\n curr_i == i && curr_j == j\n\n}\n\n\n", "file_path": "examples/verify_trace.rs", "rank": 13, "score": 156176.09297293177 }, { "content": "fn bench_scan_aa_core_trace<const K: usize>(b: &mut Bencher, len: usize) {\n\n 
let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &AMINO_ACIDS, &mut rng));\n\n let q = black_box(rand_mutate(&r, K, &AMINO_ACIDS, &mut rng));\n\n let r = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let bench_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n b.iter(|| {\n\n let mut a = Block::<true, false>::new(q.len(), r.len(), 2048);\n\n a.align(&q, &r, &BLOSUM62, bench_gaps, 32..=2048, 0);\n\n //a.res()\n\n let mut cigar = Cigar::new(q.len(), r.len());\n\n a.trace().cigar(q.len(), r.len(), &mut cigar);\n\n (a.res(), cigar)\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 14, "score": 154493.28366035302 }, { "content": "fn bench_scan_aa_core_small<const K: usize>(b: &mut Bencher, len: usize) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &AMINO_ACIDS, &mut rng));\n\n let q = black_box(rand_mutate(&r, K, &AMINO_ACIDS, &mut rng));\n\n let r = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let bench_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n b.iter(|| {\n\n let mut a = Block::<false, false>::new(q.len(), r.len(), 32);\n\n a.align(&q, &r, &BLOSUM62, bench_gaps, 32..=32, 0);\n\n a.res()\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 15, "score": 154493.28366035302 }, { "content": "#[allow(non_snake_case)]\n\nfn slow_align(q: &[u8], r: &[u8]) -> i32 {\n\n let mut block_width = 16usize;\n\n let mut block_height = 16usize;\n\n let block_grow = 16usize;\n\n let max_size = 256usize;\n\n let i_step = 4usize;\n\n let j_step = 4usize;\n\n let mut y_drop = 3i32;\n\n let y_drop_grow = 2i32;\n\n\n\n let mut D = vec![i32::MIN; (q.len() + 1 + max_size) * (r.len() + 1 + max_size)];\n\n let mut R = vec![i32::MIN; (q.len() + 1 + max_size) * (r.len() + 1 + max_size)];\n\n let mut C = vec![i32::MIN; (q.len() + 1 + max_size) * (r.len() + 1 + max_size)];\n\n 
D[0 + 0 * (q.len() + 1 + max_size)] = 0;\n\n let mut i = 0usize;\n\n let mut j = 0usize;\n\n let mut dir = 0;\n\n let mut best_max = 0;\n\n\n\n //println!(\"start\");\n", "file_path": "examples/accuracy.rs", "rank": 16, "score": 152369.08515422695 }, { "content": "#[inline(always)]\n\nfn convert_char(c: u8, nuc: bool) -> u8 {\n\n debug_assert!(c >= b'A' && c <= NULL);\n\n if nuc { c } else { c - b'A' }\n\n}\n\n\n", "file_path": "src/old/scan_thin.rs", "rank": 17, "score": 149225.69517139293 }, { "content": "#[inline(always)]\n\nfn convert_char(c: u8, nuc: bool) -> u8 {\n\n debug_assert!(c >= b'A' && c <= NULL);\n\n if nuc { c } else { c - b'A' }\n\n}\n\n\n\n#[cfg_attr(any(target_arch = \"x86\", target_arch = \"x86_64\"), target_feature(enable = \"avx2\"))]\n\n#[cfg_attr(target_arch = \"wasm32\", target_feature(enable = \"simd128\"))]\n\n#[inline]\n\nunsafe fn halfsimd_convert_char(v: HalfSimd, nuc: bool) -> HalfSimd {\n\n if nuc { v } else { halfsimd_sub_i8(v, halfsimd_set1_i8(b'A' as i8)) }\n\n}\n\n\n", "file_path": "src/old/scan_minecraft.rs", "rank": 18, "score": 149225.69517139293 }, { "content": "#[inline(always)]\n\nfn convert_char(c: u8, nuc: bool) -> u8 {\n\n let c = c.to_ascii_uppercase();\n\n debug_assert!(c >= b'A' && c <= NULL);\n\n if nuc { c } else { c - b'A' }\n\n}\n\n\n", "file_path": "src/old/scan_block_old.rs", "rank": 19, "score": 146425.80846267677 }, { "content": "fn bench_scan_aa_core<const K: usize>(b: &mut Bencher, len: usize, insert: bool) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = black_box(rand_str(len, &AMINO_ACIDS, &mut rng));\n\n let q = if insert {\n\n black_box(rand_mutate_insert(&r, K, &AMINO_ACIDS, len / 10, &mut rng))\n\n } else {\n\n black_box(rand_mutate(&r, K, &AMINO_ACIDS, &mut rng))\n\n };\n\n let r = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let bench_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n b.iter(|| {\n\n let mut a = Block::<false, 
false>::new(q.len(), r.len(), 2048);\n\n a.align(&q, &r, &BLOSUM62, bench_gaps, 32..=2048, 0);\n\n a.res()\n\n });\n\n}\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 20, "score": 145455.89863016235 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn slow_align(q: &[u8], r: &[u8], x_drop: i32) -> i32 {\n\n let block_size = 32usize;\n\n let step = 8usize;\n\n //let step = 1usize;\n\n\n\n let mut D = vec![i32::MIN; (q.len() + 1 + block_size) * (r.len() + 1 + block_size)];\n\n let mut R = vec![i32::MIN; (q.len() + 1 + block_size) * (r.len() + 1 + block_size)];\n\n let mut C = vec![i32::MIN; (q.len() + 1 + block_size) * (r.len() + 1 + block_size)];\n\n D[0 + 0 * (q.len() + 1 + block_size)] = 0;\n\n //let max = calc_block(q, r, &mut D, &mut R, &mut C, 0, 0, block_size, block_size, block_size, -2, -1);\n\n let mut i = 0usize;\n\n let mut j = 0usize;\n\n let mut dir = 0;\n\n //let mut best_max = max;\n\n let mut best_max = 0;\n\n\n\n loop {\n\n let max = match dir {\n\n 0 => { // right\n\n calc_block(q, r, &mut D, &mut R, &mut C, i, j, block_size, block_size, block_size, -2, -1)\n", "file_path": "examples/compare.rs", "rank": 21, "score": 138744.71338953136 }, { "content": "fn test(iter: usize, len: usize, k: usize, verbose: bool) -> (usize, f64, i32, i32, usize) {\n\n let mut wrong = 0usize;\n\n let mut wrong_avg = 0f64;\n\n let mut wrong_min = i32::MAX;\n\n let mut wrong_max = i32::MIN;\n\n let mut diff_idx = 0usize;\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n\n\n for _i in 0..iter {\n\n let mut r = rand_str(len, &AMINO_ACIDS, &mut rng);\n\n let q = rand_mutate_suffix(&mut r, k, &AMINO_ACIDS, 500, &mut rng);\n\n\n\n let r_padded = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q_padded = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let run_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n let slow_res = slow_align(&q, &r, 50);\n\n\n\n let mut block_aligner = Block::<false, true>::new(q.len(), r.len(), 64);\n\n 
block_aligner.align(&q_padded, &r_padded, &BLOSUM62, run_gaps, 32..=64, 50);\n", "file_path": "examples/x_drop_accuracy.rs", "rank": 22, "score": 138648.62178724245 }, { "content": "fn run(len: usize, k: usize) {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let r = rand_str(len, &AMINO_ACIDS, &mut rng);\n\n let q = rand_mutate(&r, k, &AMINO_ACIDS, &mut rng);\n\n let r = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let run_gaps = Gaps { open: -11, extend: -1 };\n\n let mut a = Block::<true, true>::new(q.len(), r.len(), 32);\n\n\n\n for _i in 0..10000 {\n\n a.align(&q, &r, &BLOSUM62, run_gaps, 32..=32, 1000);\n\n black_box(a.res());\n\n }\n\n}\n\n\n", "file_path": "examples/profile.rs", "rank": 23, "score": 135710.86029435598 }, { "content": "fn test(iter: usize, len: usize, k: usize, slow: bool, insert_len: Option<usize>, nuc: bool, max_size: usize, verbose: bool) -> (usize, f64, i32, i32) {\n\n let mut wrong = 0usize;\n\n let mut wrong_avg = 0f64;\n\n let mut wrong_min = i32::MAX;\n\n let mut wrong_max = i32::MIN;\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n let nw = |a, b| if a == b { 1 } else { -1 };\n\n\n\n for _i in 0..iter {\n\n let r = rand_str(len, if nuc { &NUC } else { &AMINO_ACIDS }, &mut rng);\n\n let q = match insert_len {\n\n Some(len) => rand_mutate_insert(&r, k, if nuc { &NUC } else { &AMINO_ACIDS }, len, &mut rng),\n\n None => rand_mutate(&r, k, if nuc { &NUC } else { &AMINO_ACIDS }, &mut rng)\n\n };\n\n\n\n // rust-bio\n\n let bio_score = if nuc {\n\n let mut bio_aligner = Aligner::with_capacity(q.len(), r.len(), -1, -1, &nw);\n\n bio_aligner.global(&q, &r).score\n\n } else {\n", "file_path": "examples/accuracy.rs", "rank": 24, "score": 122484.10848353419 }, { "content": "fn time(f: fn(usize, bool, usize, usize) -> (i32, Duration), idx: usize, trace: bool, min_size: usize, max_size: usize) -> Duration {\n\n let (temp, duration) = f(idx, trace, min_size, max_size);\n\n 
black_box(temp);\n\n duration\n\n}\n\n\n", "file_path": "examples/uc_bench.rs", "rank": 25, "score": 121822.43168166919 }, { "content": "fn indels(a: &Alignment, len: usize) -> f64 {\n\n let mut indels = 0;\n\n\n\n for &op in &a.operations {\n\n if op == AlignmentOperation::Ins\n\n || op == AlignmentOperation::Del {\n\n indels += 1;\n\n }\n\n }\n\n (indels as f64) / (len as f64)\n\n}\n\n\n", "file_path": "examples/uc_accuracy.rs", "rank": 26, "score": 115518.4957160718 }, { "content": "#[bench]\n\nfn bench_naive_prefix_scan(b: &mut Bencher) {\n\n #[target_feature(enable = \"avx2\")]\n\n unsafe fn inner(b: &mut Bencher) {\n\n let vec = A([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 12, 13, 14, 11]);\n\n let vec = simd_load(vec.0.as_ptr() as *const Simd);\n\n\n\n b.iter(|| {\n\n let consts = get_prefix_scan_consts(-1);\n\n simd_naive_prefix_scan_i16(black_box(vec), consts)\n\n });\n\n }\n\n unsafe { inner(b); }\n\n}\n", "file_path": "benches/prefix_scan.rs", "rank": 27, "score": 115006.13345188307 }, { "content": "#[bench]\n\nfn bench_opt_prefix_scan(b: &mut Bencher) {\n\n #[target_feature(enable = \"avx2\")]\n\n unsafe fn inner(b: &mut Bencher) {\n\n let vec = A([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 12, 13, 14, 11]);\n\n let vec = simd_load(vec.0.as_ptr() as *const Simd);\n\n\n\n b.iter(|| {\n\n let consts = get_prefix_scan_consts(-1);\n\n simd_prefix_scan_i16(black_box(vec), consts)\n\n });\n\n }\n\n unsafe { inner(b); }\n\n}\n\n\n", "file_path": "benches/prefix_scan.rs", "rank": 28, "score": 115006.13345188307 }, { "content": "fn test(file_name: &str, min_size: usize, max_size: usize, verbose: bool, wrong: &mut [usize], wrong_avg: &mut [f64], count: &mut [usize]) -> (f64, usize, usize, f64) {\n\n let reader = BufReader::new(File::open(file_name).unwrap());\n\n let mut length_sum = 0f64;\n\n let mut length_min = usize::MAX;\n\n let mut length_max = usize::MIN;\n\n let mut dp_fraction = 0f64;\n\n\n\n for line in reader.lines() {\n\n let line = line.unwrap();\n\n let mut 
last_two = line.split_ascii_whitespace().rev().take(2);\n\n let r = last_two.next().unwrap().to_ascii_uppercase();\n\n let q = last_two.next().unwrap().to_ascii_uppercase();\n\n\n\n // rust-bio\n\n let mut bio_aligner = Aligner::with_capacity(q.len(), r.len(), -10, -1, &blosum62);\n\n let bio_alignment = bio_aligner.global(q.as_bytes(), r.as_bytes());\n\n let bio_score = bio_alignment.score;\n\n let seq_identity = seq_id(&bio_alignment);\n\n let id_idx = cmp::min((seq_identity * 10.0) as usize, 9);\n\n let indels = indels(&bio_alignment, cmp::max(q.len(), r.len()));\n", "file_path": "examples/uc_accuracy.rs", "rank": 29, "score": 108452.72391439648 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\n#[bench]\n\nfn bench_parasailors_aa_1000_10000(b: &mut Bencher) { bench_parasailors_aa_core::<1000>(b, 10000); }\n", "file_path": "benches/rand_scan.rs", "rank": 30, "score": 102600.41882065887 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\n#[bench]\n\nfn bench_parasailors_aa_10_100(b: &mut Bencher) { bench_parasailors_aa_core::<10>(b, 100); }\n", "file_path": "benches/rand_scan.rs", "rank": 31, "score": 102600.41882065887 }, { "content": "#[bench]\n\nfn bench_scan_nuc_100_1000(b: &mut Bencher) { bench_scan_nuc_core::<100>(b, 1000); }\n", "file_path": "benches/rand_scan.rs", "rank": 32, "score": 102600.41882065887 }, { "content": "#[bench]\n\nfn bench_scan_nuc_1000_10000(b: &mut Bencher) { bench_scan_nuc_core::<1000>(b, 10000); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 33, "score": 102600.41882065887 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\n#[bench]\n\nfn bench_triple_accel_100_1000(b: &mut Bencher) { bench_triple_accel_core::<100>(b, 1000); }\n", "file_path": "benches/rand_scan.rs", "rank": 34, "score": 102600.41882065887 }, { "content": "#[bench]\n\nfn bench_rustbio_aa_100_1000(b: &mut Bencher) { bench_rustbio_aa_core::<100>(b, 1000); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 35, "score": 102600.41882065887 }, { 
"content": "#[bench]\n\nfn bench_rustbio_aa_10_100(b: &mut Bencher) { bench_rustbio_aa_core::<10>(b, 100); }\n", "file_path": "benches/rand_scan.rs", "rank": 36, "score": 102600.41882065887 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\n#[bench]\n\nfn bench_parasailors_aa_100_1000(b: &mut Bencher) { bench_parasailors_aa_core::<100>(b, 1000); }\n", "file_path": "benches/rand_scan.rs", "rank": 37, "score": 102600.41882065887 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\n#[bench]\n\nfn bench_triple_accel_1000_10000(b: &mut Bencher) { bench_triple_accel_core::<1000>(b, 10000); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 38, "score": 102600.41882065887 }, { "content": "#[bench]\n\nfn bench_scan_aa_100_1000_small(b: &mut Bencher) { bench_scan_aa_core_small::<100>(b, 1000); }\n", "file_path": "benches/rand_scan.rs", "rank": 39, "score": 99576.73975211573 }, { "content": "#[bench]\n\nfn bench_scan_aa_1000_10000_trace(b: &mut Bencher) { bench_scan_aa_core_trace::<1000>(b, 10000); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 40, "score": 99576.73975211573 }, { "content": "#[bench]\n\nfn bench_scan_aa_100_1000_trace(b: &mut Bencher) { bench_scan_aa_core_trace::<100>(b, 1000); }\n", "file_path": "benches/rand_scan.rs", "rank": 41, "score": 99576.73975211573 }, { "content": "#[bench]\n\nfn bench_scan_aa_1000_10000_small(b: &mut Bencher) { bench_scan_aa_core_small::<1000>(b, 10000); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 42, "score": 99576.73975211573 }, { "content": "#[bench]\n\nfn bench_scan_aa_10_100_small(b: &mut Bencher) { bench_scan_aa_core_small::<10>(b, 100); }\n", "file_path": "benches/rand_scan.rs", "rank": 43, "score": 99576.73975211573 }, { "content": "#[bench]\n\nfn bench_scan_aa_10_100_trace(b: &mut Bencher) { bench_scan_aa_core_trace::<10>(b, 100); }\n", "file_path": "benches/rand_scan.rs", "rank": 44, "score": 99576.73975211573 }, { "content": "#[bench]\n\nfn bench_scan_aa_100_1000(b: &mut Bencher) { 
bench_scan_aa_core::<100>(b, 1000, false); }\n", "file_path": "benches/rand_scan.rs", "rank": 45, "score": 98215.30678372597 }, { "content": "#[bench]\n\nfn bench_scan_aa_10_100(b: &mut Bencher) { bench_scan_aa_core::<10>(b, 100, false); }\n", "file_path": "benches/rand_scan.rs", "rank": 46, "score": 98215.30678372597 }, { "content": "#[bench]\n\nfn bench_scan_aa_1000_10000(b: &mut Bencher) { bench_scan_aa_core::<1000>(b, 10000, false); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 47, "score": 98215.30678372597 }, { "content": "fn bench_scan_aa_core(idx: usize, trace: bool, min_size: usize, max_size: usize) -> (i32, Duration) {\n\n let file_data = get_data(&FILE_NAMES[idx]);\n\n let data = file_data\n\n .iter()\n\n .map(|(q, r)| (PaddedBytes::from_bytes::<AAMatrix>(q, 2048), PaddedBytes::from_bytes::<AAMatrix>(r, 2048)))\n\n .collect::<Vec<(PaddedBytes, PaddedBytes)>>();\n\n let bench_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n let start = Instant::now();\n\n let mut temp = 0i32;\n\n for (q, r) in &data {\n\n if trace {\n\n let mut a = Block::<true, false>::new(q.len(), r.len(), max_size);\n\n a.align(&q, &r, &BLOSUM62, bench_gaps, min_size..=max_size, 0);\n\n temp = temp.wrapping_add(a.res().score); // prevent optimizations\n\n let mut cigar = Cigar::new(q.len(), r.len());\n\n a.trace().cigar(q.len(), r.len(), &mut cigar);\n\n temp = temp.wrapping_add(cigar.len() as i32);\n\n } else {\n\n let mut a = Block::<false, false>::new(q.len(), r.len(), max_size);\n\n a.align(&q, &r, &BLOSUM62, bench_gaps, min_size..=max_size, 0);\n\n temp = temp.wrapping_add(a.res().score); // prevent optimizations\n\n }\n\n }\n\n (temp, start.elapsed())\n\n}\n\n\n", "file_path": "examples/uc_bench.rs", "rank": 48, "score": 97005.32882153548 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\nfn bench_parasailors_aa_core(idx: usize, _trace: bool, _min_size: usize, _max_size: usize) -> (i32, Duration) {\n\n let file_data = get_data(&FILE_NAMES[idx]);\n\n let matrix = 
Matrix::new(MatrixType::Blosum62);\n\n let data = file_data\n\n .iter()\n\n .map(|(q, r)| (Profile::new(q, &matrix), r.to_owned()))\n\n .collect::<Vec<(Profile, Vec<u8>)>>();\n\n\n\n let start = Instant::now();\n\n let mut temp = 0i32;\n\n for (p, r) in &data {\n\n temp = temp.wrapping_add(global_alignment_score(p, r, 11, 1));\n\n }\n\n (temp, start.elapsed())\n\n}\n\n\n", "file_path": "examples/uc_bench.rs", "rank": 49, "score": 97005.32882153548 }, { "content": "#[bench]\n\nfn bench_scan_aa_1000_10000_insert(b: &mut Bencher) { bench_scan_aa_core::<1000>(b, 10000, true); }\n\n\n", "file_path": "benches/rand_scan.rs", "rank": 50, "score": 96757.80871993606 }, { "content": "#[bench]\n\nfn bench_scan_aa_100_1000_insert(b: &mut Bencher) { bench_scan_aa_core::<100>(b, 1000, true); }\n", "file_path": "benches/rand_scan.rs", "rank": 51, "score": 96757.80871993606 }, { "content": "#[bench]\n\nfn bench_scan_aa_10_100_insert(b: &mut Bencher) { bench_scan_aa_core::<10>(b, 100, true); }\n", "file_path": "benches/rand_scan.rs", "rank": 52, "score": 96757.80871993606 }, { "content": "fn get_data(file_names: &[&str]) -> Vec<(Vec<u8>, Vec<u8>)> {\n\n let mut res = vec![];\n\n\n\n for file_name in file_names {\n\n let reader = BufReader::new(File::open(file_name).unwrap());\n\n\n\n for line in reader.lines() {\n\n let line = line.unwrap();\n\n let mut last_two = line.split_ascii_whitespace().rev().take(2);\n\n let r = last_two.next().unwrap().to_ascii_uppercase();\n\n let q = last_two.next().unwrap().to_ascii_uppercase();\n\n\n\n res.push((q.as_bytes().to_owned(), r.as_bytes().to_owned()));\n\n }\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "examples/uc_bench.rs", "rank": 53, "score": 94504.43627397288 }, { "content": "fn get_data(file_name: Option<&str>) -> Vec<(Vec<u8>, Vec<u8>)> {\n\n let mut rng = StdRng::seed_from_u64(1234);\n\n\n\n if let Some(file_name) = file_name {\n\n let mut res = vec![];\n\n\n\n let reader = BufReader::new(File::open(file_name).unwrap());\n\n let 
all_lines = reader.lines().collect::<Vec<_>>();\n\n\n\n for lines in all_lines.chunks(2) {\n\n let r = lines[0].as_ref().unwrap().to_ascii_uppercase();\n\n let q = lines[1].as_ref().unwrap().to_ascii_uppercase();\n\n let mut r = r.as_bytes().to_owned();\n\n let mut q = q.as_bytes().to_owned();\n\n let extend_r = rand_str(100, &NUC, &mut rng);\n\n let extend_q = rand_str(100, &NUC, &mut rng);\n\n r.extend_from_slice(&extend_r);\n\n q.extend_from_slice(&extend_q);\n\n res.push((q, r));\n\n }\n", "file_path": "examples/nanopore_bench.rs", "rank": 54, "score": 91070.17729591104 }, { "content": "fn time(f: fn(bool, bool, usize) -> (i32, Duration), file: bool, trace: bool, max_size: usize) -> Duration {\n\n let (temp, duration) = f(file, trace, max_size);\n\n black_box(temp);\n\n duration\n\n}\n\n\n", "file_path": "examples/nanopore_bench.rs", "rank": 55, "score": 85524.12866631857 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn block_sum(D: &[i32], col_len: usize, start_i: usize, start_j: usize, block_width: usize, block_height: usize) -> i32 {\n\n let mut sum = 0;\n\n for i in start_i..start_i + block_height {\n\n for j in start_j..start_j + block_width {\n\n sum += D[i + j * col_len];\n\n }\n\n }\n\n sum\n\n}\n\n\n", "file_path": "examples/compare.rs", "rank": 56, "score": 84996.41876327716 }, { "content": "#[allow(non_snake_case)]\n\nfn block_max(D: &[i32], col_len: usize, start_i: usize, start_j: usize, block_width: usize, block_height: usize) -> i32 {\n\n let mut max = i32::MIN;\n\n for i in start_i..start_i + block_height {\n\n for j in start_j..start_j + block_width {\n\n max = cmp::max(max, D[i + j * col_len]);\n\n }\n\n }\n\n max\n\n}\n\n\n", "file_path": "examples/accuracy.rs", "rank": 57, "score": 84996.41876327716 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn block_max(D: &[i32], col_len: usize, start_i: usize, start_j: usize, block_width: usize, block_height: usize) -> i32 {\n\n let mut max = i32::MIN;\n\n for 
i in start_i..start_i + block_height {\n\n for j in start_j..start_j + block_width {\n\n max = cmp::max(max, D[i + j * col_len]);\n\n }\n\n }\n\n max\n\n}\n\n\n", "file_path": "examples/compare.rs", "rank": 58, "score": 84996.41876327716 }, { "content": "#[inline]\n\nfn div_ceil(n: usize, d: usize) -> usize {\n\n (n + d - 1) / d\n\n}\n\n\n", "file_path": "src/scan_block.rs", "rank": 59, "score": 84431.04349572866 }, { "content": "#[inline(always)]\n\nfn div_ceil(n: usize, d: usize) -> usize {\n\n (n + d - 1) / d\n\n}\n\n\n\n#[derive(Clone, PartialEq, Debug)]\n\npub struct PaddedBytes {\n\n s: Vec<u8>\n\n}\n\n\n\nimpl PaddedBytes {\n\n #[inline(always)]\n\n pub fn from_bytes(b: &[u8]) -> Self {\n\n let mut v = b.to_owned();\n\n v.insert(0, NULL);\n\n v.resize(v.len() + L, NULL);\n\n Self { s: v }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn from_str(s: &str) -> Self {\n", "file_path": "src/old/scan_minecraft.rs", "rank": 60, "score": 82410.9518733071 }, { "content": "#[inline(always)]\n\nfn div_ceil(n: usize, d: usize) -> usize {\n\n (n + d - 1) / d\n\n}\n\n\n\npub struct Aligned {\n\n layout: alloc::Layout,\n\n ptr: *const i16\n\n}\n\n\n\nimpl Aligned {\n\n #[cfg_attr(any(target_arch = \"x86\", target_arch = \"x86_64\"), target_feature(enable = \"avx2\"))]\n\n #[cfg_attr(target_arch = \"wasm32\", target_feature(enable = \"simd128\"))]\n\n #[inline]\n\n pub unsafe fn new(blocks: usize) -> Self {\n\n let layout = alloc::Layout::from_size_align_unchecked(blocks * L * 2, L_BYTES);\n\n let ptr = alloc::alloc(layout) as *const i16;\n\n let neg_inf = simd_set1_i16(i16::MIN);\n\n for i in (0..blocks * L).step_by(L) {\n\n simd_store(ptr.add(i) as _, neg_inf);\n\n }\n", "file_path": "src/old/scan_block_old.rs", "rank": 61, "score": 80519.49361724152 }, { "content": "fn test(file_name: &str, max_size: usize, x_drop: i32) -> (usize, usize, f64, usize, f64) {\n\n let reader = BufReader::new(File::open(file_name).unwrap());\n\n let mut count = 0;\n\n let mut other_better = 0;\n\n 
let mut other_better_avg = 0f64;\n\n let mut us_better = 0;\n\n let mut us_better_avg = 0f64;\n\n //let mut slow_better = 0;\n\n //let mut slow_equal = 0;\n\n\n\n for line in reader.lines() {\n\n let line = line.unwrap();\n\n let mut row = line.split_ascii_whitespace().take(5);\n\n let q = row.next().unwrap().to_ascii_uppercase();\n\n let r = row.next().unwrap().to_ascii_uppercase();\n\n let other_score = row.next().unwrap().parse::<i32>().unwrap();\n\n let _other_i = row.next().unwrap().parse::<usize>().unwrap();\n\n let _other_j = row.next().unwrap().parse::<usize>().unwrap();\n\n\n\n //let x_drop = 100;\n", "file_path": "examples/compare.rs", "rank": 62, "score": 70458.72208311391 }, { "content": "#[cfg(feature = \"simd_avx2\")]\n\nfn test(file_name: &str, min_size: usize, max_size: usize, verbose: bool) -> (usize, f64, usize) {\n\n use parasailors::{Matrix, *};\n\n\n\n use block_aligner::scan_block::*;\n\n use block_aligner::scores::*;\n\n\n\n use std::fs::File;\n\n use std::io::{BufRead, BufReader};\n\n\n\n let mut wrong = 0usize;\n\n let mut wrong_avg = 0f64;\n\n let mut count = 0usize;\n\n let reader = BufReader::new(File::open(file_name).unwrap());\n\n let all_lines = reader.lines().collect::<Vec<_>>();\n\n\n\n for lines in all_lines.chunks(2) {\n\n let r = lines[0].as_ref().unwrap().to_ascii_uppercase();\n\n let q = lines[1].as_ref().unwrap().to_ascii_uppercase();\n\n\n\n // parasail\n", "file_path": "examples/nanopore_accuracy.rs", "rank": 63, "score": 68062.01093420424 }, { "content": "#[derive(Copy, Clone, PartialEq, Debug)]\n\nenum Direction {\n\n Right,\n\n Down,\n\n Grow\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::scores::*;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_no_x_drop() {\n\n let test_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n let mut a = Block::<false, false>::new(100, 100, 16);\n\n\n\n let r = PaddedBytes::from_bytes::<AAMatrix>(b\"AAAA\", 16);\n\n let q = PaddedBytes::from_bytes::<AAMatrix>(b\"AARA\", 16);\n", 
"file_path": "src/scan_block.rs", "rank": 64, "score": 62793.23243407291 }, { "content": "#[allow(non_snake_case)]\n\nstruct Allocated {\n\n pub trace: Trace,\n\n\n\n // bottom and right borders of the current block\n\n pub D_col: Aligned,\n\n pub C_col: Aligned,\n\n pub D_row: Aligned,\n\n pub R_row: Aligned,\n\n\n\n // the state at the previous checkpoint (where latest best score was encountered)\n\n pub D_col_ckpt: Aligned,\n\n pub C_col_ckpt: Aligned,\n\n pub D_row_ckpt: Aligned,\n\n pub R_row_ckpt: Aligned,\n\n\n\n // reused buffers for storing values that must be shifted\n\n // into the other border when the block moves in one direction\n\n pub temp_buf1: Aligned,\n\n pub temp_buf2: Aligned,\n\n\n", "file_path": "src/scan_block.rs", "rank": 65, "score": 62784.91144592232 }, { "content": "/// Same alignment as SIMD vectors.\n\nstruct Aligned {\n\n layout: alloc::Layout,\n\n ptr: *const i16\n\n}\n\n\n\nimpl Aligned {\n\n pub unsafe fn new(block_size: usize) -> Self {\n\n // custom alignment\n\n let layout = alloc::Layout::from_size_align_unchecked(block_size * 2, L_BYTES);\n\n let ptr = alloc::alloc_zeroed(layout) as *const i16;\n\n Self { layout, ptr }\n\n }\n\n\n\n #[cfg_attr(feature = \"simd_avx2\", target_feature(enable = \"avx2\"))]\n\n #[cfg_attr(feature = \"simd_wasm\", target_feature(enable = \"simd128\"))]\n\n pub unsafe fn clear(&mut self, block_size: usize) {\n\n let mut i = 0;\n\n while i < block_size {\n\n simd_store(self.ptr.add(i) as _, simd_set1_i16(MIN));\n\n i += L;\n", "file_path": "src/scan_block.rs", "rank": 66, "score": 62784.91144592232 }, { "content": "#[derive(Copy, Clone, PartialEq, Debug)]\n\nenum Direction {\n\n Right,\n\n Down,\n\n Diagonal\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Trace {\n\n trace: Vec<u32>,\n\n shift_dir: Vec<u32>,\n\n idx: usize\n\n}\n\n\n\nimpl Trace {\n\n #[inline(always)]\n\n pub fn new(query_len: usize, reference_len: usize) -> Self {\n\n let len = query_len + reference_len;\n\n Self {\n\n trace: vec![0; 
div_ceil(len, 16)],\n\n shift_dir: vec![0; div_ceil(div_ceil(len, L), 16)],\n", "file_path": "src/old/scan_minecraft.rs", "rank": 67, "score": 61188.25476001865 }, { "content": "#[derive(Copy, Clone, PartialEq, Debug)]\n\nenum Direction {\n\n Right,\n\n Down(usize)\n\n}\n\n\n\n// Notes:\n\n//\n\n// BLOSUM62 matrix max = 11, min = -4; gap open = -11 (includes extension), gap extend = -1\n\n//\n\n// R[i][j] = max(R[i - 1][j] + gap_extend, D[i - 1][j] + gap_open)\n\n// C[i][j] = max(C[i][j - 1] + gap_extend, D[i][j - 1] + gap_open)\n\n// D[i][j] = max(D[i - 1][j - 1] + matrix[query[i]][reference[j]], R[i][j], C[i][j])\n\n//\n\n// indexing (we want to calculate D11):\n\n// x0 x1\n\n// +--------\n\n// 0x | 00 01\n\n// 1x | 10 11\n\n//\n\n// note that 'x' represents any bit\n", "file_path": "src/old/scan_thin.rs", "rank": 68, "score": 61188.25476001865 }, { "content": "fn main() {\n\n if env::var(\"BLOCK_ALIGNER_C\").is_ok() {\n\n let crate_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n cbindgen::generate(&crate_dir)\n\n .unwrap()\n\n .write_to_file(format!(\"{}/c/block_aligner.h\", crate_dir));\n\n }\n\n}\n", "file_path": "build.rs", "rank": 69, "score": 60079.89010534457 }, { "content": "pub trait Matrix {\n\n /// Byte to use as padding.\n\n const NULL: u8;\n\n /// Create a new matrix with default (usually nonsense) values.\n\n ///\n\n /// Use `new_simple` to create a sensible scoring matrix.\n\n fn new() -> Self;\n\n /// Set the score for a pair of bytes.\n\n fn set(&mut self, a: u8, b: u8, score: i8);\n\n /// Get the score for a pair of bytes.\n\n fn get(&self, a: u8, b: u8) -> i8;\n\n /// Get the pointer for a specific index.\n\n fn as_ptr(&self, i: usize) -> *const i8;\n\n /// Get the scores for a certain byte and a certain SIMD vector of bytes.\n\n unsafe fn get_scores(&self, c: u8, v: HalfSimd, right: bool) -> Simd;\n\n /// Convert a byte to a better storage format that makes retrieving scores\n\n /// easier.\n\n fn convert_char(c: u8) -> u8;\n\n}\n\n\n", 
"file_path": "src/scores.rs", "rank": 70, "score": 60045.662916332396 }, { "content": "#[derive(Copy, Clone, PartialEq, Debug)]\n\nenum Direction {\n\n Right,\n\n Down\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Trace {\n\n trace: Vec<u32>,\n\n shift_dir: Vec<u32>,\n\n idx: usize\n\n}\n\n\n\nimpl Trace {\n\n #[inline(always)]\n\n pub fn new(query_len: usize, reference_len: usize) -> Self {\n\n let len = query_len + reference_len;\n\n Self {\n\n trace: vec![0; div_ceil(len, 16)],\n\n shift_dir: vec![0; div_ceil(div_ceil(len, L), 16)],\n\n idx: 0\n", "file_path": "src/old/scan_block_old.rs", "rank": 71, "score": 59737.44546355584 }, { "content": "fn main() {\n\n run(10000, 1000);\n\n}\n", "file_path": "examples/profile.rs", "rank": 72, "score": 58151.698795581746 }, { "content": "fn main() {\n\n let mut args = env::args().skip(1);\n\n let other_file = args.next().expect(\"Pass in the path to a tab-separated file to compare to!\");\n\n let x_drop = args.next().expect(\"Pass in an X-drop threshold!\").parse::<i32>().unwrap();\n\n let max_sizes = [32, 64];\n\n\n\n println!(\"max size, total, other better, other % better, us better, us % better\");\n\n\n\n for &max_size in &max_sizes {\n\n let (count, other_better, other_better_avg, us_better, us_better_avg) = test(&other_file, max_size, x_drop);\n\n\n\n println!(\n\n \"\\n{}, {}, {}, {}, {}, {}\",\n\n max_size,\n\n count,\n\n other_better,\n\n other_better_avg,\n\n us_better,\n\n us_better_avg\n\n );\n\n }\n\n\n\n println!(\"# Done!\");\n\n}\n\n\n\n// Scalar version of the block aligner algorithm for testing\n\n// purposes. 
May not exactly match the implementation of the\n\n// vectorized version.\n\n//\n\n// Also possible to simulate diagonal adaptive banding methods.\n", "file_path": "examples/compare.rs", "rank": 73, "score": 58151.698795581746 }, { "content": "fn main() {\n\n let mut args = env::args().skip(1);\n\n let mut q = args.next().unwrap();\n\n q.make_ascii_uppercase();\n\n let q = q.as_bytes().to_owned();\n\n let mut r = args.next().unwrap();\n\n r.make_ascii_uppercase();\n\n let r = r.as_bytes().to_owned();\n\n let r_padded = PaddedBytes::from_bytes::<AAMatrix>(&r, 2048);\n\n let q_padded = PaddedBytes::from_bytes::<AAMatrix>(&q, 2048);\n\n let run_gaps = Gaps { open: -11, extend: -1 };\n\n\n\n let mut bio_aligner = Aligner::with_capacity(q.len(), r.len(), -10, -1, &blosum62);\n\n let bio_alignment = bio_aligner.global(&q, &r);\n\n let bio_score = bio_alignment.score;\n\n\n\n let mut block_aligner = Block::<true, false>::new(q.len(), r.len(), 256);\n\n block_aligner.align(&q_padded, &r_padded, &BLOSUM62, run_gaps, 32..=256, 0);\n\n let scan_score = block_aligner.res().score;\n\n let mut scan_cigar = Cigar::new(q.len(), r.len());\n", "file_path": "examples/debug.rs", "rank": 74, "score": 58151.698795581746 }, { "content": "fn main() {\n\n let arg1 = env::args().skip(1).next();\n\n let slow = false;\n\n let nuc = true;\n\n let verbose = arg1.is_some() && arg1.unwrap() == \"-v\";\n\n let iters = [100, 100, 10];\n\n let lens = [100, 1000, 10000];\n\n let rcp_ks = [10.0, 5.0, 2.0];\n\n let inserts = [false, true];\n\n let max_sizes = [32, 2048];\n\n\n\n let mut total_wrong = 0usize;\n\n let mut total = 0usize;\n\n\n\n println!(\"\\nlen, k, insert, iter, max size, wrong, wrong % error, wrong min, wrong max\\n\");\n\n\n\n for (&len, &iter) in lens.iter().zip(&iters) {\n\n for &rcp_k in &rcp_ks {\n\n for &insert in &inserts {\n\n for &max_size in &max_sizes {\n", "file_path": "examples/accuracy.rs", "rank": 75, "score": 58151.698795581746 }, { "content": "fn main() {\n\n let arg1 
= env::args().skip(1).next();\n\n let verbose = arg1.is_some() && arg1.unwrap() == \"-v\";\n\n let iters = [100, 100, 100];\n\n let lens = [10, 100, 1000];\n\n let rcp_ks = [10.0, 5.0, 2.0];\n\n\n\n let mut total_wrong = 0usize;\n\n let mut total = 0usize;\n\n let mut total_diff_idx = 0usize;\n\n\n\n for (&len, &iter) in lens.iter().zip(&iters) {\n\n for &rcp_k in &rcp_ks {\n\n let (wrong, wrong_avg, wrong_min, wrong_max, diff_idx) = test(iter, len, ((len as f64) / rcp_k) as usize, verbose);\n\n println!(\n\n \"\\nlen: {}, k: {}, iter: {}, wrong: {}, wrong % error: {}, wrong min: {}, wrong max: {}, diff idx: {}\\n\",\n\n len,\n\n ((len as f64) / rcp_k) as usize,\n\n iter,\n\n wrong,\n", "file_path": "examples/x_drop_accuracy.rs", "rank": 76, "score": 56428.404246983104 }, { "content": "fn main() {\n\n for _i in 0..2 {\n\n let _d = time(bench_scan_aa_core, 1, false, 32, 32);\n\n }\n\n\n\n println!(\"# time (s)\");\n\n println!(\"algorithm, dataset, size, time\");\n\n\n\n let d = time(bench_scan_aa_core, 0, false, 32, 32);\n\n let uc30_time = d.as_secs_f64();\n\n println!(\"ours (no trace), uc30, 32-32, {}\", uc30_time);\n\n let d = time(bench_scan_aa_core, 1, false, 32, 32);\n\n let uc30_95_time = d.as_secs_f64();\n\n println!(\"ours (no trace), uc30 0.95, 32-32, {}\", uc30_95_time);\n\n\n\n let d = time(bench_scan_aa_core, 0, false, 32, 256);\n\n let uc30_time = d.as_secs_f64();\n\n println!(\"ours (no trace), uc30, 32-256, {}\", uc30_time);\n\n /*let d = time(bench_scan_aa_core, 1);\n\n println!(\"scan merged time (s): {}\", d.as_secs_f64());*/\n", "file_path": "examples/uc_bench.rs", "rank": 77, "score": 56428.404246983104 }, { "content": "fn main() {\n\n let arg1 = env::args().skip(1).next();\n\n let verbose = arg1.is_some() && arg1.unwrap() == \"-v\";\n\n let file_names_arr = [\n\n /*[\n\n \"data/merged_clu_aln_30_40.m8\",\n\n \"data/merged_clu_aln_40_50.m8\",\n\n \"data/merged_clu_aln_50_60.m8\",\n\n \"data/merged_clu_aln_60_70.m8\",\n\n 
\"data/merged_clu_aln_70_80.m8\",\n\n \"data/merged_clu_aln_80_90.m8\",\n\n \"data/merged_clu_aln_90_100.m8\"\n\n ],*/\n\n [\n\n \"data/uc30_0.95_30_40.m8\",\n\n \"data/uc30_0.95_40_50.m8\",\n\n \"data/uc30_0.95_50_60.m8\",\n\n \"data/uc30_0.95_60_70.m8\",\n\n \"data/uc30_0.95_70_80.m8\",\n\n \"data/uc30_0.95_80_90.m8\",\n", "file_path": "examples/uc_accuracy.rs", "rank": 78, "score": 56428.404246983104 }, { "content": "#[cfg(not(feature = \"simd_avx2\"))]\n\nfn main() {}\n\n\n", "file_path": "examples/block_img.rs", "rank": 79, "score": 56428.404246983104 }, { "content": "#[cfg(not(feature = \"simd_avx2\"))]\n\nfn main() {}\n\n\n", "file_path": "examples/nanopore_accuracy.rs", "rank": 80, "score": 56428.404246983104 }, { "content": "fn main() {\n\n let iters = [100, 100, 100];\n\n let lens = [10, 100, 1000];\n\n let rcp_ks = [10.0, 5.0, 2.0];\n\n let inserts = [false, true];\n\n\n\n let mut total_wrong = 0usize;\n\n let mut total = 0usize;\n\n\n\n for (&len, &iter) in lens.iter().zip(&iters) {\n\n for &rcp_k in &rcp_ks {\n\n for &insert in &inserts {\n\n let insert_len = if insert { Some(len / 10) } else { None };\n\n let wrong = test(iter, len, ((len as f64) / rcp_k) as usize, insert_len);\n\n println!(\n\n \"\\nlen: {}, k: {}, insert: {}, iter: {}, wrong: {}\\n\",\n\n len,\n\n ((len as f64) / rcp_k) as usize,\n\n insert,\n\n iter,\n", "file_path": "examples/verify_trace.rs", "rank": 81, "score": 56428.404246983104 }, { "content": "fn main() {\n\n for _i in 0..3 {\n\n let _d = time(bench_scan_nuc_file, true, false, 32);\n\n }\n\n\n\n println!(\"# time (s)\");\n\n println!(\"algorithm, dataset, time\");\n\n\n\n let d = time(bench_scan_nuc_file, true, false, 32);\n\n let nanopore_time = d.as_secs_f64();\n\n println!(\"ours (no trace 32-32), nanopore 25kbp, {}\", nanopore_time);\n\n let d = time(bench_scan_nuc_core, false, false, 32);\n\n let random_time = d.as_secs_f64();\n\n println!(\"ours (no trace 32-32), random, {}\", random_time);\n\n\n\n let d = 
time(bench_scan_nuc_file, true, true, 32);\n\n let nanopore_time = d.as_secs_f64();\n\n println!(\"ours (trace 32-32), nanopore 25kbp, {}\", nanopore_time);\n\n let d = time(bench_scan_nuc_core, false, true, 32);\n\n let random_time = d.as_secs_f64();\n", "file_path": "examples/nanopore_bench.rs", "rank": 82, "score": 56428.404246983104 }, { "content": "#[cfg(feature = \"simd_avx2\")]\n\nfn main() {\n\n use std::env;\n\n\n\n let arg1 = env::args().skip(1).next();\n\n let verbose = arg1.is_some() && arg1.unwrap() == \"-v\";\n\n let paths = [\"data/real.illumina.b10M.txt\", \"data/real.ont.b10M.txt\", \"data/sequences.txt\"];\n\n let names = [\"illumina\", \"nanopore 1kbp\", \"nanopore 25kbp\"];\n\n let min_size = [32, 32, 32];\n\n let max_size = [32, 128, 256];\n\n\n\n println!(\"\\ndataset, size, total, wrong, wrong % error\");\n\n\n\n for ((path, name), (&min_size, &max_size)) in paths.iter().zip(&names).zip(min_size.iter().zip(&max_size)) {\n\n let (wrong, wrong_avg, count) = test(path, min_size, max_size, verbose);\n\n println!(\"\\n{}, {}-{}, {}, {}, {}\", name, min_size, max_size, count, wrong, wrong_avg);\n\n }\n\n\n\n println!(\"# Done!\");\n\n}\n", "file_path": "examples/nanopore_accuracy.rs", "rank": 83, "score": 56428.404246983104 }, { "content": "#[cfg(feature = \"simd_avx2\")]\n\nfn main() {\n\n use block_aligner::scan_block::*;\n\n use block_aligner::scores::*;\n\n use block_aligner::cigar::*;\n\n\n\n use image::{Rgb, RgbImage, ColorType};\n\n use image::codecs::png::{PngEncoder, CompressionType, FilterType};\n\n use imageproc::drawing::*;\n\n use imageproc::rect::Rect;\n\n\n\n use std::env;\n\n use std::io::BufWriter;\n\n use std::fs::File;\n\n\n\n let args = env::args().skip(1);\n\n\n\n let seqs = [\n\n // uc30_50_60\n\n 
(b\"MVQATTWKKAIPGLSDEASSSPASELRAPLGGVRAMTMNELTRYSIKEPPSDELGSQLVNLYLQQLHTRYPFLDPAELWRLQKARTPVAHSESGNLSMTQRYGIFKLYMVFAIGATLLQLTNKSAEVSPERFYMTALQHMAAAKVPRTVQNIEAMTLLVVYHLRSASGLGLWYMIGLAMRTCIDLGLHRKNHERGLAPLVIQMHRRLFWTVYSLEIVIAISLGRPLSISERQIDVELPDTISVASVPCPSSPGETPVQPTSSNDNLQLANLLFQLRSIEARIHHSIYRTDKPLSALLPKLDKIYKQLEVWRLASIESLPPDGHVLDYPLLLYHRAVRMLIQPFMTILPVSDPYYVLCLRAAGSVCQMHKRLHQTIGYGHSFIAVQTIFVAGVTLLYGLWTQTHLVWSVTLADDLRACSLVLFVMSERAPWVRKYRDAFEVLVDAAMEKLRSGESSLAEMVAVAQTQAQAQSQSQGPRVGQFASGDETMRGPNPDTGPGSSSYGNGNGEHGGESGDVWRLVTELADWIDQDQETTPKWMPNFEALQSLS\".to_vec(), b\"MTSETQNSVSPPLAMPGAVAVNPRKRGRTAYVADDASSIAYTRALEERVAFLENKLAQVPTPEATTTPRETASNYSVPSGRDKNALSDVVAHVSLGNFEAPAYVGPSSGLSLALNLGEMVQATVWNKMLPDIQDGTTGNQANCINPSPRCITVEDLLAHSVKEPPSDEQGSQMLKAYTSQLHSKYPFLEPEELWKLHSERLTLAAKPTQTLTRIERFGIFKLYLVYAMGATLVQLTQRGPVLSPEALYITALQHISAARESRTVQNIEAMTLLVMFHLRSTSSHGLWYMIGLAMRTSIDLGLHRAAHEQNLDGPIVQRRRRLFWSVYSLERTIAVSLGRPLSIADNQIDVELPNTSINESPSASVIVGNDITLALVLFKLRRIESKIHHSVYRTDKTLDSLRPKLDRLHQQLKIWRNSLTDWIPTGHPDLNYALLLYNRALRLLIQPFLPILPATDPFYGLCMRAAGDICQAHKRLHQTLDYGHSFIAVQTVFVAGVTLVYGLWTQGNALWSVAVSNDIRACSLVLFVMSERAPWVRKYRDAFEVLVNAAMEKLQDSEAGLAEMASAQMRAGKAPGAADSRGVQNPDLSGNETTTRPMDSSSNQFLMSEDGGIALGEFEGAWPMVAELANWIDQDTEGGSPVWMPNFELLQSLSGTWNE\".to_vec()),\n\n\n", "file_path": "examples/block_img.rs", "rank": 84, "score": 56428.404246983104 }, { "content": "#[repr(align(32))]\n\nstruct A([i16; L]);\n\n\n", "file_path": "benches/prefix_scan.rs", "rank": 85, "score": 56235.84589798902 }, { "content": "/// Keeps track of internal state and some parameters for block aligner.\n\n///\n\n/// This does not describe the whole state. 
The allocated scratch spaces\n\n/// and other local variables are also needed.\n\nstruct State<'a, M: Matrix> {\n\n query: &'a PaddedBytes,\n\n i: usize,\n\n reference: &'a PaddedBytes,\n\n j: usize,\n\n min_size: usize,\n\n max_size: usize,\n\n matrix: &'a M,\n\n gaps: Gaps,\n\n x_drop: i32\n\n}\n\n\n\n/// Data structure storing the settings for block aligner.\n\npub struct Block<const TRACE: bool, const X_DROP: bool> {\n\n res: AlignResult,\n\n allocated: Allocated\n\n}\n\n\n\n// increasing step size gives a bit extra speed but results in lower accuracy\n\n// current settings are fast, at the expense of some accuracy, and step size does not grow\n", "file_path": "src/scan_block.rs", "rank": 86, "score": 52861.15414767027 }, { "content": "fn bench_scan_nuc_file(_file: bool, trace: bool, max_size: usize) -> (i32, Duration) {\n\n let file_data = get_data(Some(&FILE_NAME));\n\n let x_drop = 50;\n\n let data = file_data\n\n .iter()\n\n .map(|(q, r)| (PaddedBytes::from_bytes::<NucMatrix>(q, 2048), PaddedBytes::from_bytes::<NucMatrix>(r, 2048)))\n\n .collect::<Vec<(PaddedBytes, PaddedBytes)>>();\n\n let bench_gaps = Gaps { open: -2, extend: -1 };\n\n\n\n let start = Instant::now();\n\n let mut temp = 0i32;\n\n for (q, r) in &data {\n\n if trace {\n\n let mut a = Block::<true, true>::new(q.len(), r.len(), max_size);\n\n a.align(&q, &r, &NW1, bench_gaps, 32..=max_size, x_drop);\n\n temp = temp.wrapping_add(a.res().score); // prevent optimizations\n\n } else {\n\n let mut a = Block::<false, true>::new(q.len(), r.len(), max_size);\n\n a.align(&q, &r, &NW1, bench_gaps, 32..=max_size, x_drop);\n\n temp = temp.wrapping_add(a.res().score); // prevent optimizations\n\n }\n\n }\n\n (temp, start.elapsed())\n\n}\n\n\n", "file_path": "examples/nanopore_bench.rs", "rank": 87, "score": 52539.777697847174 }, { "content": "fn bench_scan_nuc_core(_file: bool, trace: bool, max_size: usize) -> (i32, Duration) {\n\n let file_data = get_data(None);\n\n let x_drop = 100;\n\n let matrix = 
NucMatrix::new_simple(2, -3);\n\n let data = file_data\n\n .iter()\n\n .map(|(q, r)| (PaddedBytes::from_bytes::<NucMatrix>(q, 2048), PaddedBytes::from_bytes::<NucMatrix>(r, 2048)))\n\n .collect::<Vec<(PaddedBytes, PaddedBytes)>>();\n\n let bench_gaps = Gaps { open: -5, extend: -1 };\n\n\n\n let start = Instant::now();\n\n let mut temp = 0i32;\n\n for (q, r) in &data {\n\n if trace {\n\n let mut a = Block::<true, true>::new(q.len(), r.len(), max_size);\n\n a.align(&q, &r, &matrix, bench_gaps, 32..=max_size, x_drop);\n\n temp = temp.wrapping_add(a.res().score); // prevent optimizations\n\n } else {\n\n let mut a = Block::<false, true>::new(q.len(), r.len(), max_size);\n\n a.align(&q, &r, &matrix, bench_gaps, 32..=max_size, x_drop);\n\n temp = temp.wrapping_add(a.res().score); // prevent optimizations\n\n }\n\n }\n\n (temp, start.elapsed())\n\n}\n\n\n", "file_path": "examples/nanopore_bench.rs", "rank": 88, "score": 52539.777697847174 }, { "content": "#[cfg(not(feature = \"simd_wasm\"))]\n\n#[allow(dead_code)]\n\nfn bench_parasailors_nuc_core(file: bool, _trace: bool, _max_size: usize) -> (i32, Duration) {\n\n let file_data = get_data(if file { Some(&FILE_NAME) } else { None });\n\n let matrix = Matrix::new(MatrixType::IdentityWithPenalty);\n\n let data = file_data\n\n .iter()\n\n .map(|(q, r)| (Profile::new(q, &matrix), r.to_owned()))\n\n .collect::<Vec<(Profile, Vec<u8>)>>();\n\n\n\n let start = Instant::now();\n\n let mut temp = 0i32;\n\n for (p, r) in &data {\n\n temp = temp.wrapping_add(global_alignment_score(p, r, 2, 1));\n\n }\n\n (temp, start.elapsed())\n\n}\n\n\n", "file_path": "examples/nanopore_bench.rs", "rank": 89, "score": 52539.777697847174 }, { "content": "#[inline]\n\nfn clamp(x: i32) -> i16 {\n\n cmp::min(cmp::max(x, i16::MIN as i32), i16::MAX as i32) as i16\n\n}\n\n\n", "file_path": "src/scan_block.rs", "rank": 90, "score": 47775.73086420486 }, { "content": "#[inline(always)]\n\nfn clamp(x: i32) -> i16 {\n\n cmp::min(cmp::max(x, i16::MIN as i32), 
i16::MAX as i32) as i16\n\n}\n\n\n", "file_path": "src/old/scan_minecraft.rs", "rank": 91, "score": 46503.52111269998 }, { "content": "// BLAST sequence identity\n\nfn seq_id(a: &Alignment) -> f64 {\n\n let mut matches = 0;\n\n\n\n for &op in &a.operations {\n\n if op == AlignmentOperation::Match {\n\n matches += 1;\n\n }\n\n }\n\n\n\n (matches as f64) / (a.operations.len() as f64)\n\n}\n\n\n", "file_path": "examples/uc_accuracy.rs", "rank": 92, "score": 46503.52111269998 }, { "content": "#[inline(always)]\n\nfn clamp(x: i32) -> i16 {\n\n cmp::min(cmp::max(x, i16::MIN as i32), i16::MAX as i32) as i16\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub struct EndIndex {\n\n pub query_idx: usize,\n\n pub ref_idx: usize\n\n}\n\n\n", "file_path": "src/old/scan_thin.rs", "rank": 93, "score": 46503.52111269998 }, { "content": "#[inline(always)]\n\nfn clamp(x: i32) -> i16 {\n\n cmp::min(cmp::max(x, i16::MIN as i32), i16::MAX as i32) as i16\n\n}\n\n\n", "file_path": "src/old/scan_block_old.rs", "rank": 94, "score": 45342.80552233202 } ]
Rust
pallets/account-linker/src/lib.rs
FueledAmp/litentry-node
bb703fbd06f45824c79c32a3e938799f07e9442d
#![cfg_attr(not(feature = "std"), no_std)] use codec::Encode; use sp_std::prelude::*; use sp_io::crypto::secp256k1_ecdsa_recover_compressed; use frame_support::{decl_module, decl_storage, decl_event, decl_error, dispatch, ensure}; use frame_system::{ensure_signed}; use btc::base58::ToBase58; use btc::witness::WitnessProgram; #[cfg(test)] mod mock; #[cfg(test)] mod tests; mod btc; mod util_eth; mod benchmarking; const EXPIRING_BLOCK_NUMBER_MAX: u32 = 10 * 60 * 24 * 30; pub const MAX_ETH_LINKS: usize = 3; pub const MAX_BTC_LINKS: usize = 3; pub trait Config: frame_system::Config { type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>; } enum BTCAddrType { Legacy, Segwit, } decl_storage! { trait Store for Module<T: Config> as AccountLinkerModule { pub EthereumLink get(fn eth_addresses): map hasher(blake2_128_concat) T::AccountId => Vec<[u8; 20]>; pub BitcoinLink get(fn btc_addresses): map hasher(blake2_128_concat) T::AccountId => Vec<Vec<u8>>; } } decl_event!( pub enum Event<T> where AccountId = <T as frame_system::Config>::AccountId, { EthAddressLinked(AccountId, Vec<u8>), BtcAddressLinked(AccountId, Vec<u8>), } ); decl_error! { pub enum Error for Module<T: Config> { EcdsaRecoverFailure, LinkRequestExpired, UnexpectedAddress, UnexpectedEthMsgLength, InvalidBTCAddress, InvalidBTCAddressLength, InvalidExpiringBlockNumber, } } decl_module! 
{ pub struct Module<T: Config> for enum Call where origin: T::Origin { type Error = Error<T>; fn deposit_event() = default; #[weight = 1] pub fn link_eth( origin, account: T::AccountId, index: u32, addr_expected: [u8; 20], expiring_block_number: T::BlockNumber, r: [u8; 32], s: [u8; 32], v: u8, ) -> dispatch::DispatchResult { let _ = ensure_signed(origin)?; let current_block_number = <frame_system::Module<T>>::block_number(); ensure!(expiring_block_number > current_block_number, Error::<T>::LinkRequestExpired); ensure!((expiring_block_number - current_block_number) < T::BlockNumber::from(EXPIRING_BLOCK_NUMBER_MAX), Error::<T>::InvalidExpiringBlockNumber); let mut bytes = b"Link Litentry: ".encode(); let mut account_vec = account.encode(); let mut expiring_block_number_vec = expiring_block_number.encode(); bytes.append(&mut account_vec); bytes.append(&mut expiring_block_number_vec); let hash = util_eth::eth_data_hash(bytes).map_err(|_| Error::<T>::UnexpectedEthMsgLength)?; let mut msg = [0u8; 32]; let mut sig = [0u8; 65]; msg[..32].copy_from_slice(&hash[..32]); sig[..32].copy_from_slice(&r[..32]); sig[32..64].copy_from_slice(&s[..32]); sig[64] = v; let addr = util_eth::addr_from_sig(msg, sig) .map_err(|_| Error::<T>::EcdsaRecoverFailure)?; ensure!(addr == addr_expected, Error::<T>::UnexpectedAddress); let index = index as usize; let mut addrs = Self::eth_addresses(&account); if (index >= addrs.len()) && (addrs.len() != MAX_ETH_LINKS) { addrs.push(addr.clone()); } else if (index >= addrs.len()) && (addrs.len() == MAX_ETH_LINKS) { addrs[MAX_ETH_LINKS - 1] = addr.clone(); } else { addrs[index] = addr.clone(); } <EthereumLink<T>>::insert(account.clone(), addrs); Self::deposit_event(RawEvent::EthAddressLinked(account, addr.to_vec())); Ok(()) } #[weight = 1] pub fn link_btc( origin, account: T::AccountId, index: u32, addr_expected: Vec<u8>, expiring_block_number: T::BlockNumber, r: [u8; 32], s: [u8; 32], v: u8, ) -> dispatch::DispatchResult { let _ = 
ensure_signed(origin)?; let current_block_number = <frame_system::Module<T>>::block_number(); ensure!(expiring_block_number > current_block_number, Error::<T>::LinkRequestExpired); ensure!((expiring_block_number - current_block_number) < T::BlockNumber::from(EXPIRING_BLOCK_NUMBER_MAX), Error::<T>::InvalidExpiringBlockNumber); if addr_expected.len() < 2 { Err(Error::<T>::InvalidBTCAddressLength)? } let addr_type = if addr_expected[0] == b'1' { BTCAddrType::Legacy } else if addr_expected[0] == b'b' && addr_expected[1] == b'c' { BTCAddrType::Segwit } else { Err(Error::<T>::InvalidBTCAddress)? }; let mut bytes = b"Link Litentry: ".encode(); let mut account_vec = account.encode(); let mut expiring_block_number_vec = expiring_block_number.encode(); bytes.append(&mut account_vec); bytes.append(&mut expiring_block_number_vec); let hash = sp_io::hashing::keccak_256(&bytes); let mut msg = [0u8; 32]; let mut sig = [0u8; 65]; msg[..32].copy_from_slice(&hash[..32]); sig[..32].copy_from_slice(&r[..32]); sig[32..64].copy_from_slice(&s[..32]); sig[64] = v; let pk = secp256k1_ecdsa_recover_compressed(&sig, &msg) .map_err(|_| Error::<T>::EcdsaRecoverFailure)?; let addr = match addr_type { BTCAddrType::Legacy => { btc::legacy::btc_addr_from_pk(&pk).to_base58() }, BTCAddrType::Segwit => { let pk_hash = btc::legacy::hash160(&pk); let mut pk = [0u8; 22]; pk[0] = 0; pk[1] = 20; pk[2..].copy_from_slice(&pk_hash); let wp = WitnessProgram::from_scriptpubkey(&pk.to_vec()).map_err(|_| Error::<T>::InvalidBTCAddress)?; wp.to_address(b"bc".to_vec()).map_err(|_| Error::<T>::InvalidBTCAddress)? 
} }; ensure!(addr == addr_expected, Error::<T>::UnexpectedAddress); let index = index as usize; let mut addrs = Self::btc_addresses(&account); if (index >= addrs.len()) && (addrs.len() != MAX_BTC_LINKS) { addrs.push(addr.clone()); } else if (index >= addrs.len()) && (addrs.len() == MAX_BTC_LINKS) { addrs[MAX_BTC_LINKS - 1] = addr.clone(); } else { addrs[index] = addr.clone(); } <BitcoinLink<T>>::insert(account.clone(), addrs); Self::deposit_event(RawEvent::BtcAddressLinked(account, addr)); Ok(()) } } }
#![cfg_attr(not(feature = "std"), no_std)] use codec::Encode; use sp_std::prelude::*; use sp_io::crypto::secp256k1_ecdsa_recover_compressed; use frame_support::{decl_module, decl_storage, decl_event, decl_error, dispatch, ensure}; use frame_system::{ensure_signed}; use btc::base58::ToBase58; use btc::witness::WitnessProgram; #[cfg(test)] mod mock; #[cfg(test)] mod tests; mod btc; mod util_eth; mod benchmarking; const EXPIRING_BLOCK_NUMBER_MAX: u32 = 10 * 60 * 24 * 30; pub const MAX_ETH_LINKS: usize = 3; pub const MAX_BTC_LINKS: usize = 3; pub trait Config: frame_system::Config { type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>; } enum BTCAddrType { Legacy, Segwit, } decl_storage! { trait Store for Module<T: Config> as AccountLinkerModule { pub EthereumLink get(fn eth_addresses): map hasher(blake2_128_concat) T::AccountId => Vec<[u8; 20]>; pub BitcoinLink get(fn btc_addresses): map hasher(blake2_128_concat) T::AccountId => Vec<Vec<u8>>; } } decl_event!( pub enum Event<T> where AccountId = <T as frame_system::Config>::AccountId, { EthAddressLinked(AccountId, Vec<u8>), BtcAddressLinked(AccountId, Vec<u8>), } ); decl_error! { pub enum Error for Module<T: Config> { EcdsaRecoverFailure, LinkRequestExpired, UnexpectedAddress, UnexpectedEthMsgLength, InvalidBTCAddress, InvalidBTCAddressLength, InvalidExpiringBlockNumber, } } decl_module! { pub struct Module<T: Config> for enum Call where origin: T::Origin { type Error = Error<T>; fn deposit_event() = default; #[weight = 1] pub fn link_eth( origin, account: T::AccountId, index: u32, addr_expected: [u8; 20], expiring_block_number: T::BlockNumber, r: [u8; 32], s: [u8; 32], v: u8, ) -> dispatch::DispatchResult { let _ = ensure_signed(origin)?; let current_block_number = <frame_system::Module<T>>::block_number(); ensure!(expiring_block_num
nt); if (index >= addrs.len()) && (addrs.len() != MAX_ETH_LINKS) { addrs.push(addr.clone()); } else if (index >= addrs.len()) && (addrs.len() == MAX_ETH_LINKS) { addrs[MAX_ETH_LINKS - 1] = addr.clone(); } else { addrs[index] = addr.clone(); } <EthereumLink<T>>::insert(account.clone(), addrs); Self::deposit_event(RawEvent::EthAddressLinked(account, addr.to_vec())); Ok(()) } #[weight = 1] pub fn link_btc( origin, account: T::AccountId, index: u32, addr_expected: Vec<u8>, expiring_block_number: T::BlockNumber, r: [u8; 32], s: [u8; 32], v: u8, ) -> dispatch::DispatchResult { let _ = ensure_signed(origin)?; let current_block_number = <frame_system::Module<T>>::block_number(); ensure!(expiring_block_number > current_block_number, Error::<T>::LinkRequestExpired); ensure!((expiring_block_number - current_block_number) < T::BlockNumber::from(EXPIRING_BLOCK_NUMBER_MAX), Error::<T>::InvalidExpiringBlockNumber); if addr_expected.len() < 2 { Err(Error::<T>::InvalidBTCAddressLength)? } let addr_type = if addr_expected[0] == b'1' { BTCAddrType::Legacy } else if addr_expected[0] == b'b' && addr_expected[1] == b'c' { BTCAddrType::Segwit } else { Err(Error::<T>::InvalidBTCAddress)? 
}; let mut bytes = b"Link Litentry: ".encode(); let mut account_vec = account.encode(); let mut expiring_block_number_vec = expiring_block_number.encode(); bytes.append(&mut account_vec); bytes.append(&mut expiring_block_number_vec); let hash = sp_io::hashing::keccak_256(&bytes); let mut msg = [0u8; 32]; let mut sig = [0u8; 65]; msg[..32].copy_from_slice(&hash[..32]); sig[..32].copy_from_slice(&r[..32]); sig[32..64].copy_from_slice(&s[..32]); sig[64] = v; let pk = secp256k1_ecdsa_recover_compressed(&sig, &msg) .map_err(|_| Error::<T>::EcdsaRecoverFailure)?; let addr = match addr_type { BTCAddrType::Legacy => { btc::legacy::btc_addr_from_pk(&pk).to_base58() }, BTCAddrType::Segwit => { let pk_hash = btc::legacy::hash160(&pk); let mut pk = [0u8; 22]; pk[0] = 0; pk[1] = 20; pk[2..].copy_from_slice(&pk_hash); let wp = WitnessProgram::from_scriptpubkey(&pk.to_vec()).map_err(|_| Error::<T>::InvalidBTCAddress)?; wp.to_address(b"bc".to_vec()).map_err(|_| Error::<T>::InvalidBTCAddress)? } }; ensure!(addr == addr_expected, Error::<T>::UnexpectedAddress); let index = index as usize; let mut addrs = Self::btc_addresses(&account); if (index >= addrs.len()) && (addrs.len() != MAX_BTC_LINKS) { addrs.push(addr.clone()); } else if (index >= addrs.len()) && (addrs.len() == MAX_BTC_LINKS) { addrs[MAX_BTC_LINKS - 1] = addr.clone(); } else { addrs[index] = addr.clone(); } <BitcoinLink<T>>::insert(account.clone(), addrs); Self::deposit_event(RawEvent::BtcAddressLinked(account, addr)); Ok(()) } } }
ber > current_block_number, Error::<T>::LinkRequestExpired); ensure!((expiring_block_number - current_block_number) < T::BlockNumber::from(EXPIRING_BLOCK_NUMBER_MAX), Error::<T>::InvalidExpiringBlockNumber); let mut bytes = b"Link Litentry: ".encode(); let mut account_vec = account.encode(); let mut expiring_block_number_vec = expiring_block_number.encode(); bytes.append(&mut account_vec); bytes.append(&mut expiring_block_number_vec); let hash = util_eth::eth_data_hash(bytes).map_err(|_| Error::<T>::UnexpectedEthMsgLength)?; let mut msg = [0u8; 32]; let mut sig = [0u8; 65]; msg[..32].copy_from_slice(&hash[..32]); sig[..32].copy_from_slice(&r[..32]); sig[32..64].copy_from_slice(&s[..32]); sig[64] = v; let addr = util_eth::addr_from_sig(msg, sig) .map_err(|_| Error::<T>::EcdsaRecoverFailure)?; ensure!(addr == addr_expected, Error::<T>::UnexpectedAddress); let index = index as usize; let mut addrs = Self::eth_addresses(&accou
random
[ { "content": "pub fn hash160(bytes: &[u8]) -> [u8; 20] {\n\n let mut hasher_sha256 = Sha256::new();\n\n hasher_sha256.update(bytes);\n\n let digest = hasher_sha256.finalize();\n\n\n\n let mut hasher_ripemd = Ripemd160::new();\n\n hasher_ripemd.update(digest);\n\n\n\n let mut ret = [0; 20];\n\n ret.copy_from_slice(&hasher_ripemd.finalize()[..]);\n\n ret\n\n}\n\n\n", "file_path": "pallets/account-linker/src/btc/legacy.rs", "rank": 0, "score": 242443.21838127472 }, { "content": "pub fn btc_addr_from_pk(pk: &[u8]) -> [u8; 25] {\n\n let mut result = [0u8; 25];\n\n\n\n // Now only support P2PKH (Mainnet) prefix = 0\n\n result[0] = 0;\n\n result[1..21].copy_from_slice(&hash160(pk));\n\n let cs = checksum(&result[0..21]);\n\n result[21..25].copy_from_slice(&cs);\n\n result\n\n}\n\n\n", "file_path": "pallets/account-linker/src/btc/legacy.rs", "rank": 1, "score": 222904.77892859682 }, { "content": "/// Computes Bitcoin's double SHA256 hash over a LE byte encoded input\n\n///\n\n/// # Arguments\n\n/// * data: LE bytes encoded input\n\n///\n\n/// # Returns\n\n/// * The double SHA256 hash encoded as LE bytes from data\n\nfn dsha256(bytes: &[u8]) -> [u8; 32] {\n\n let mut hasher = Sha256::new();\n\n hasher.update(bytes);\n\n let digest = hasher.finalize();\n\n\n\n let mut second_hasher = Sha256::new();\n\n second_hasher.update(digest);\n\n\n\n let mut ret = [0; 32];\n\n ret.copy_from_slice(&second_hasher.finalize()[..]);\n\n ret\n\n}\n\n\n\n// test data can be obtained from here http://gobittest.appspot.com/Address\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\tuse hex::decode;\n\n\n\n\t#[test]\n", "file_path": "pallets/account-linker/src/btc/legacy.rs", "rank": 2, "score": 206514.25017456367 }, { "content": "pub trait Config: frame_system::Config + account_linker::Config + CreateSignedTransaction<Call<Self>> {\n\n\ttype Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy +\n\n\t\tMaybeSerializeDeserialize + Debug;\n\n\ttype Event: 
From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\ttype Call: From<Call<Self>>;\n\n\ttype AuthorityId: AppCrypto<Self::Public, Self::Signature>;\n\n\ttype QueryTaskRedundancy: Get<u32>;\n\n\ttype QuerySessionLength: Get<u32>;\n\n\t/// Currency type for this pallet.\n\n\ttype Currency: Currency<Self::AccountId>;\n\n\t/// Handler for the unbalanced increment when rewarding (minting rewards)\n\n\ttype Reward: OnUnbalanced<PositiveImbalanceOf<Self>>;\n\n\ttype OcwQueryReward: Get<<<Self as Config>::Currency as Currency<<Self as frame_system::Config>::AccountId>>::Balance>;\n\n}\n\n\n\n\n\ndecl_storage! {\n", "file_path": "pallets/offchain-worker/src/lib.rs", "rank": 3, "score": 197449.19718246098 }, { "content": "pub fn run_to_block(n: u32) {\n\n while System::block_number() < n {\n\n AccountLinker::on_finalize(System::block_number());\n\n System::on_finalize(System::block_number());\n\n System::set_block_number(System::block_number() + 1);\n\n System::on_initialize(System::block_number());\n\n AccountLinker::on_initialize(System::block_number());\n\n }\n\n}\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 4, "score": 196903.6933296536 }, { "content": "pub fn addr_from_sig(msg: [u8; 32], sig: [u8; 65]) -> Result<[u8; 20], sp_io::EcdsaVerifyError> {\n\n\tlet pubkey = sp_io::crypto::secp256k1_ecdsa_recover(&sig, &msg)?;\n\n\tlet hashed_pk = sp_io::hashing::keccak_256(&pubkey);\n\n\n\n\tlet mut addr = [0u8; 20];\n\n\taddr[..20].copy_from_slice(&hashed_pk[12..32]);\n\n\tOk(addr)\n\n}\n\n\n", "file_path": "pallets/account-linker/src/util_eth.rs", "rank": 5, "score": 191001.7390938318 }, { "content": "fn polymod(values: Vec<u8>) -> u32 {\n\n let mut chk: u32 = 1;\n\n let mut b: u8;\n\n for v in values {\n\n b = (chk >> 25) as u8;\n\n chk = (chk & 0x1ffffff) << 5 ^ (v as u32);\n\n for i in 0..5 {\n\n if (b >> i) & 1 == 1 {\n\n chk ^= GEN[i]\n\n }\n\n }\n\n }\n\n chk\n\n}\n\n\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 
7, "score": 185523.71323244437 }, { "content": "fn checksum(input: &[u8]) -> [u8; 4] {\n\n\tlet mut result = [0u8; 4];\n\n\tresult.copy_from_slice(&dsha256(input)[0..4]);\n\n\tresult\n\n}\n\n\n", "file_path": "pallets/account-linker/src/btc/legacy.rs", "rank": 8, "score": 185048.7971496493 }, { "content": "fn generate_rsv(sig: &[u8; 65]) -> ([u8; 32], [u8; 32], u8) {\n\n\tlet mut r = [0u8; 32];\n\n\tlet mut s = [0u8; 32];\n\n\n\n\tr[..32].copy_from_slice(&sig[..32]);\n\n\ts[..32].copy_from_slice(&sig[32..64]);\n\n\tlet v = sig[64];\n\n\t(r, s, v)\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 9, "score": 181249.74062658625 }, { "content": "// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n\tsystem::GenesisConfig::default()\n\n\t\t.build_storage::<Test>()\n\n\t\t.unwrap()\n\n\t\t.into()\n\n}\n\n\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 10, "score": 166199.9277978391 }, { "content": "/// A trait for converting a value to base58 encoded string.\n\npub trait ToBase32 {\n\n\t/// Converts a value of `self` to a base58 value, returning the owned string.\n\n\tfn to_base32(&self) -> Vec<u8>;\n\n}\n\n\n\nimpl ToBase32 for [u8] {\n\n // /// Convert between bit sizes\n\n // fn to_base32(&self) -> Vec<u8> {\n\n // let from: u32 = 8;\n\n // let to: u32 = 5;\n\n\n\n // let mut acc: u32 = 0;\n\n // let mut bits: u32 = 0;\n\n // let mut ret: Vec<u8> = Vec::new();\n\n // let maxv: u32 = (1<<to) - 1;\n\n // for &value in self.into_iter() {\n\n // let v: u32 = value as u32;\n\n\n\n // acc = (acc << from) | v;\n\n // bits += from;\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 12, "score": 161328.69242798336 }, { "content": "/// A trait for converting a value to base58 encoded string.\n\npub trait ToBase58 {\n\n\t/// Converts a value of `self` to a base58 value, returning the owned string.\n\n\tfn to_base58(&self) -> Vec<u8>;\n\n}\n\n\n\nimpl 
ToBase58 for [u8] {\n\n\tfn to_base58(&self) -> Vec<u8> {\n\n\t\tlet zcount = self.iter().take_while(|x| **x == 0).count();\n\n\t\tlet size = (self.len() - zcount) * 138 / 100 + 1;\n\n\t\tlet mut buffer = vec![0u8; size];\n\n\n\n\t\tlet mut i = zcount;\n\n\t\tlet mut high = size - 1;\n\n\n\n\t\twhile i < self.len() {\n\n\t\t\tlet mut carry = self[i] as u32;\n\n\t\t\tlet mut j = size - 1;\n\n\n\n\t\t\twhile j > high || carry != 0 {\n\n\t\t\t\tcarry += 256 * buffer[j] as u32;\n", "file_path": "pallets/account-linker/src/btc/base58.rs", "rank": 13, "score": 161328.69242798336 }, { "content": "pub trait Bech32 {\n\n fn encode(&self, hrp: Vec<u8>) -> Result<Vec<u8>, &'static str>;\n\n}\n\n\n\nimpl Bech32 for [u8] {\n\n fn encode(&self, hrp: Vec<u8>) -> Result<Vec<u8>, &'static str> {\n\n if hrp.len() < 1 {\n\n return Err(\"invalidData\")\n\n }\n\n\n\n let mut combined: Vec<u8> = self.clone().to_vec();\n\n combined.extend_from_slice(&create_checksum(&hrp, &self.to_vec()));\n\n let mut encoded = hrp;\n\n encoded.push(SEP);\n\n for p in combined {\n\n if p >= 32 {\n\n return Err(\"invalidData\")\n\n }\n\n encoded.push(ALPHABET[p as usize]);\n\n }\n\n Ok(encoded)\n\n }\n\n}\n\n\n\nconst GEN: [u32; 5] = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3];\n\n\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 14, "score": 161324.97786798296 }, { "content": "// address to string bytes\n\npub fn address_to_string(address: &[u8; 20]) -> Vec<u8> {\n\n\n\n\tlet mut vec_result: Vec<u8> = Vec::new();\n\n\tfor item in address {\n\n\t\tlet a: u8 = item & 0x0F;\n\n\t\tlet b: u8 = item >> 4;\n\n\t\tvec_result.push(u8_to_str_byte(b));\n\n\t\tvec_result.push(u8_to_str_byte(a));\n\n\t}\n\n\treturn vec_result;\n\n}\n", "file_path": "pallets/offchain-worker/src/utils.rs", "rank": 15, "score": 160834.04900011176 }, { "content": "fn generate_msg(account: &AccountId32, block_number: u32) -> Message {\n\n\n\n\tlet mut bytes = b\"\\x19Ethereum Signed Message:\\n51Link 
Litentry: \".encode();\n\n\tlet mut account_vec = account.encode();\n\n\tlet mut expiring_block_number_vec = block_number.encode();\n\n\n\n\tbytes.append(&mut account_vec);\n\n\tbytes.append(&mut expiring_block_number_vec);\n\n\n\n\tMessage::from(bytes.keccak256())\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 16, "score": 159404.52991288528 }, { "content": "/// Returns a eth_sign-compatible hash of data to sign.\n\n/// The data is prefixed with special message to prevent\n\n/// malicious DApps from using the function to sign forged transactions.\n\npub fn eth_data_hash(mut data: Vec<u8>) -> Result<[u8; 32], &'static str> {\n\n\tconst MSG_LEN: usize = 51;\n\n\tif data.len() != MSG_LEN {\n\n\t\tdebug::error!(\"Ethereum message has an unexpected length {} !!! Expected is {}.\", data.len(), MSG_LEN);\n\n\t\treturn Err(\"Unexpected ethereum message length!\");\n\n\t}\n\n\tlet mut length_bytes = usize_to_u8_array(data.len())?;\n\n\tlet mut eth_data = b\"\\x19Ethereum Signed Message:\\n\".encode();\n\n\teth_data.append(&mut length_bytes);\n\n\teth_data.append(&mut data);\n\n\tOk(sp_io::hashing::keccak_256(&eth_data))\n\n}\n\n\n", "file_path": "pallets/account-linker/src/util_eth.rs", "rank": 17, "score": 155889.84492433758 }, { "content": "// number byte to string byte\n\npub fn u8_to_str_byte(a: u8) -> u8{\n\n\tif a < 10 {\n\n\t\treturn a + 48 as u8;\n\n\t}\n\n\telse {\n\n\t\treturn a + 87 as u8;\n\n\t}\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/utils.rs", "rank": 18, "score": 155638.91445337882 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n\tpub enum Test where\n\n\t\tBlock = Block,\n\n\t\tNodeBlock = Block,\n\n\t\tUncheckedExtrinsic = UncheckedExtrinsic,\n\n\t{\n\n\t\tSystem: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n\t\tAccountLinker: account_linker::{Module, Call, Storage, 
Event<T>},\n\n\t}\n\n);\n\n\n\nparameter_types! {\n\n\tpub const BlockHashCount: u32 = 250;\n\n\tpub const SS58Prefix: u8 = 42;\n\n}\n\n\n\nimpl system::Config for Test {\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 19, "score": 154424.63379798696 }, { "content": "/// Data source to blockchain type\n\npub fn data_source_to_index(data_source: DataSource) -> u32 {\n\n match data_source {\n\n DataSource::Invalid => u32::MAX, \n\n DataSource::EthEtherScan => 0,\n\n DataSource::EthInfura => 1,\n\n DataSource::BtcBlockChain => 2,\n\n }\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 20, "score": 151730.12178644116 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 21, "score": 149078.49805040547 }, { "content": "fn hrp_expand(hrp: &Vec<u8>) -> Vec<u8> {\n\n let mut v: Vec<u8> = Vec::new();\n\n for b in hrp {\n\n v.push(*b >> 5);\n\n }\n\n v.push(0);\n\n for b in hrp {\n\n v.push(*b & 0x1f);\n\n }\n\n v\n\n}\n\n\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 22, "score": 147211.24033474404 }, { "content": "#[test]\n\nfn test_btc_link_p2pkh() {\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n run_to_block(1);\n\n\n\n\t\t// Generate random key pair\n\n\t\tlet s = Secp256k1::new();\n\n\t\tlet pair = s.generate_keypair(&mut thread_rng());\n\n\t\tlet public_key = key::PublicKey {\n\n\t\t\tcompressed: true,\n\n\t\t\tkey: pair.1,\n\n\t\t};\n\n\n\n\t\t// Generate pay-to-pubkey-hash address\n\n\t\tlet address = Address::p2pkh(&public_key, Network::Bitcoin);\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([255u8; 32]);\n\n\t\tlet block_number: u32 = 99999;\n\n\n\n\t\tlet mut bytes = b\"Link Litentry: \".encode();\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 23, "score": 144393.4676968205 }, { "content": "#[test]\n\nfn test_btc_link_p2wpkh() {\n\n\tnew_test_ext().execute_with(|| 
{\n\n\n\n run_to_block(1);\n\n\n\n\t\t// Generate random key pair\n\n\t\tlet s = Secp256k1::new();\n\n\t\tlet pair = s.generate_keypair(&mut thread_rng());\n\n\t\tlet public_key = key::PublicKey {\n\n\t\t\tcompressed: true,\n\n\t\t\tkey: pair.1,\n\n\t\t};\n\n\n\n\t\t// Generate pay-to-pubkey-hash address\n\n\t\tlet address = Address::p2wpkh(&public_key, Network::Bitcoin).unwrap();\n\n\n\n\t\tprintln!(\"{}\", address);\n\n\t\tlet account: AccountId32 = AccountId32::from([255u8; 32]);\n\n\t\tlet block_number: u32 = 99999;\n\n\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 24, "score": 144393.4676968205 }, { "content": "fn create_checksum(hrp: &Vec<u8>, data: &Vec<u8>) -> Vec<u8> {\n\n let mut values: Vec<u8> = hrp_expand(hrp);\n\n values.extend_from_slice(data);\n\n // Pad with 6 zeros\n\n values.extend_from_slice(&[0u8; 6]);\n\n let plm: u32 = polymod(values) ^ 1;\n\n let mut checksum: Vec<u8> = Vec::new();\n\n for p in 0..6 {\n\n checksum.push(((plm >> 5 * (5 - p)) & 0x1f) as u8);\n\n }\n\n checksum\n\n}\n\n\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 25, "score": 143624.42913214955 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n\tlet sc_service::PartialComponents {\n\n\t\tclient, backend, mut task_manager, import_queue, mut keystore_container, select_chain, transaction_pool,\n\n\t\tinherent_data_providers,\n\n\t\tother: (block_import, grandpa_link),\n\n\t} = new_partial(&config)?;\n\n\n\n\tif let Some(url) = &config.keystore_remote {\n\n\t\tmatch remote_keystore(url) {\n\n\t\t\tOk(k) => keystore_container.set_remote_keystore(k),\n\n\t\t\tErr(e) => {\n\n\t\t\t\treturn Err(ServiceError::Other(\n\n\t\t\t\t\tformat!(\"Error hooking up remote keystore for {}: {}\", url, e)))\n\n\t\t\t}\n\n\t\t};\n\n\t}\n\n\n\n\tconfig.network.extra_sets.push(sc_finality_grandpa::grandpa_peers_set_config());\n\n\n\n\tlet (network, 
network_status_sinks, system_rpc_tx, network_starter) =\n", "file_path": "node/src/service.rs", "rank": 26, "score": 141789.3937522206 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n\tlet (client, backend, keystore_container, mut task_manager, on_demand) =\n\n\t\tsc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n\n\n\tconfig.network.extra_sets.push(sc_finality_grandpa::grandpa_peers_set_config());\n\n\n\n\tlet select_chain = sc_consensus::LongestChain::new(backend.clone());\n\n\n\n\tlet transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n\t\tconfig.transaction_pool.clone(),\n\n\t\tconfig.prometheus_registry(),\n\n\t\ttask_manager.spawn_handle(),\n\n\t\tclient.clone(),\n\n\t\ton_demand.clone(),\n\n\t));\n\n\n\n\tlet (grandpa_block_import, _) = sc_finality_grandpa::block_import(\n\n\t\tclient.clone(),\n\n\t\t&(client.clone() as Arc<_>),\n\n\t\tselect_chain.clone(),\n", "file_path": "node/src/service.rs", "rank": 27, "score": 141789.3937522206 }, { "content": "/// Deserialize string to Vec<u8>\n\npub fn de_string_to_bytes<'de, D>(de: D) -> Result<Vec<u8>, D::Error>\n\nwhere\n\n\tD: Deserializer<'de>,\n\n{\n\n\tlet s: &str = Deserialize::deserialize(de)?;\n\n\tOk(s.as_bytes().to_vec())\n\n}\n\n\n\n/// Implement Debug trait for print TokenInfo\n\nimpl fmt::Debug for TokenInfo {\n\n\tfn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n\t\twrite!(\n\n\t\t\tf,\n\n\t\t\t\"{{ etherscan: {}, infura: {}, blockchain: {} }}\",\n\n\t\t\tsp_std::str::from_utf8(&self.etherscan).map_err(|_| fmt::Error)?,\n\n\t\t\tsp_std::str::from_utf8(&self.infura).map_err(|_| fmt::Error)?,\n\n\t\t\tsp_std::str::from_utf8(&self.blockchain).map_err(|_| fmt::Error)?,\n\n\t\t)\n\n\t}\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 29, "score": 140452.3680503238 }, { "content": "#[test]\n\nfn 
test_invalid_expiring_block_number_btc() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\t// Generate random key pair\n\n\t\tlet s = Secp256k1::new();\n\n\t\tlet pair = s.generate_keypair(&mut thread_rng());\n\n\t\tlet public_key = key::PublicKey {\n\n\t\t\tcompressed: true,\n\n\t\t\tkey: pair.1,\n\n\t\t};\n\n\n\n\t\t// Generate pay-to-pubkey-hash address\n\n\t\tlet address = Address::p2pkh(&public_key, Network::Bitcoin);\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([255u8; 32]);\n\n\t\tlet block_number: u32 = crate::EXPIRING_BLOCK_NUMBER_MAX + 1;\n\n\n\n\t\tlet mut bytes = b\"Link Litentry: \".encode();\n\n\t\tlet mut account_vec = account.encode();\n\n\t\tlet mut expiring_block_number_vec = block_number.encode();\n\n\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 30, "score": 138409.9014055683 }, { "content": "/// Convert a usize type to a u8 array.\n\n/// The input is first converted as a string with decimal presentation,\n\n/// and then this string is converted to a byte array with UTF8 encoding.\n\n/// To avoid unnecessary complexity, the current function supports up to\n\n/// 2 digits unsigned decimal (range 0 - 99)\n\nfn usize_to_u8_array(length: usize) -> Result<Vec<u8>, &'static str> {\n\n\tif length >= 100 {\n\n\t\tErr(\"Unexpected ethereum message length!\")\n\n\t} else {\n\n\t\tlet digits = b\"0123456789\".encode();\n\n\t\tlet tens = length / 10;\n\n\t\tlet ones = length % 10;\n\n\n\n\t\tlet mut vec_res: Vec<u8> = Vec::new();\n\n\t\tif tens != 0 {\n\n\t\t\tvec_res.push(digits[tens]);\n\n\t\t}\n\n\t\tvec_res.push(digits[ones]);\n\n\t\tOk(vec_res)\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n", "file_path": "pallets/account-linker/src/util_eth.rs", "rank": 31, "score": 138320.7772182408 }, { "content": "\ttrait Store for Module<T: Config> as OffchainWorkerModule {\n\n\t\t/// Record how many balances stored for Litentry user\n\n\t\tTotalClaims get(fn total_claims): u64;\n\n\n\n\t\t/// Record the accounts 
send claims in latest block\n\n\t\tClaimAccountSet get(fn query_account_set): map hasher(blake2_128_concat) T::AccountId => ();\n\n\n\n\t\t/// ClaimAccountIndex record the index of account claimed asset query in last session\n\n\t\tClaimAccountIndex get(fn claim_account_index): map hasher(blake2_128_concat) T::AccountId => Option<u32>;\n\n\n\n\t\t/// Record account's btc and ethereum balance\n\n\t\tAccountBalance get(fn account_balance): map hasher(blake2_128_concat) T::AccountId => (Option<u128>, Option<u128>);\n\n\n\n\t\t/// Query result on chain\n\n\t\tCommitAccountBalance get(fn commit_account_balance): double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) QueryKey<T::AccountId> => Option<u128>;\n\n\n\n\t\t/// ocw index in last session\n\n\t\tOcwAccountIndex get(fn ocw_account_index): map hasher(blake2_128_concat) T::AccountId => Option<u32>;\n\n\t}\n\n}\n", "file_path": "pallets/offchain-worker/src/lib.rs", "rank": 32, "score": 131636.65134809687 }, { "content": "type AccountId = <<MultiSignature as Verify>::Signer as IdentifyAccount>::AccountId;\n\nconst SEED: u32 = 0;\n\n\n\nbenchmarks!{\n\n \n\n _ {\n\n let b in 1 .. 
1000 => ();\n\n }\n\n link_eth {\n\n\n\n\t\tlet b in ...;\n\n let caller = account(\"caller\", 0, 0);\n\n let account_id: T::AccountId = account(\"Alice\", 0, SEED);\n\n\n\n let index: u32 = 0;\n\n let addr_expected = [16, 146, 71, 235, 177, 95, 237, 92, 255, 45, 73, 190, 133, 132, 185, 41, 14, 77, 9, 207];\n\n let expiring_block_number: u32 = 10000;\n\n let r = [133, 13, 66, 20, 141, 102, 233, 186, 153, 38, 81, 149, 29, 16, 191, 87, 206, 103, 230, 184, 32, 165, 174, 40, 221, 54, 212, 61, 132, 38, 254, 39];\n\n let s = [19, 118, 77, 20, 241, 238, 52, 206, 124, 232, 254, 37, 109, 69, 191, 253, 242, 19, 48, 32, 92, 134, 123, 2, 6, 223, 233, 225, 129, 41, 235, 116];\n\n let v: u8 = 28_u8;\n", "file_path": "pallets/account-linker/src/benchmarking.rs", "rank": 33, "score": 127887.26665774458 }, { "content": "pub fn new_partial(config: &Configuration) -> Result<sc_service::PartialComponents<\n\n\tFullClient, FullBackend, FullSelectChain,\n\n\tsp_consensus::DefaultImportQueue<Block, FullClient>,\n\n\tsc_transaction_pool::FullPool<Block, FullClient>,\n\n\t(\n\n\t\tsc_consensus_aura::AuraBlockImport<\n\n\t\t\tBlock,\n\n\t\t\tFullClient,\n\n\t\t\tsc_finality_grandpa::GrandpaBlockImport<FullBackend, Block, FullClient, FullSelectChain>,\n\n\t\t\tAuraPair\n\n\t\t>,\n\n\t\tsc_finality_grandpa::LinkHalf<Block, FullClient, FullSelectChain>,\n\n\t)\n\n>, ServiceError> {\n\n\tif config.keystore_remote.is_some() {\n\n\t\treturn Err(ServiceError::Other(\n\n\t\t\tformat!(\"Remote Keystores are not supported.\")))\n\n\t}\n\n\tlet inherent_data_providers = sp_inherents::InherentDataProviders::new();\n\n\n", "file_path": "node/src/service.rs", "rank": 34, "score": 123065.12321342825 }, { "content": "/// Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId where\n\n\tAccountPublic: From<<TPublic::Pair as Pair>::Public>\n\n{\n\n\tAccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": 
"node/src/chain_spec.rs", "rank": 35, "score": 122349.77744013318 }, { "content": "fn generate_sig(key_pair: &KeyPair, msg: &Message) -> [u8; 65] {\n\n\tsign(key_pair.secret(), &msg).unwrap().into_electrum()\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 36, "score": 122148.90737348015 }, { "content": "// Fetch json result from remote URL with post method\n\npub fn fetch_json_http_post<'a>(remote_url: &'a [u8], body: &'a [u8]) -> Result<Vec<u8>, &'static str> {\n\n let remote_url_str = core::str::from_utf8(remote_url)\n\n .map_err(|_| \"Error in converting remote_url to string\")?;\n\n\n\n debug::info!(\"Offchain Worker post request url is {}.\", remote_url_str);\n\n\n\n let pending = http::Request::post(remote_url_str, vec![body]).send()\n\n .map_err(|_| \"Error in sending http POST request\")?;\n\n\n\n let response = pending.wait()\n\n .map_err(|_| \"Error in waiting http response back\")?;\n\n\n\n if response.code != 200 {\n\n debug::warn!(\"Unexpected status code: {}\", response.code);\n\n return Err(\"Non-200 status code returned from http request\");\n\n }\n\n\n\n let json_result: Vec<u8> = response.body().collect::<Vec<u8>>();\n\n\n\n let balance =\n\n core::str::from_utf8(&json_result).map_err(|_| \"JSON result cannot convert to string\")?;\n\n\n\n Ok(balance.as_bytes().to_vec())\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 37, "score": 122146.2004836596 }, { "content": "// Fetch json result from remote URL with get method\n\npub fn fetch_json_http_get<'a>(remote_url: &'a [u8]) -> Result<Vec<u8>, &'static str> {\n\n let remote_url_str = core::str::from_utf8(remote_url)\n\n .map_err(|_| \"Error in converting remote_url to string\")?;\n\n\n\n let pending = http::Request::get(remote_url_str).send()\n\n .map_err(|_| \"Error in sending http GET request\")?;\n\n\n\n let response = pending.wait()\n\n .map_err(|_| \"Error in waiting http response back\")?;\n\n\n\n if response.code != 200 {\n\n 
debug::warn!(\"Unexpected status code: {}\", response.code);\n\n return Err(\"Non-200 status code returned from http request\");\n\n }\n\n\n\n let json_result: Vec<u8> = response.body().collect::<Vec<u8>>();\n\n\n\n let balance =\n\n core::str::from_utf8(&json_result).map_err(|_| \"JSON result cannot convert to string\")?;\n\n\n\n Ok(balance.as_bytes().to_vec())\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 38, "score": 120678.28819556108 }, { "content": "// Send request to local server for query api tokens\n\npub fn send_get_token() -> Result<Vec<u8>, &'static str> {\n\n let pending = http::Request::get(super::TOKEN_SERVER_URL).send()\n\n .map_err(|_| \"Error in sending http GET request\")?;\n\n\n\n let response = pending.wait()\n\n .map_err(|_| \"Error in waiting http response back\")?;\n\n\n\n if response.code != 200 {\n\n debug::warn!(\"Unexpected status code: {}\", response.code);\n\n return Err(\"Non-200 status code returned from http request\");\n\n }\n\n\n\n let json_result: Vec<u8> = response.body().collect::<Vec<u8>>();\n\n\n\n Ok(json_result)\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 39, "score": 120456.23417096415 }, { "content": "type BalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;\n", "file_path": "pallets/offchain-worker/src/lib.rs", "rank": 40, "score": 119074.84549839934 }, { "content": "// Parse the token from local server\n\npub fn parse_store_tokens(resp_str: &str) {\n\n let token_info: Result<TokenInfo, _> = serde_json::from_str(&resp_str);\n\n\n\n match token_info {\n\n Ok(info) => {\n\n let s_info = StorageValueRef::persistent(b\"offchain-worker::token\");\n\n s_info.set(&info);\n\n debug::info!(\"Token info get from local server is {:?}.\", &info);\n\n },\n\n Err(_) => {},\n\n }\n\n}\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 41, "score": 113445.2186428674 }, { "content": "pub fn development_config() -> 
Result<ChainSpec, String> {\n\n\tlet wasm_binary = WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n\n\n\n\n\tOk(ChainSpec::from_genesis(\n\n\t\t// Name\n\n\t\t\"Development\",\n\n\t\t// ID\n\n\t\t\"dev\",\n\n\t\tChainType::Development,\n\n\t\tmove || testnet_genesis(\n\n\t\t\twasm_binary,\n\n\t\t\t// Initial PoA authorities\n\n\t\t\tvec![\n\n\t\t\t\tauthority_keys_from_seed(\"Alice\"),\n\n\t\t\t],\n\n\t\t\t// Sudo account\n\n\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n\t\t\t// Pre-funded accounts\n\n\t\t\tvec![\n", "file_path": "node/src/chain_spec.rs", "rank": 42, "score": 113005.37728611976 }, { "content": "pub fn litentry_config() -> Result<ChainSpec, String> {\n\n\tlet wasm_binary = WASM_BINARY.ok_or(\"Development wasm binary not available\".to_string())?;\n\n\n\n\tOk(ChainSpec::from_genesis(\n\n\t\t// Name\n\n\t\t\"Litentry\",\n\n\t\t// ID\n\n\t\t\"Litentry\",\n\n\t\tChainType::Live,\n\n\t\tmove || testnet_genesis(\n\n\t\t\twasm_binary,\n\n\t\t\t// Initial PoA authorities\n\n\t\t\tvec![\n\n\t\t\t\t// 49xBdg1G5MBrtPwPDK7Wv9WLjbRHZ71iRtAs3pRUPMDLVocf\n\n\t\t\t\t(\n\n\t\t\t\t\thex![\"9ce8c6f2c22502322fb29a1af5de753ce2c62e9eeb0a93efe1bd0ad56438e93a\"].unchecked_into(),\n\n\t\t\t\t\thex![\"9ce8c6f2c22502322fb29a1af5de753ce2c62e9eeb0a93efe1bd0ad56438e93a\"].unchecked_into(),\n\n\t\t\t\t),\n\n\t\t\t\t// 4Ackc6jWc31xiMdjPaDr7PzZCynKgi7im1Cc4Y2pRJcNvjo8\n\n\t\t\t\t(\n", "file_path": "node/src/chain_spec.rs", "rank": 43, "score": 113005.37728611976 }, { "content": "pub fn local_testnet_config() -> Result<ChainSpec, String> {\n\n\tlet wasm_binary = WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n\n\n\tOk(ChainSpec::from_genesis(\n\n\t\t// Name\n\n\t\t\"Local Testnet\",\n\n\t\t// ID\n\n\t\t\"local_testnet\",\n\n\t\tChainType::Local,\n\n\t\tmove || testnet_genesis(\n\n\t\t\twasm_binary,\n\n\t\t\t// Initial PoA 
authorities\n\n\t\t\tvec![\n\n\t\t\t\tauthority_keys_from_seed(\"Alice\"),\n\n\t\t\t\tauthority_keys_from_seed(\"Bob\"),\n\n\t\t\t],\n\n\t\t\t// Sudo account\n\n\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n\t\t\t// Pre-funded accounts\n\n\t\t\tvec![\n", "file_path": "node/src/chain_spec.rs", "rank": 44, "score": 110713.68883727278 }, { "content": "#[test]\n\nfn test_unexpected_address_eth() {\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([72u8; 32]);\n\n\t\tlet block_number: u32 = 99999;\n\n\n\n\t\tlet mut gen = Random{};\n\n\t\tlet key_pair = gen.generate().unwrap();\n\n\n\n\t\tlet msg = generate_msg(&account, block_number);\n\n\t\tlet sig = generate_sig(&key_pair, &msg);\n\n\t\tlet (r, s, v) = generate_rsv(&sig);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tAccountLinker::link_eth(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\t0,\n\n\t\t\t\tgen.generate().unwrap().address().to_fixed_bytes(),\n\n\t\t\t\tblock_number,\n\n\t\t\t\tr,\n\n\t\t\t\ts,\n\n\t\t\t\tv),\n\n\t\t\tAccountLinkerError::UnexpectedAddress\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 45, "score": 109042.37667965371 }, { "content": "#[test]\n\nfn test_insert_eth_address() {\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n run_to_block(1);\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([5u8; 32]);\n\n\t\tlet block_number: u32 = 99999;\n\n\n\n\t\tlet mut gen = Random{};\n\n\t\tlet mut expected_vec = Vec::new();\n\n\n\n\t\tfor i in 0..(MAX_ETH_LINKS) {\n\n\n\n\t\t\tlet key_pair = gen.generate().unwrap();\n\n\n\n\t\t\tlet msg = generate_msg(&account, block_number + i as u32);\n\n\t\t\tlet sig = generate_sig(&key_pair, &msg);\n\n\n\n\t\t\tlet (r, s, v) = generate_rsv(&sig);\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 46, "score": 109042.37667965371 }, { "content": "#[test]\n\nfn test_update_eth_address() 
{\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([40u8; 32]);\n\n\t\tlet block_number: u32 = 99999;\n\n\n\n\t\tlet mut gen = Random{};\n\n\t\tfor i in 0..(MAX_ETH_LINKS) {\n\n\t\t\tlet key_pair = gen.generate().unwrap();\n\n\t\t\tlet msg = generate_msg(&account, block_number + i as u32);\n\n\t\t\tlet sig = generate_sig(&key_pair, &msg);\n\n\t\t\tlet (r, s, v) = generate_rsv(&sig);\n\n\n\n\t\t\tassert_ok!(AccountLinker::link_eth(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\ti as u32,\n\n\t\t\t\tkey_pair.address().to_fixed_bytes(),\n\n\t\t\t\tblock_number + i as u32,\n\n\t\t\t\tr,\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 47, "score": 109042.37667965371 }, { "content": "/// Data source to blockchain type\n\npub fn data_source_to_block_chain_type(data_source: DataSource) -> BlockChainType {\n\n match data_source {\n\n DataSource::Invalid => BlockChainType::Invalid, \n\n DataSource::EthEtherScan => BlockChainType::ETH,\n\n DataSource::EthInfura => BlockChainType::ETH,\n\n DataSource::BtcBlockChain => BlockChainType::BTC,\n\n }\n\n}\n\n\n\n/// Http Get URL structure\n\npub struct HttpGet<'a> {\n\n pub blockchain: BlockChainType,\n\n // URL affix\n\n pub prefix: &'a str,\n\n pub delimiter: &'a str,\n\n pub postfix: &'a str,\n\n pub api_token: &'a str,\n\n}\n\n\n\n/// Http Post URL structure\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 48, "score": 108987.61721103246 }, { "content": "#[test]\n\nfn test_eth_address_pool_overflow() {\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([113u8; 32]);\n\n\t\tlet block_number: u32 = 99999;\n\n\n\n\t\tlet mut gen = Random{};\n\n\t\tlet mut expected_vec = Vec::new();\n\n\n\n\t\tfor index in 0..(MAX_ETH_LINKS*2) {\n\n\t\t\tlet key_pair = gen.generate().unwrap();\n\n\n\n\t\t\tlet msg = generate_msg(&account, block_number);\n\n\t\t\tlet sig = 
generate_sig(&key_pair, &msg);\n\n\t\t\tlet (r, s, v) = generate_rsv(&sig);\n\n\n\n\t\t\tassert_ok!(AccountLinker::link_eth(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\tindex as u32,\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 49, "score": 106728.7120934525 }, { "content": "#[test]\n\nfn test_expired_block_number_eth() {\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([0u8; 32]);\n\n\t\tlet block_number: u32 = 0;\n\n\n\n\t\tlet mut gen = Random{};\n\n\t\tlet key_pair = gen.generate().unwrap();\n\n\n\n\t\tlet msg = generate_msg(&account, block_number);\n\n\t\tlet sig = generate_sig(&key_pair, &msg);\n\n\t\tlet (r, s, v) = generate_rsv(&sig);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tAccountLinker::link_eth(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\t0,\n\n\t\t\t\tkey_pair.address().to_fixed_bytes(),\n\n\t\t\t\tblock_number,\n\n\t\t\t\tr,\n\n\t\t\t\ts,\n\n\t\t\t\tv),\n\n\t\t\tAccountLinkerError::LinkRequestExpired\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 50, "score": 106728.7120934525 }, { "content": "type AccountPublic = <MultiSignature as Verify>::Signer;\n", "file_path": "pallets/account-linker/src/benchmarking.rs", "rank": 51, "score": 105655.51365380199 }, { "content": "#[test]\n\nfn test_invalid_expiring_block_number_eth() {\n\n\tnew_test_ext().execute_with(|| {\n\n\n\n\t\tlet account: AccountId32 = AccountId32::from([0u8; 32]);\n\n\t\tlet block_number: u32 = crate::EXPIRING_BLOCK_NUMBER_MAX + 1;\n\n\n\n\t\tlet mut gen = Random{};\n\n\t\tlet key_pair = gen.generate().unwrap();\n\n\n\n\t\tlet msg = generate_msg(&account, block_number);\n\n\t\tlet sig = generate_sig(&key_pair, &msg);\n\n\t\tlet (r, s, v) = 
generate_rsv(&sig);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tAccountLinker::link_eth(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\t0,\n\n\t\t\t\tkey_pair.address().to_fixed_bytes(),\n\n\t\t\t\tblock_number,\n\n\t\t\t\tr,\n\n\t\t\t\ts,\n\n\t\t\t\tv),\n\n\t\t\tAccountLinkerError::InvalidExpiringBlockNumber\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 52, "score": 104541.39389342569 }, { "content": "// Get the API tokens from local server\n\npub fn get_token() {\n\n match send_get_token() {\n\n Ok(json_result) => {\n\n match core::str::from_utf8(&json_result) {\n\n Ok(balance) => parse_store_tokens(balance),\n\n Err(_) => {},\n\n }\n\n },\n\n Err(_) => {},\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 53, "score": 101573.83505132937 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n\tNativeVersion {\n\n\t\truntime_version: VERSION,\n\n\t\tcan_author_with: Default::default(),\n\n\t}\n\n}\n\n\n\n/// We assume that ~10% of the block weight is consumed by `on_initalize` handlers.\n\n/// This is used to limit the maximal weight of a single extrinsic.\n\nconst AVERAGE_ON_INITIALIZE_RATIO: Perbill = Perbill::from_percent(10);\n\n/// We allow `Normal` extrinsics to fill up the block up to 75%, the rest can be used\n\n/// by Operational extrinsics.\n\nconst NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);\n\n/// We allow for 2 seconds of compute with a 6 second average block time.\n\nconst MAXIMUM_BLOCK_WEIGHT: Weight = 2 * WEIGHT_PER_SECOND;\n\n\n\nparameter_types! 
{\n\n\tpub const BlockHashCount: BlockNumber = 2400;\n\n\tpub const Version: RuntimeVersion = VERSION;\n\n\tpub RuntimeBlockLength: BlockLength =\n", "file_path": "runtime/src/lib.rs", "rank": 54, "score": 98098.38583324946 }, { "content": "/// Properties for Litentry.\n\npub fn litentry_properties() -> Properties {\n\n\tlet mut properties = Properties::new();\n\n\n\n\tproperties.insert(\"ss58Format\".into(), 31.into());\n\n\tproperties.insert(\"tokenDecimals\".into(), 12.into());\n\n\tproperties.insert(\"tokenSymbol\".into(), \"LIT\".into());\n\n\n\n\tproperties\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 55, "score": 98094.48276075674 }, { "content": "/// Instantiate all full RPC extensions.\n\npub fn create_full<C, P>(\n\n\tdeps: FullDeps<C, P>,\n\n) -> jsonrpc_core::IoHandler<sc_rpc::Metadata> where\n\n\tC: ProvideRuntimeApi<Block>,\n\n\tC: HeaderBackend<Block> + HeaderMetadata<Block, Error=BlockChainError> + 'static,\n\n\tC: Send + Sync + 'static,\n\n\tC::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,\n\n\tC::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>,\n\n\tC::Api: BlockBuilder<Block>,\n\n\tP: TransactionPool + 'static,\n\n{\n\n\tuse substrate_frame_rpc_system::{FullSystem, SystemApi};\n\n\tuse pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi};\n\n\n\n\tlet mut io = jsonrpc_core::IoHandler::default();\n\n\tlet FullDeps {\n\n\t\tclient,\n\n\t\tpool,\n\n\t\tdeny_unsafe,\n\n\t} = deps;\n", "file_path": "node/src/rpc.rs", "rank": 56, "score": 95100.91976933632 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n\tlet cli = Cli::from_args();\n\n\n\n\tmatch &cli.subcommand {\n\n\t\tSome(Subcommand::Key(cmd)) => cmd.run(&cli),\n\n\t\tSome(Subcommand::BuildSpec(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.sync_run(|config| cmd.run(config.chain_spec, 
config.network))\n\n\t\t},\n\n\t\tSome(Subcommand::CheckBlock(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n\n\t\t\t\tlet PartialComponents { client, task_manager, import_queue, ..}\n\n\t\t\t\t\t= service::new_partial(&config)?;\n\n\t\t\t\tOk((cmd.run(client, import_queue), task_manager))\n\n\t\t\t})\n\n\t\t},\n\n\t\tSome(Subcommand::ExportBlocks(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n", "file_path": "node/src/command.rs", "rank": 57, "score": 95100.91976933632 }, { "content": "var Account = require(\"eth-lib/lib/account\");\n", "file_path": "ts-tests/scripts/smoke-test.ts", "rank": 58, "score": 89780.58482121423 }, { "content": "pub mod base58;\n\npub mod legacy;\n\npub mod witness;", "file_path": "pallets/account-linker/src/btc/mod.rs", "rank": 59, "score": 89040.7038079958 }, { "content": "\tfn correct_dhash160() {\n\n\n\n\t\tlet pk = decode(\"0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6\").unwrap();\n\n\n\n let hash = hash160(&pk);\n\n\n\n let result = decode(\"010966776006953D5567439E5E39F86A0D273BEE\").unwrap();\n\n\t\tlet mut hash_expected = [0u8; 20];\n\n\t\thash_expected[0..20].copy_from_slice(&result[0..20]);\n\n\n\n\t\tassert_eq!(hash, hash_expected);\n\n }\n\n\n\n #[test]\n\n fn correct_btc_addr_from_pk() {\n\n let pk = decode(\"0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6\").unwrap();\n\n let mut pk_input = [0u8; 65];\n\n pk_input[0..65].copy_from_slice(&pk[0..65]);\n\n\n\n let addr = btc_addr_from_pk(&pk_input);\n\n\n\n let addr_expected_hex = decode(\"00010966776006953D5567439E5E39F86A0D273BEED61967F6\").unwrap();\n\n let mut addr_expected = [0u8; 25];\n\n addr_expected[0..25].copy_from_slice(&addr_expected_hex[0..25]);\n\n assert_eq!(addr, addr_expected);\n\n }\n\n\n\n}", 
"file_path": "pallets/account-linker/src/btc/legacy.rs", "rank": 60, "score": 88955.7320626372 }, { "content": "use sha2::{Digest, Sha256};\n\nuse ripemd160::Ripemd160;\n\n\n", "file_path": "pallets/account-linker/src/btc/legacy.rs", "rank": 61, "score": 88953.11021703309 }, { "content": "mod btc;\n\nmod eth;", "file_path": "pallets/account-linker/src/tests/mod.rs", "rank": 62, "score": 88237.6102001228 }, { "content": "\t\t\tOrigin::signed(account.clone()),\n\n\t\t\taccount.clone(),\n\n\t\t\t0,\n\n\t\t\taddr_expected.clone(),\n\n\t\t\tblock_number,\n\n\t\t\tr,\n\n\t\t\ts,\n\n\t\t\tv.to_i32() as u8\n\n\t\t));\n\n\n\n\t\tlet addr_stored = String::from_utf8(AccountLinker::btc_addresses(&account)[0].clone()).unwrap();\n\n\n\n assert_eq!(addr_stored, address.to_string());\n\n\n\n //assert_eq!(\n\n // System::events()[0].event,\n\n // TestEvent::account_linker( Event::<Test>::BtcAddressLinked(account, addr_expected) )\n\n //);\n\n\n\n\t});\n\n}\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 63, "score": 87512.84029488328 }, { "content": "\t\t\taccount.clone(),\n\n\t\t\t0,\n\n\t\t\taddr_expected.clone(),\n\n\t\t\tblock_number,\n\n\t\t\tr,\n\n\t\t\ts,\n\n\t\t\tv.to_i32() as u8\n\n\t\t));\n\n\n\n\t\tlet addr_stored = String::from_utf8(AccountLinker::btc_addresses(&account)[0].clone()).unwrap();\n\n\n\n assert_eq!(addr_stored, address.to_string());\n\n\n\n //assert_eq!(\n\n // System::events()[0].event,\n\n // AccountLinker::Event::<Test>::BtcAddressLinked(account, addr_expected)\n\n //);\n\n\n\n\t});\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 64, "score": 87510.08103417499 }, { "content": "\t\tbytes.append(&mut account_vec);\n\n\t\tbytes.append(&mut expiring_block_number_vec);\n\n\n\n\t\tlet message = BTCMessage::from_slice(&bytes.keccak256()).unwrap();\n\n\n\n\t\tlet (v, rs) = s.sign_recoverable(&message, &pair.0).serialize_compact();\n\n\n\n\t\tlet mut r = [0u8; 32];\n\n\t\tlet mut s = [0u8; 
32];\n\n\n\n\t\tr[..32].copy_from_slice(&rs[..32]);\n\n\t\ts[..32].copy_from_slice(&rs[32..64]);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tAccountLinker::link_btc(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\t0,\n\n\t\t\t\taddress.clone().to_string().as_bytes().to_vec(),\n\n\t\t\t\tblock_number,\n\n\t\t\t\tr,\n\n\t\t\t\ts,\n\n\t\t\t\tv.to_i32() as u8),\n\n\t\t\tAccountLinkerError::InvalidExpiringBlockNumber\n\n\t\t);\n\n\n\n\t});\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 65, "score": 87504.73510976981 }, { "content": "use crate::{mock::*};\n\n\n\nuse codec::Encode;\n\nuse parity_crypto::Keccak256;\n\nuse frame_support::{assert_ok, assert_noop};\n\nuse sp_runtime::AccountId32;\n\n\n\nuse bitcoin::network::constants::Network;\n\nuse bitcoin::util::address::Address;\n\nuse bitcoin::util::key;\n\nuse bitcoin::secp256k1::{Secp256k1, Message as BTCMessage};\n\nuse bitcoin::secp256k1::rand::thread_rng;\n\n\n\n#[test]\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 66, "score": 87504.21120243389 }, { "content": "\t\tlet mut account_vec = account.encode();\n\n\t\tlet mut expiring_block_number_vec = block_number.encode();\n\n\n\n\t\tbytes.append(&mut account_vec);\n\n\t\tbytes.append(&mut expiring_block_number_vec);\n\n\n\n\t\tlet message = BTCMessage::from_slice(&bytes.keccak256()).unwrap();\n\n\n\n\t\tlet (v, rs) = s.sign_recoverable(&message, &pair.0).serialize_compact();\n\n\n\n\t\tlet mut r = [0u8; 32];\n\n\t\tlet mut s = [0u8; 32];\n\n\n\n\t\tr[..32].copy_from_slice(&rs[..32]);\n\n\t\ts[..32].copy_from_slice(&rs[32..64]);\n\n\n\n let addr_expected = address.clone().to_string().as_bytes().to_vec();\n\n\n\n\t\tassert_ok!(AccountLinker::link_btc(\n\n\t\t\tOrigin::signed(account.clone()),\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 67, "score": 87502.25519196728 }, { "content": "\t\tlet mut bytes = b\"Link Litentry: \".encode();\n\n\t\tlet mut account_vec = 
account.encode();\n\n\t\tlet mut expiring_block_number_vec = block_number.encode();\n\n\n\n\t\tbytes.append(&mut account_vec);\n\n\t\tbytes.append(&mut expiring_block_number_vec);\n\n\n\n\t\tlet message = BTCMessage::from_slice(&bytes.keccak256()).unwrap();\n\n\n\n\t\tlet (v, rs) = s.sign_recoverable(&message, &pair.0).serialize_compact();\n\n\n\n\t\tlet mut r = [0u8; 32];\n\n\t\tlet mut s = [0u8; 32];\n\n\n\n\t\tr[..32].copy_from_slice(&rs[..32]);\n\n\t\ts[..32].copy_from_slice(&rs[32..64]);\n\n\n\n let addr_expected = address.clone().to_string().as_bytes().to_vec();\n\n\n\n\t\tassert_ok!(AccountLinker::link_btc(\n", "file_path": "pallets/account-linker/src/tests/btc.rs", "rank": 68, "score": 87500.16175425544 }, { "content": "/// Generate an Aura authority key.\n\npub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {\n\n\t(\n\n\t\tget_from_seed::<AuraId>(s),\n\n\t\tget_from_seed::<GrandpaId>(s),\n\n\t)\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 69, "score": 82104.34742239726 }, { "content": "// Parse the balance from infura response\n\npub fn parse_infura_balances(price_str: &str) -> Option<Vec<u128>> {\n\n //[\n\n // {\"jsonrpc\":\"2.0\",\"id\":1,\"result\":\"0x4563918244f40000\"},\n\n // {\"jsonrpc\":\"2.0\",\"id\":1,\"result\":\"0xff\"}\n\n //]\n\n\n\n let token_info: Vec<InfuraBalance> = serde_json::from_str(price_str).ok()?;\n\n let result: Vec<u128> = token_info.iter().map(|item| match utils::chars_to_u128(&item.result.iter().map(|i| *i as char).collect()) {\n\n Ok(balance) => balance,\n\n Err(_) => 0_u128,\n\n }).collect();\n\n Some(result)\n\n}\n\n\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 70, "score": 77470.18111522752 }, { "content": "// Parse the balance from etherscan response\n\npub fn parse_etherscan_balances(price_str: &str) -> Option<Vec<u128>> {\n\n // {\n\n // \"status\": \"1\",\n\n // \"message\": \"OK\",\n\n // \"result\":\n\n // [\n\n // 
{\"account\":\"0x742d35Cc6634C0532925a3b844Bc454e4438f44e\",\"balance\":\"3804372455842738500000001\"},\n\n // {\"account\":\"0xBE0eB53F46cd790Cd13851d5EFf43D12404d33E8\",\"balance\":\"2571179226430511381996287\"}\n\n // ]\n\n // }\n\n debug::info!(\"Offchain Worker response from etherscan is {:?}\", price_str);\n\n\n\n let token_info: EtherScanResponse = serde_json::from_str(price_str).ok()?;\n\n let result: Vec<u128> = token_info.result.iter().map(|item| match utils::chars_to_u128(&item.balance.iter().map(|i| *i as char).collect()) {\n\n Ok(balance) => balance,\n\n Err(_) => 0_u128,\n\n }).collect();\n\n Some(result)\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 71, "score": 77470.18111522752 }, { "content": "// Parse balances from blockchain info response\n\npub fn parse_blockchain_info_balances(price_str: &str) -> Option<Vec<u128>>{\n\n // {\n\n //\t\"1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa\":{\"final_balance\":6835384571,\"n_tx\":2635,\"total_received\":6835384571},\n\n // \"15EW3AMRm2yP6LEF5YKKLYwvphy3DmMqN6\":{\"final_balance\":0,\"n_tx\":4,\"total_received\":310925609}\n\n // }\n\n let mut balance_vec: Vec<u128> = Vec::new();\n\n\n\n let value: serde_json::Value = serde_json::from_str(price_str).ok()?;\n\n\n\n match value {\n\n serde_json::Value::Object(map_data) => {\n\n for (_, v) in map_data.iter() {\n\n match v[\"final_balance\"].as_u64() {\n\n Some(balance) => balance_vec.push(balance as u128),\n\n None => (), \n\n }\n\n }\n\n },\n\n _ => (),\n\n };\n\n\n\n Some(balance_vec)\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "pallets/offchain-worker/src/urls.rs", "rank": 72, "score": 76269.10305675984 }, { "content": "type AccountPublic = <Signature as Verify>::Signer;\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 73, "score": 74244.9266257034 }, { "content": "// u128 number string to u128\n\npub fn chars_to_u128(vec: &Vec<char>) -> Result<u128, &'static str> {\n\n\t// Check if the number string is 
decimal or hexadecimal (whether starting with 0x or not) \n\n\tlet base = if vec.len() >= 2 && vec[0] == '0' && vec[1] == 'x' {\n\n\t\t// This is a hexadecimal number\n\n\t\t16\n\n\t} else {\n\n\t\t// This is a decimal number\n\n\t\t10\n\n\t};\n\n\n\n\tlet mut result: u128 = 0;\n\n\tfor (i, item) in vec.iter().enumerate() {\n\n\t\t// Skip the 0 and x digit for hex. \n\n\t\t// Using skip here instead of a new vec build to avoid an unnecessary copy operation\n\n\t\tif base == 16 && i < 2 {\n\n\t\t\tcontinue;\n\n\t\t}\n\n\n\n\t\tlet n = item.to_digit(base);\n\n\t\tmatch n {\n", "file_path": "pallets/offchain-worker/src/utils.rs", "rank": 74, "score": 74056.8351171187 }, { "content": "/// Generate a crypto pair from seed.\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n\tTPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n\t\t.expect(\"static values are valid; qed\")\n\n\t\t.public()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 75, "score": 68944.74547560049 }, { "content": "\ttype SystemWeightInfo = ();\n\n\ttype SS58Prefix = SS58Prefix;\n\n}\n\n\n\nimpl account_linker::Config for Test {\n\n\ttype Event = Event;\n\n}\n\n\n\npub type AccountLinkerError = account_linker::Error<Test>;\n\n\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 76, "score": 61356.73522804554 }, { "content": "\ttype BaseCallFilter = ();\n\n\ttype Origin = Origin;\n\n\ttype BlockWeights = ();\n\n\ttype BlockLength = ();\n\n\ttype DbWeight = ();\n\n\ttype Call = Call;\n\n\ttype Index = u32;\n\n\ttype BlockNumber = u32;\n\n\ttype Hash = H256;\n\n\ttype Hashing = BlakeTwo256;\n\n\ttype AccountId = AccountId32;\n\n\ttype Lookup = IdentityLookup<Self::AccountId>;\n\n\ttype Header = generic::Header<Self::BlockNumber, BlakeTwo256>;\n\n\ttype Event = Event;\n\n\ttype BlockHashCount = BlockHashCount;\n\n\ttype Version = ();\n\n\ttype PalletInfo = PalletInfo;\n\n\ttype AccountData = ();\n\n\ttype OnNewAccount = ();\n\n\ttype 
OnKilledAccount = ();\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 77, "score": 61353.4144901214 }, { "content": "#![cfg(feature = \"runtime-benchmarks\")]\n\n\n\nuse super::*;\n\nuse sp_runtime::{traits::{Verify, IdentifyAccount,}, MultiSignature};\n\nuse sp_core::{sr25519, crypto::UncheckedInto,};\n\nuse frame_benchmarking::{benchmarks, account};\n\nuse frame_system::RawOrigin;\n\nuse sp_std::prelude::*;\n\nuse sp_runtime::AccountId32;\n", "file_path": "pallets/account-linker/src/benchmarking.rs", "rank": 78, "score": 61351.77049543973 }, { "content": " \n\n }: _ (RawOrigin::Signed(caller), account_id.clone(), index, addr_expected, expiring_block_number.into(), r, s, v)\n\n\n\n link_btc {\n\n let b in ...;\n\n let caller = account(\"caller\", 0, 0);\n\n let account_id: T::AccountId = account(\"Alice\", 0, SEED);\n\n\n\n let index: u32 = 0;\n\n let addr_expected = vec![49, 51, 121, 55, 106, 72, 52, 85, 57, 113, 68, 112, 69, 77, 77, 119, 87, 90, 117, 52, 99, 122, 52, 107, 55, 67, 81, 107, 90, 72, 100, 101, 113, 71];\n\n let expiring_block_number: u32 = 10000;\n\n let r = [250, 57, 156, 18, 181, 153, 186, 77, 81, 242, 31, 146, 82, 115, 85, 163, 136, 220, 104, 194, 98, 88, 28, 109, 163, 113, 12, 47, 193, 183, 189, 106];\n\n let s = [41, 163, 172, 76, 129, 83, 66, 195, 126, 213, 207, 91, 186, 70, 255, 125, 111, 38, 123, 240, 178, 101, 22, 192, 133, 22, 245, 109, 50, 175, 225, 208];\n\n let v: u8 = 0_u8;\n\n \n\n }: _ (RawOrigin::Signed(caller), account_id.clone(), index, addr_expected, expiring_block_number.into(), r, s, v)\n\n}\n", "file_path": "pallets/account-linker/src/benchmarking.rs", "rank": 79, "score": 61351.65594469238 }, { "content": "use frame_support::{\n\n\tparameter_types,\n\n\ttraits::{OnFinalize, OnInitialize},\n\n};\n\nuse frame_system as system;\n\nuse crate as account_linker;\n\nuse sp_core::H256;\n\nuse sp_runtime::{\n\n\ttraits::{BlakeTwo256, IdentityLookup},\n\n\tAccountId32,\n\n\tgeneric,\n\n};\n\n\n\npub use 
crate::MAX_ETH_LINKS;\n\n\n", "file_path": "pallets/account-linker/src/mock.rs", "rank": 80, "score": 61348.61212400533 }, { "content": "import { ApiPromise, WsProvider, Keyring } from \"@polkadot/api\";\n\nimport { KeyringPair } from '@polkadot/keyring/types';\n\nimport { UInt } from '@polkadot/types/codec';\n\nimport { TypeRegistry } from \"@polkadot/types/create\";\n\n// Import Web3 from 'web3';\n\nimport { expect } from \"chai\";\n\nimport { step } from \"mocha-steps\";\n\nimport { describeLitentry } from \"./utils\"\n\n\n\nconst privateKey = '0xe82c0c4259710bb0d6cf9f9e8d0ad73419c1278a14d375e5ca691e7618103011';\n\n\n\n// Provider is set to localhost for development\n\nconst wsProvider = new WsProvider(\"ws://localhost:9944\");\n\n\n\n// Keyring needed to sign using Alice account\n\nconst keyring = new Keyring({ type: 'sr25519' });\n\n\n\n// Configs of test ropsten account\n\nconst testEthAddress = \"[0x4d88dc5d528a33e4b8be579e9476715f60060582]\";\n\n\n\nconst msgPrefix: string = \"Link Litentry: \";\n\n\n\n// Create Ethereum Link from ALICE\n\nasync function eth_link(api: ApiPromise, alice: KeyringPair) {\n\n\n\n console.log(`\\nStep 1: Link Ethereum account`);\n\n\n\n const registry = new TypeRegistry();\n\n\n\n // Encode prefix with concatenated utf8, instead of SCALE codec to match the litentry node implementation\n\n\tlet encodedPrefix = Buffer.from(msgPrefix, 'utf-8');\n\n \n\n let encodedExpiredBlock = new UInt(registry, 10000, 32).toU8a();\n\n\n\n let encodedMsg = new Uint8Array(encodedPrefix.length + alice.addressRaw.length + encodedExpiredBlock.length);\n\n encodedMsg.set(encodedPrefix);\n\n encodedMsg.set(alice.addressRaw, encodedPrefix.length);\n\n encodedMsg.set(encodedExpiredBlock, encodedPrefix.length + alice.addressRaw.length);\n\n\n\n // Web3 is used to sign the message with ethereum prefix (\"\\x19Ethereum ...\")\n\n const Web3 = require(\"web3\");\n\n const web3 = new Web3();\n\n // Convert byte array to hex string\n\n let hexString = \"0x\" + 
Buffer.from(encodedMsg).toString('hex');\n\n\n\n let signedMsg = web3.eth.accounts.sign(hexString, privateKey);\n\n \n\n // Convert ethereum address to bytes array\n\n let ethAddressBytes = web3.utils.hexToBytes(web3.eth.accounts.privateKeyToAccount(privateKey).address);\n\n\n\n console.log(`r is ${signedMsg.r}`);\n\n console.log(`s is ${signedMsg.s}`);\n\n console.log(`v is ${signedMsg.v}`);\n\n\n\n const transaction = api.tx.accountLinkerModule.linkEth(alice.address, 0, ethAddressBytes, 10000, signedMsg.r, signedMsg.s, signedMsg.v);\n\n\n\n const link = new Promise<{ block: string }>(async (resolve, reject) => {\n\n\t\tconst unsub = await transaction.signAndSend(alice, (result) => {\n\n\t\t\tconsole.log(`Link creation is ${result.status}`);\n\n\t\t\tif (result.status.isInBlock) {\n\n\t\t\t\tconsole.log(`Link included at blockHash ${result.status.asInBlock}`);\n\n console.log(`Waiting for finalization... (can take a minute)`);\n\n } else if (result.status.isFinalized) {\n\n\t\t\t\tconsole.log(`Transfer finalized at blockHash ${result.status.asFinalized}`);\n\n\t\t\t\tunsub();\n\n\t\t\t\tresolve({\n\n\t\t\t\t\tblock: result.status.asFinalized.toString(),\n\n\t\t\t\t});\n\n\t\t\t}\n\n\t\t});\n\n\t});\n\n\treturn link;\n\n\n\n}\n\n\n\n// Retrieve Alice & Link Storage\n\nasync function check_linking_state(api: ApiPromise, alice: KeyringPair) {\n\n\n\n\tconsole.log(`\\nStep 2: Retrieving linking state of Alice `);\n\n\n\n\t// Retrieve Alice account with new nonce value\n\n\tconst { nonce, data: balance } = await api.query.system.account(alice.address);\n\n\tconsole.log(`Alice Substrate Account (nonce: ${nonce}) balance, free: ${balance.free}`);\n\n\n\n\tconst linkedEthAddress = (await api.query.accountLinkerModule.ethereumLink(alice.address));\n\n console.log(`Linked Ethereum addresses of Alice are: ${linkedEthAddress.toString()}`);\n\n\n\n\treturn linkedEthAddress;\n\n}\n\n\n\n\n\n// Claim Assets for Alice\n\nasync function asset_claim(api: ApiPromise, alice: 
KeyringPair) {\n\n\n\n\tconsole.log(`\\nStep 3: Claim assets for Alice`);\n\n\n\n\tconst transaction = await api.tx.offchainWorkerModule.assetClaim();\n\n\n\n\tconst data = new Promise<{ block: string }>(async (resolve, reject) => {\n\n\t\tconst unsub = await transaction.signAndSend(alice, (result) => {\n\n\t\t\tconsole.log(`Transfer is ${result.status}`);\n\n\t\t\tif (result.status.isInBlock) {\n\n\t\t\t\tconsole.log(`Transfer included at blockHash ${result.status.asInBlock}`);\n\n console.log(`Waiting for finalization... (can take a minute)`);\n\n } else if (result.status.isFinalized) {\n\n\t\t\t\tconsole.log(`Transfer finalized at blockHash ${result.status.asFinalized}`);\n\n\t\t\t\tunsub();\n\n\t\t\t\tresolve({\n\n\t\t\t\t\tblock: result.status.asFinalized.toString(),\n\n\t\t\t\t});\n\n\t\t\t}\n\n\t\t});\n\n\t});\n\n\treturn data;\n\n}\n\n\n\n// Retrieve assets balances of Alice\n\nasync function get_assets(api: ApiPromise, alice: KeyringPair) {\n\n\n\n\tconsole.log(`\\nStep 4: Retrieving assets of Alice`);\n\n\n\n\t// Retrieve Alice account with new nonce value\n\n\tconst { nonce, data: balance } = await api.query.system.account(alice.address);\n\n\tconsole.log(`Alice Substrate Account (nonce: ${nonce}) balance, free: ${balance.free}`);\n\n\n\n\tconst assetsBalances = (await api.query.offchainWorkerModule.accountBalance(alice.address));\n\n console.log(`Linked Ethereum balances of Alice are: ${assetsBalances.toString()}`);\n\n \n\n\treturn assetsBalances;\n\n\n\n}\n\n\n\ndescribeLitentry(\"Test Ethereum Link and Balance Fetch\", ``, (context) =>{\n\n\n\n step(\"Create Ethereum Link\", async function () {\n\n await eth_link(context.api, context.alice);\n\n })\n\n\n\n step(\"Retrieving Alice's linked Ethereum accounts\", async function () {\n\n const ethAddr = await check_linking_state(context.api, context.alice);\n\n \n\n expect(ethAddr.toString()).to.equal(testEthAddress);\n\n })\n\n\n\n step(\"Claim assets for Alice\", async function () {\n\n await 
asset_claim(context.api, context.alice);\n\n })\n\n\n\n step(\"Retrieving assets information of Alice\", async function () {\n\n // First wait for 36s ~ 6 blocks\n\n await new Promise(r => setTimeout(r, 36000));\n\n const balances = await get_assets(context.api, context.alice);\n\n // TODO fetch real time balance and compare it here\n\n expect(balances.toString()).to.equal(`[null,\"0x00000000000000004563918244f40000\"]`);\n\n })\n\n\n\n});\n", "file_path": "ts-tests/tests/test-eth-balance.ts", "rank": 81, "score": 60758.49735609532 }, { "content": "\t\twhile j < size {\n\n\t\t\tresult.push(ALPHABET[buffer[j] as usize]);\n\n\t\t\tj += 1;\n\n\t\t}\n\n\n\n\t\tresult\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::ToBase58;\n\n\tuse std::str::from_utf8;\n\n\tuse hex::decode;\n\n\n\n\t#[test]\n\n\tfn test_to_base58_basic() {\n\n\t\tassert_eq!(from_utf8(&b\"\".to_base58()).unwrap(), \"\");\n\n\t\tassert_eq!(from_utf8(&[32].to_base58()).unwrap(), \"Z\");\n\n\t\tassert_eq!(from_utf8(&[45].to_base58()).unwrap(), \"n\");\n", "file_path": "pallets/account-linker/src/btc/base58.rs", "rank": 82, "score": 58440.5217814388 }, { "content": "mod tests {\n\n\tuse super::*;\n\n\tuse std::str::from_utf8;\n\n\n\n\t#[test]\n\n\tfn test_to_base32_basic() {\n\n\t\tassert_eq!(from_utf8(&vec![0x00, 0x01, 0x02].encode(b\"bech32\".to_vec()).unwrap()).unwrap(), \"bech321qpz4nc4pe\");\n\n }\n\n\n\n #[test]\n\n fn valid_address() {\n\n let pairs: Vec<(&str, Vec<u8>)> = vec![\n\n (\n\n \"BC1QW508D6QEJXTDG4Y5R3ZARVARY0C5XW7KV8F3T4\",\n\n vec![\n\n 0x00, 0x14, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,\n\n 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6\n\n ]\n\n ),\n\n (\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 83, "score": 58438.2709417818 }, { "content": "\n\nuse sp_std::prelude::*;\n\n\n\npub struct WitnessProgram {\n\n /// Witness program version\n\n pub version: u8,\n\n /// Witness program content\n\n pub program: 
Vec<u8>\n\n}\n\n\n\nimpl WitnessProgram {\n\n /// Converts a Witness Program to a SegWit Address\n\n pub fn to_address(&self, hrp: Vec<u8>) -> Result<Vec<u8>, &'static str> {\n\n // Verify that the program is valid\n\n let mut data: Vec<u8> = vec![self.version];\n\n // Convert 8-bit program into 5-bit\n\n let p5 = self.program.to_base32();\n\n // let p5 = convert_bits(self.program.to_vec(), 8, 5, true)?;\n\n data.extend_from_slice(&p5);\n\n let b32 = data.encode(hrp)?;\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 84, "score": 58436.76789756167 }, { "content": "//! Based on https://github.com/debris/base58/blob/master/src/lib.rs\n\n//! works only up to 128 bytes\n\nuse sp_std::prelude::*;\n\n\n\nconst ALPHABET: &'static [u8] = b\"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz\";\n\n\n\n/// A trait for converting a value to base58 encoded string.\n", "file_path": "pallets/account-linker/src/btc/base58.rs", "rank": 85, "score": 58436.66009733074 }, { "content": " // while bits >= to {\n\n // bits -= to;\n\n // ret.push(((acc >> bits) & maxv) as u8);\n\n // }\n\n // }\n\n // if bits > 0 {\n\n // ret.push(((acc << (to - bits)) & maxv) as u8);\n\n // }\n\n\n\n // ret\n\n // }\n\n\n\n\tfn to_base32(&self) -> Vec<u8> {\n\n // Amount of bits left over from last round, stored in buffer.\n\n let mut buffer_bits = 0u32;\n\n // Holds all unwritten bits left over from last round. The bits are stored beginning from\n\n // the most significant bit. E.g. 
if buffer_bits=3, then the byte with bits a, b and c will\n\n // look as follows: [a, b, c, 0, 0, 0, 0, 0]\n\n let mut buffer: u8 = 0;\n\n\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 86, "score": 58434.87702820907 }, { "content": " Ok(b32)\n\n }\n\n\n\n /// Extracts a WitnessProgram out of a provided script public key\n\n pub fn from_scriptpubkey(pubkey: &[u8]) -> Result<Self, &'static str> {\n\n // We need a version byte and a program length byte, with a program at \n\n // least 2 bytes long.\n\n if pubkey.len() < 4 {\n\n return Err(\"TooShort\")\n\n }\n\n let proglen: usize = pubkey[1] as usize;\n\n // Check that program length byte is consistent with pubkey length\n\n if pubkey.len() != 2 + proglen {\n\n return Err(\"InvalidLengthByte\")\n\n }\n\n // Process script version\n\n let mut v: u8 = pubkey[0];\n\n if v > 0x50 {\n\n v -= 0x50;\n\n }\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 87, "score": 58433.758994247524 }, { "content": "\t\t\t\tbuffer[j] = (carry % 58) as u8;\n\n\t\t\t\tcarry /= 58;\n\n\n\n\t\t\t\t// in original trezor implementation it was underflowing\n\n\t\t\t\tif j > 0 {\n\n\t\t\t\t\tj -= 1;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\ti += 1;\n\n\t\t\thigh = j;\n\n\t\t}\n\n\n\n\t\tlet mut j = buffer.iter().take_while(|x| **x == 0).count();\n\n\n\n\t\tlet mut result = Vec::new();\n\n\t\tfor _ in 0..zcount {\n\n\t\t\tresult.push(b'1');\n\n\t\t}\n\n\n", "file_path": "pallets/account-linker/src/btc/base58.rs", "rank": 88, "score": 58431.90492144783 }, { "content": "\t#[test]\n\n\tfn test_to_base58_bitcoin_repo_cases() {\n\n\t\tlet test_cases = vec![\n\n\t\t\t(\"\", \"\"),\n\n\t\t\t(\"61\", \"2g\"),\n\n\t\t\t(\"626262\", \"a3gV\"),\n\n\t\t\t(\"636363\", \"aPEr\"),\n\n\t\t\t(\"73696d706c792061206c6f6e6720737472696e67\", \"2cFupjhnEsSn59qHXstmK2ffpLv2\"),\n\n\t\t\t(\"00eb15231dfceb60925886b67d065299925915aeb172c06647\", \"1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L\"),\n\n\t\t\t(\"516b6fcd0f\", 
\"ABnLTmg\"),\n\n\t\t\t(\"bf4f89001e670274dd\", \"3SEo3LWLoPntC\"),\n\n\t\t\t(\"572e4794\", \"3EFU7m\"),\n\n\t\t\t(\"ecac89cad93923c02321\", \"EJDM8drfXA6uyA\"),\n\n\t\t\t(\"10c8511e\", \"Rt5zm\"),\n\n\t\t\t(\"00000000000000000000\", \"1111111111\"),\n\n\t\t\t(\"000111d38e5fc9071ffcd20b4a763cc9ae4f252bb4e48fd66a835e252ada93ff480d6dd43dc62a641155a5\", \"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz\"),\n\n\t\t\t(\"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff\", \"1cWB5HCBdLjAuqGGReWE3R3CguuwSjw6RHn39s2yuDRTS5NsBgNiFpWgAnEx6VQi8csexkgYw3mdYrMHr8x9i7aEwP8kZ7vccXWqKDvGv3u1GxFKPuAkn8JCPPGDMf3vMMnbzm6Nh9zh1gcNsMvH3ZNLmP5fSG6DGbbi2tuwMWPthr4boWwCxf7ewSgNQeacyozhKDDQQ1qL5fQFUW52QKUZDZ5fw3KXNQJMcNTcaB723LchjeKun7MuGW5qyCBZYzA1KjofN1gYBV3NqyhQJ3Ns746GNuf9N2pQPmHz4xpnSrrfCvy6TVVz5d4PdrjeshsWQwpZsZGzvbdAdN8MKV5QsBDY\")\n\n\t\t];\n\n\n\n\t\tfor test_case in test_cases.into_iter() {\n\n\t\t\tlet (input, output) = test_case;\n\n\t\t\tlet input = decode(input).unwrap();\n\n\t\t\tassert_eq!(from_utf8(&input.to_base58()).unwrap(), output);\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "pallets/account-linker/src/btc/base58.rs", "rank": 89, "score": 58431.55386160518 }, { "content": " let program = &pubkey[2..];\n\n Ok(WitnessProgram {\n\n version: v,\n\n program: program.to_vec()\n\n })\n\n }\n\n}\n\n\n\nconst SEP: u8 = b'1';\n\nconst ALPHABET: &'static [u8] = b\"qpzry9x8gf2tvdw0s3jn54khce6mua7l\";\n\n\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 90, "score": 58430.02602814874 }, { "content": 
"//! Based on https://github.com/sipa/bech32/tree/master/ref/rust\n\n//! // Copyright (c) 2017 Clark Moody\n\n//\n\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n\n// of this software and associated documentation files (the \"Software\"), to deal\n\n// in the Software without restriction, including without limitation the rights\n\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n// copies of the Software, and to permit persons to whom the Software is\n\n// furnished to do so, subject to the following conditions:\n\n//\n\n// The above copyright notice and this permission notice shall be included in\n\n// all copies or substantial portions of the Software.\n\n//\n\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n\n// THE SOFTWARE.\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 91, "score": 58429.712285059046 }, { "content": " buffer_bits += 3;\n\n }\n\n\n\n // There can be at most two u5s left in the buffer after processing all bytes, write them.\n\n if buffer_bits >= 5 {\n\n result.push((buffer & 0b1111_1000) >> 3);\n\n buffer <<= 5;\n\n buffer_bits -= 5;\n\n }\n\n\n\n if buffer_bits != 0 {\n\n result.push(buffer >> 3);\n\n }\n\n\n\n result\n\n\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 92, "score": 58428.71063308068 }, { "content": "\t\tassert_eq!(from_utf8(&[48].to_base58()).unwrap(), \"q\");\n\n\t\tassert_eq!(from_utf8(&[49].to_base58()).unwrap(), 
\"r\");\n\n\t\tassert_eq!(from_utf8(&[57].to_base58()).unwrap(), \"z\");\n\n\t\tassert_eq!(from_utf8(&[45, 49].to_base58()).unwrap(), \"4SU\");\n\n\t\tassert_eq!(from_utf8(&[49, 49].to_base58()).unwrap(), \"4k8\");\n\n\t\tassert_eq!(from_utf8(&b\"abc\".to_base58()).unwrap(), \"ZiCa\");\n\n\t\tassert_eq!(from_utf8(&b\"1234598760\".to_base58()).unwrap(), \"3mJr7AoUXx2Wqd\");\n\n\t\tassert_eq!(from_utf8(&b\"abcdefghijklmnopqrstuvwxyz\".to_base58()).unwrap(), \"3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f\");\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_to_base58_initial_zeros() {\n\n\t\tassert_eq!(from_utf8(&b\"\\0abc\".to_base58()).unwrap(), \"1ZiCa\");\n\n\t\tassert_eq!(from_utf8(&b\"\\0\\0abc\".to_base58()).unwrap(), \"11ZiCa\");\n\n\t\tassert_eq!(from_utf8(&b\"\\0\\0\\0abc\".to_base58()).unwrap(), \"111ZiCa\");\n\n\t\tassert_eq!(from_utf8(&b\"\\0\\0\\0\\0abc\".to_base58()).unwrap(), \"1111ZiCa\");\n\n\t}\n\n\n\n\t/// https://github.com/bitcoin/bitcoin/blob/master/src/test/data/base58_encode_decode.json\n\n\t/// NB: left is hex data\n", "file_path": "pallets/account-linker/src/btc/base58.rs", "rank": 93, "score": 58428.58962156666 }, { "content": " \"bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx\",\n\n vec![\n\n 0x51, 0x28, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,\n\n 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,\n\n 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c,\n\n 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6\n\n ]\n\n ),\n\n (\n\n \"BC1SW50QA3JX3S\",\n\n vec![\n\n 0x60, 0x02, 0x75, 0x1e\n\n ]\n\n ),\n\n (\n\n \"bc1zw508d6qejxtdg4y5r3zarvaryvg6kdaj\",\n\n vec![\n\n 0x52, 0x10, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,\n\n 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23\n\n ]\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 94, "score": 58426.07972685563 }, { "content": " let mut result = Vec::new();\n\n\n\n\n\n for b in self.into_iter() {\n\n // Write first u5 if we 
have to write two u5s this round. That only happens if the\n\n // buffer holds too many bits, so we don't have to combine buffer bits with new bits\n\n // from this rounds byte.\n\n if buffer_bits >= 5 {\n\n result.push((buffer & 0b1111_1000) >> 3);\n\n buffer <<= 5;\n\n buffer_bits -= 5;\n\n }\n\n\n\n // Combine all bits from buffer with enough bits from this rounds byte so that they fill\n\n // a u5. Save remaining bits from byte to buffer.\n\n let from_buffer = buffer >> 3;\n\n let from_byte = b >> (3 + buffer_bits); // buffer_bits <= 4\n\n\n\n result.push(from_buffer | from_byte);\n\n buffer = b << (5 - buffer_bits);\n", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 95, "score": 58426.07972685563 }, { "content": " ),\n\n ];\n\n for p in pairs {\n\n let (address, scriptpubkey) = p;\n\n\n\n let hrp = b\"bc\".to_vec();\n\n\n\n let spk_result = WitnessProgram::from_scriptpubkey(&scriptpubkey);\n\n assert!(spk_result.is_ok());\n\n let prog = spk_result.unwrap();\n\n\n\n let enc_result = prog.to_address(hrp);\n\n assert!(enc_result.is_ok());\n\n\n\n let enc_address = enc_result.unwrap();\n\n assert_eq!(address.to_lowercase(), from_utf8(&enc_address).unwrap().to_lowercase());\n\n }\n\n }\n\n \n\n}", "file_path": "pallets/account-linker/src/btc/witness.rs", "rank": 96, "score": 58426.07972685563 }, { "content": "\t\t\tassert_ok!(AccountLinker::link_eth(\n\n\t\t\t\tOrigin::signed(account.clone()),\n\n\t\t\t\taccount.clone(),\n\n\t\t\t\ti as u32,\n\n\t\t\t\tkey_pair.address().to_fixed_bytes(),\n\n\t\t\t\tblock_number + i as u32,\n\n\t\t\t\tr,\n\n\t\t\t\ts,\n\n\t\t\t\tv\n\n\t\t\t));\n\n\n\n assert_eq!(AccountLinker::eth_addresses(&account).len(), i+1);\n\n //assert_eq!(\n\n // System::events()[i].event,\n\n // TestEvent::account_linker( Event::<Test>::EthAddressLinked(\n\n // account.clone(),\n\n // key_pair.address().to_fixed_bytes().to_vec())\n\n // )\n\n 
//);\n\n\t\t\texpected_vec.push(key_pair.address().to_fixed_bytes());\n\n\t\t}\n\n\t\tassert_eq!(AccountLinker::eth_addresses(&account), expected_vec);\n\n\t});\n\n}\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 97, "score": 57641.886941355886 }, { "content": "\t\t\t\ts,\n\n\t\t\t\tv\n\n\t\t\t));\n\n\t\t}\n\n\n\n\t\tlet index: u32 = 2 as u32;\n\n\t\t// Retrieve previous addr\n\n\t\tlet addr_before_update = AccountLinker::eth_addresses(&account)[index as usize];\n\n\t\t// Update addr at slot `index`\n\n\t\tlet key_pair = gen.generate().unwrap();\n\n\t\tlet block_number = block_number + 9 as u32;\n\n\t\tlet msg = generate_msg(&account, block_number);\n\n\t\tlet sig = generate_sig(&key_pair, &msg);\n\n\t\tlet (r, s, v) = generate_rsv(&sig);\n\n\n\n\t\tassert_ok!(AccountLinker::link_eth(\n\n\t\t\tOrigin::signed(account.clone()),\n\n\t\t\taccount.clone(),\n\n\t\t\tindex,\n\n\t\t\tkey_pair.address().to_fixed_bytes(),\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 98, "score": 57641.69287189263 }, { "content": "use crate::{mock::*};\n\n\n\nuse codec::Encode;\n\nuse parity_crypto::Keccak256;\n\nuse parity_crypto::publickey::{Random, Generator, Message, sign, KeyPair};\n\nuse frame_support::{assert_ok, assert_noop};\n\nuse sp_runtime::AccountId32;\n\n\n", "file_path": "pallets/account-linker/src/tests/eth.rs", "rank": 99, "score": 57636.59681779517 } ]
Rust
ezgui/src/screen_geom.rs
accelsao/abstreet
eca71d27c95abd74a96863ed20bbd92c7850cd33
use crate::Canvas; use geom::{trim_f64, Polygon, Pt2D}; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Copy, PartialEq)] pub struct ScreenPt { pub x: f64, pub y: f64, } impl ScreenPt { pub fn new(x: f64, y: f64) -> ScreenPt { ScreenPt { x, y } } pub fn to_pt(self) -> Pt2D { Pt2D::new(self.x, self.y) } } impl From<winit::dpi::LogicalPosition<f64>> for ScreenPt { fn from(lp: winit::dpi::LogicalPosition<f64>) -> ScreenPt { ScreenPt { x: lp.x, y: lp.y } } } #[derive(Clone, Debug)] pub struct ScreenRectangle { pub x1: f64, pub y1: f64, pub x2: f64, pub y2: f64, } impl ScreenRectangle { pub fn top_left(top_left: ScreenPt, dims: ScreenDims) -> ScreenRectangle { ScreenRectangle { x1: top_left.x, y1: top_left.y, x2: top_left.x + dims.width, y2: top_left.y + dims.height, } } pub fn placeholder() -> ScreenRectangle { ScreenRectangle { x1: 0.0, y1: 0.0, x2: 0.0, y2: 0.0, } } pub fn contains(&self, pt: ScreenPt) -> bool { pt.x >= self.x1 && pt.x <= self.x2 && pt.y >= self.y1 && pt.y <= self.y2 } pub fn pt_to_percent(&self, pt: ScreenPt) -> Option<(f64, f64)> { if self.contains(pt) { Some(( (pt.x - self.x1) / self.width(), (pt.y - self.y1) / self.height(), )) } else { None } } pub fn percent_to_pt(&self, x: f64, y: f64) -> ScreenPt { ScreenPt::new(self.x1 + x * self.width(), self.y1 + y * self.height()) } pub fn width(&self) -> f64 { self.x2 - self.x1 } pub fn height(&self) -> f64 { self.y2 - self.y1 } pub fn dims(&self) -> ScreenDims { ScreenDims::new(self.x2 - self.x1, self.y2 - self.y1) } pub fn center(&self) -> ScreenPt { ScreenPt::new((self.x1 + self.x2) / 2.0, (self.y1 + self.y2) / 2.0) } pub fn to_polygon(&self) -> Polygon { Polygon::rectangle(self.width(), self.height()).translate(self.x1, self.y1) } } #[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] pub struct ScreenDims { pub width: f64, pub height: f64, } impl ScreenDims { pub fn new(width: f64, height: f64) -> ScreenDims { ScreenDims { width: trim_f64(width), height: trim_f64(height), } } 
pub fn top_left_for_corner(&self, corner: ScreenPt, canvas: &Canvas) -> ScreenPt { if corner.x + self.width < canvas.window_width { if corner.y + self.height < canvas.window_height { corner } else { ScreenPt::new(corner.x, corner.y - self.height) } } else { if corner.y + self.height < canvas.window_height { ScreenPt::new(corner.x - self.width, corner.y) } else { ScreenPt::new(corner.x - self.width, corner.y - self.height) } } } pub fn scaled(&self, factor: f64) -> ScreenDims { ScreenDims::new(self.width * factor, self.height * factor) } } impl From<winit::dpi::LogicalSize<f64>> for ScreenDims { fn from(size: winit::dpi::LogicalSize<f64>) -> ScreenDims { ScreenDims { width: size.width, height: size.height, } } } impl From<ScreenDims> for winit::dpi::LogicalSize<f64> { fn from(dims: ScreenDims) -> winit::dpi::LogicalSize<f64> { winit::dpi::LogicalSize::new(dims.width, dims.height) } }
use crate::Canvas; use geom::{trim_f64, Polygon, Pt2D}; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Copy, PartialEq)] pub struct ScreenPt { pub x: f64, pub y: f64, } impl ScreenPt { pub fn new(x: f64, y: f64) -> ScreenPt { ScreenPt { x, y } } pub fn to_pt(self) -> Pt2D { Pt2D::new(self.x, self.y) } } impl From<winit::dpi::LogicalPosition<f64>> for ScreenPt { fn from(lp: winit::dpi::LogicalPosition<f64>) -> ScreenPt { ScreenPt { x: lp.x, y: lp.y } } } #[derive(Clone, Debug)] pub struct ScreenRectangle { pub x1: f64, pub y1: f64, pub x2: f64, pub y2: f64, } impl ScreenRectangle { pub fn top_left(top_left: ScreenPt, dims: ScreenDims) -> ScreenRectangle { ScreenRectangle { x1: top_left.x, y1: top_left.y, x2: top_left.x + dims.width, y2: top_left.y + dims.height, } } pub fn placeholder() -> ScreenRectangle { ScreenRectangle { x1: 0.0, y1: 0.0, x2: 0.0, y2: 0.0, } } pub fn contains(&self, pt: ScreenPt) -> bool { pt.x >= self.x1 && pt.x <= self.x2 && pt.y >= self.y1 && pt.y <= self.y2 } pub fn pt_to_percent(&self, pt: ScreenPt) -> Option<(f64, f64)> { if self.contains(pt) { Some(( (pt.x - self.x1) / self.width(), (pt.y - self.y1) / self.height(), )) } else { None } } pub fn percent_to_pt(&self, x: f64, y: f64) -> ScreenPt { ScreenPt::new(self.x1 + x * self.width(), self.y1 + y * self.height()) } pub fn width(&self) -> f64 { self.x2 - self.x1 } pub fn height(&self) -> f64 { self.y2 - self.y1 } pub fn dims(&self) -> ScreenDims { ScreenDims::new(self.x2 - self.x1, self.y2 - self.y1) } pub fn center(&self) -> ScreenPt { ScreenPt::new((self.x1 + self.x2) / 2.0, (self.y1 + self.y2) / 2.0) } pub fn to_polygon(&self) -> Polygon { Polygon::rectangle(self.width(), self.height()).translate(self.x1, self.y1) } } #[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] pub struct ScreenDims { pub width: f64, pub height: f64, } impl ScreenDims { pub fn new(width: f64, height: f64) -> ScreenDims { ScreenDims { width: trim_f64(width), height: trim_f64(height), } } 
pub fn top_left_for_corner(&self, corner: ScreenPt, canvas: &Canvas) -> ScreenPt {
} pub fn scaled(&self, factor: f64) -> ScreenDims { ScreenDims::new(self.width * factor, self.height * factor) } } impl From<winit::dpi::LogicalSize<f64>> for ScreenDims { fn from(size: winit::dpi::LogicalSize<f64>) -> ScreenDims { ScreenDims { width: size.width, height: size.height, } } } impl From<ScreenDims> for winit::dpi::LogicalSize<f64> { fn from(dims: ScreenDims) -> winit::dpi::LogicalSize<f64> { winit::dpi::LogicalSize::new(dims.width, dims.height) } }
if corner.x + self.width < canvas.window_width { if corner.y + self.height < canvas.window_height { corner } else { ScreenPt::new(corner.x, corner.y - self.height) } } else { if corner.y + self.height < canvas.window_height { ScreenPt::new(corner.x - self.width, corner.y) } else { ScreenPt::new(corner.x - self.width, corner.y - self.height) } }
if_condition
[ { "content": "fn area_under_curve(raw: Vec<(Time, usize)>, width: f64, height: f64) -> Polygon {\n\n assert!(!raw.is_empty());\n\n let min_x = Time::START_OF_DAY;\n\n let min_y = 0;\n\n let max_x = raw.last().unwrap().0;\n\n let max_y = raw.iter().max_by_key(|(_, cnt)| *cnt).unwrap().1;\n\n\n\n let mut pts = Vec::new();\n\n for (t, cnt) in raw {\n\n pts.push(lttb::DataPoint::new(\n\n width * (t - min_x) / (max_x - min_x),\n\n height * (1.0 - (((cnt - min_y) as f64) / ((max_y - min_y) as f64))),\n\n ));\n\n }\n\n let mut downsampled = Vec::new();\n\n for pt in lttb::lttb(pts, 100) {\n\n downsampled.push(Pt2D::new(pt.x, pt.y));\n\n }\n\n downsampled.push(Pt2D::new(width, height));\n\n downsampled.push(downsampled[0]);\n\n Ring::must_new(downsampled).to_polygon()\n\n}\n\n\n", "file_path": "game/src/sandbox/speed.rs", "rank": 0, "score": 394289.87476491835 }, { "content": "// TODO If this proves useful, lift to geom\n\npub fn thick_lineseries(pts: Vec<Pt2D>, width: Distance) -> Polygon {\n\n use lyon::math::{point, Point};\n\n use lyon::path::Path;\n\n use lyon::tessellation::geometry_builder::{BuffersBuilder, Positions, VertexBuffers};\n\n use lyon::tessellation::{StrokeOptions, StrokeTessellator};\n\n\n\n let mut builder = Path::builder();\n\n for (idx, pt) in pts.into_iter().enumerate() {\n\n let pt = point(pt.x() as f32, pt.y() as f32);\n\n if idx == 0 {\n\n builder.move_to(pt);\n\n } else {\n\n builder.line_to(pt);\n\n }\n\n }\n\n let path = builder.build();\n\n\n\n let mut geom: VertexBuffers<Point, u32> = VertexBuffers::new();\n\n let mut buffer = BuffersBuilder::new(&mut geom, Positions);\n\n StrokeTessellator::new()\n", "file_path": "ezgui/src/widgets/line_plot.rs", "rank": 1, "score": 335068.7155076751 }, { "content": "pub fn trim_f64(x: f64) -> f64 {\n\n (x * 10_000.0).round() / 10_000.0\n\n}\n", "file_path": "geom/src/lib.rs", "rank": 2, "score": 318665.6311725254 }, { "content": "fn lerp(pct: f64, (x1, x2): (f32, f32)) -> f32 {\n\n x1 + (pct as f32) * 
(x2 - x1)\n\n}\n\n\n\nimpl FancyColor {\n\n pub(crate) fn style(&self, pt: Pt2D) -> [f32; 4] {\n\n match self {\n\n FancyColor::RGBA(c) => [c.r, c.g, c.b, c.a],\n\n FancyColor::LinearGradient(ref lg) => {\n\n let c = lg.interp(pt);\n\n [c.r, c.g, c.b, c.a]\n\n }\n\n }\n\n }\n\n}\n", "file_path": "ezgui/src/color.rs", "rank": 3, "score": 317082.0253492716 }, { "content": "pub fn retain_btreemap<K: Ord + Clone, V, F: Fn(&K, &V) -> bool>(\n\n map: &mut BTreeMap<K, V>,\n\n keep: F,\n\n) {\n\n let mut remove_keys: Vec<K> = Vec::new();\n\n for (k, v) in map.iter() {\n\n if !keep(k, v) {\n\n remove_keys.push(k.clone());\n\n }\n\n }\n\n for k in remove_keys {\n\n map.remove(&k);\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/collections.rs", "rank": 4, "score": 310869.8400054368 }, { "content": "// Produces (block, arrow)\n\nfn make_geom(offset: f64, pl: PolyLine, width: Distance, angle: Angle) -> (Polygon, Polygon) {\n\n let height = TURN_ICON_ARROW_LENGTH;\n\n // Always extend the pl first to handle short entry lanes\n\n let extension = PolyLine::must_new(vec![\n\n pl.last_pt(),\n\n pl.last_pt()\n\n .project_away(Distance::meters(500.0), pl.last_line().angle()),\n\n ]);\n\n let pl = pl.must_extend(extension);\n\n let slice = pl.exact_slice(offset * height, (offset + 1.0) * height);\n\n let block = slice.make_polygons(width);\n\n\n\n let arrow = {\n\n let center = slice.middle();\n\n PolyLine::must_new(vec![\n\n center.project_away(TURN_ICON_ARROW_LENGTH / 2.0, angle.opposite()),\n\n center.project_away(TURN_ICON_ARROW_LENGTH / 2.0, angle),\n\n ])\n\n .make_arrow(Distance::meters(0.5), ArrowCap::Triangle)\n\n };\n\n\n\n (block, arrow)\n\n}\n", "file_path": "game/src/render/turn.rs", "rank": 5, "score": 309090.80332093075 }, { "content": "pub fn clamp(x: f64, min: f64, max: f64) -> f64 {\n\n if x < min {\n\n min\n\n } else if x > max {\n\n max\n\n } else {\n\n x\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 6, "score": 304953.2742837601 }, { "content": "fn 
to_pt(pt: Pt2D) -> Point2d<f64> {\n\n Point2d::new(pt.x(), pt.y())\n\n}\n\n\n", "file_path": "map_model/src/make/turns.rs", "rank": 7, "score": 297610.7924997603 }, { "content": "fn from_pt(pt: Point2d<f64>) -> Pt2D {\n\n Pt2D::new(pt.x, pt.y)\n\n}\n\n\n", "file_path": "map_model/src/make/turns.rs", "rank": 8, "score": 297610.7924997603 }, { "content": "fn to_geo(pts: &Vec<Pt2D>) -> geo::Polygon<f64> {\n\n geo::Polygon::new(\n\n geo::LineString::from(\n\n pts.iter()\n\n .map(|pt| geo::Point::new(pt.x(), pt.y()))\n\n .collect::<Vec<_>>(),\n\n ),\n\n Vec::new(),\n\n )\n\n}\n\n\n", "file_path": "geom/src/polygon.rs", "rank": 9, "score": 287466.6690852684 }, { "content": "pub trait ObjectID: Clone + Copy + Debug + Eq + Hash {\n\n // Higher shows up in the front.\n\n fn zorder(&self) -> usize;\n\n}\n\n\n\npub struct Object<ID: ObjectID> {\n\n id: ID,\n\n geometry: Vec<(Color, Polygon)>,\n\n}\n\n\n\nimpl<ID: ObjectID> Object<ID> {\n\n pub fn new(id: ID, color: Color, poly: Polygon) -> Object<ID> {\n\n Object {\n\n id,\n\n geometry: vec![(color, poly)],\n\n }\n\n }\n\n}\n\n\n", "file_path": "map_editor/src/world.rs", "rank": 10, "score": 287398.40292167827 }, { "content": "fn from_geo(p: geo::Polygon<f64>) -> Polygon {\n\n Polygon::buggy_new(\n\n p.into_inner()\n\n .0\n\n .into_points()\n\n .into_iter()\n\n .map(|pt| Pt2D::new(pt.x(), pt.y()))\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "geom/src/polygon.rs", "rank": 11, "score": 279169.1752931258 }, { "content": "// TODO Temporarily public for debugging.\n\npub fn calculate_corners(i: &Intersection, map: &Map) -> Vec<Polygon> {\n\n let mut corners = Vec::new();\n\n\n\n for turn in map.get_turns_in_intersection(i.id) {\n\n if turn.turn_type == TurnType::SharedSidewalkCorner {\n\n // Avoid double-rendering\n\n if map.get_l(turn.id.src).dst_i != i.id {\n\n continue;\n\n }\n\n let width = map\n\n .get_l(turn.id.src)\n\n .width\n\n .min(map.get_l(turn.id.dst).width);\n\n\n\n // Special case for dead-ends: just thicken the 
geometry.\n\n if i.roads.len() == 1 {\n\n corners.push(turn.geom.make_polygons(width));\n\n continue;\n\n }\n\n\n", "file_path": "game/src/render/intersection.rs", "rank": 12, "score": 276262.6661349387 }, { "content": "pub fn retain_btreeset<K: Ord + Clone, F: FnMut(&K) -> bool>(set: &mut BTreeSet<K>, mut keep: F) {\n\n let mut remove: Vec<K> = Vec::new();\n\n for k in set.iter() {\n\n if !keep(k) {\n\n remove.push(k.clone());\n\n }\n\n }\n\n for k in remove {\n\n set.remove(&k);\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/collections.rs", "rank": 13, "score": 265052.61799393175 }, { "content": "fn from_multi(multi: geo::MultiPolygon<f64>) -> Vec<Polygon> {\n\n multi.into_iter().map(from_geo).collect()\n\n}\n", "file_path": "geom/src/polygon.rs", "rank": 14, "score": 257877.35256601637 }, { "content": "pub fn elapsed_seconds(since: Instant) -> f64 {\n\n let dt = since.elapsed();\n\n (dt.as_secs() as f64) + (f64::from(dt.subsec_nanos()) * 1e-9)\n\n}\n\n\n", "file_path": "abstutil/src/time.rs", "rank": 15, "score": 249713.11321254793 }, { "content": "pub fn prettyprint_time(seconds: f64) -> String {\n\n format!(\"{:.4}s\", seconds)\n\n}\n\n\n\n// TODO This is an awful way to measure memory usage, but I can't find anything else that works.\n\npub struct MeasureMemory {\n\n before_mb: usize,\n\n}\n\n\n\nimpl MeasureMemory {\n\n pub fn new() -> MeasureMemory {\n\n MeasureMemory {\n\n before_mb: process_used_memory_mb(),\n\n }\n\n }\n\n\n\n pub fn reset(&mut self, section: &str, timer: &mut Timer) {\n\n let now_mb = process_used_memory_mb();\n\n if now_mb >= self.before_mb {\n\n timer.note(format!(\n", "file_path": "abstutil/src/time.rs", "rank": 16, "score": 249713.11321254793 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn file_exists(path: String) -> bool {\n\n SYSTEM_DATA\n\n .get_file(path.trim_start_matches(\"../data/system/\"))\n\n .is_some()\n\n}\n", "file_path": "abstutil/src/io.rs", "rank": 17, "score": 249640.9962920133 }, { "content": "fn 
arrow(pt: ScreenPt) -> Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>> {\n\n Some(Box::new(move |_, _| pt.to_pt()))\n\n}\n\n\n\nimpl Stage {\n\n fn new(task: Task) -> Stage {\n\n Stage {\n\n messages: Vec::new(),\n\n task,\n\n warp_to: None,\n\n spawn: None,\n\n }\n\n }\n\n\n\n fn msg<I: Into<String>>(\n\n mut self,\n\n lines: Vec<I>,\n\n point_to: Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>>,\n\n ) -> Stage {\n\n self.messages.push((\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 18, "score": 240115.65847476106 }, { "content": "pub fn contains_duplicates<T: Ord>(vec: &Vec<T>) -> bool {\n\n let mut set = BTreeSet::new();\n\n for item in vec {\n\n if set.contains(item) {\n\n return true;\n\n }\n\n set.insert(item);\n\n }\n\n false\n\n}\n\n\n\n// Use when your key is just PartialEq, not Ord or Hash.\n\npub struct VecMap<K, V> {\n\n inner: Vec<(K, V)>,\n\n}\n\n\n\nimpl<K: Clone + PartialEq, V> VecMap<K, V> {\n\n pub fn new() -> VecMap<K, V> {\n\n VecMap { inner: Vec::new() }\n\n }\n", "file_path": "abstutil/src/collections.rs", "rank": 19, "score": 230609.3952989747 }, { "content": "pub fn deserialize_multimap<\n\n 'de,\n\n D: Deserializer<'de>,\n\n K: Deserialize<'de> + Eq + Ord + Clone,\n\n V: Deserialize<'de> + Eq + Ord + Clone,\n\n>(\n\n d: D,\n\n) -> Result<MultiMap<K, V>, D::Error> {\n\n let vec = <Vec<(K, Vec<V>)>>::deserialize(d)?;\n\n let mut map = MultiMap::new();\n\n for (key, values) in vec {\n\n for value in values {\n\n map.insert(key.clone(), value);\n\n }\n\n }\n\n Ok(map)\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 20, "score": 229575.1473606192 }, { "content": "pub fn deserialize_btreemap<\n\n 'de,\n\n D: Deserializer<'de>,\n\n K: Deserialize<'de> + Ord,\n\n V: Deserialize<'de>,\n\n>(\n\n d: D,\n\n) -> Result<BTreeMap<K, V>, D::Error> {\n\n let vec = <Vec<(K, V)>>::deserialize(d)?;\n\n let mut map = BTreeMap::new();\n\n for (k, v) in vec {\n\n map.insert(k, v);\n\n }\n\n Ok(map)\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", 
"rank": 21, "score": 229575.1473606192 }, { "content": "pub fn serialize_multimap<\n\n S: Serializer,\n\n K: Serialize + Eq + Ord + Clone,\n\n V: Serialize + Eq + Ord + Clone,\n\n>(\n\n map: &MultiMap<K, V>,\n\n s: S,\n\n) -> Result<S::Ok, S::Error> {\n\n // TODO maybe need to sort to have deterministic output\n\n map.raw_map().iter().collect::<Vec<(_, _)>>().serialize(s)\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 22, "score": 229575.1473606192 }, { "content": "fn point(x: &f64, y: &f64) -> Point {\n\n Point::new((*x) as f32, (*y) as f32)\n\n}\n\n\n", "file_path": "ezgui/src/svg.rs", "rank": 23, "score": 226149.7637658198 }, { "content": "pub fn serialize_usize<S: Serializer>(x: &usize, s: S) -> Result<S::Ok, S::Error> {\n\n if let Ok(x) = u32::try_from(*x) {\n\n x.serialize(s)\n\n } else {\n\n Err(serde::ser::Error::custom(format!(\"{} can't fit in u32\", x)))\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 24, "score": 223238.32261593355 }, { "content": "pub fn make_top_panel(ctx: &mut EventCtx, app: &App, can_undo: bool, can_redo: bool) -> Composite {\n\n let row = vec![\n\n Btn::text_fg(\"Finish\").build_def(ctx, hotkey(Key::Escape)),\n\n Btn::text_fg(\"Preview\").build_def(ctx, lctrl(Key::P)),\n\n (if can_undo {\n\n Btn::svg_def(\"system/assets/tools/undo.svg\").build(ctx, \"undo\", lctrl(Key::Z))\n\n } else {\n\n Widget::draw_svg_transform(\n\n ctx,\n\n \"system/assets/tools/undo.svg\",\n\n RewriteColor::ChangeAll(Color::WHITE.alpha(0.5)),\n\n )\n\n })\n\n .centered_vert(),\n\n (if can_redo {\n\n Btn::svg_def(\"system/assets/tools/redo.svg\").build(\n\n ctx,\n\n \"redo\",\n\n // TODO ctrl+shift+Z!\n\n lctrl(Key::Y),\n", "file_path": "game/src/edit/traffic_signals.rs", "rank": 25, "score": 221666.5242651003 }, { "content": "// TODO Still can't figure out how to derive Deserialize on NodeMap directly.\n\npub fn deserialize_nodemap<\n\n 'de,\n\n D: Deserializer<'de>,\n\n T: Deserialize<'de> + Copy + Ord + Debug + Serialize,\n\n>(\n\n 
d: D,\n\n) -> Result<NodeMap<T>, D::Error> {\n\n // TODO I'm offline and can't look up hw to use Deserializer twice in sequence. Since the two\n\n // fields are redundant, just serialize one of them.\n\n let id_to_node = <Vec<T>>::deserialize(d)?;\n\n let mut node_to_id = BTreeMap::new();\n\n for (id, node) in id_to_node.iter().enumerate() {\n\n node_to_id.insert(*node, id);\n\n }\n\n\n\n Ok(NodeMap {\n\n node_to_id,\n\n id_to_node,\n\n })\n\n}\n", "file_path": "map_model/src/pathfind/node_map.rs", "rank": 26, "score": 219707.45907342268 }, { "content": "// The polygon should exist entirely within the thick bands around all original roads -- it just\n\n// carves up part of that space, doesn't reach past it.\n\n// Also returns a list of labeled polygons for debugging.\n\npub fn intersection_polygon(\n\n driving_side: DrivingSide,\n\n i: &Intersection,\n\n roads: &mut BTreeMap<OriginalRoad, Road>,\n\n timer: &mut Timer,\n\n) -> Result<(Polygon, Vec<(String, Polygon)>), Box<dyn Error>> {\n\n if i.roads.is_empty() {\n\n panic!(\"{} has no roads\", i.id);\n\n }\n\n\n\n // Turn all of the incident roads into two PolyLines (the \"forwards\" and \"backwards\" borders of\n\n // the road, if the roads were oriented to both be incoming to the intersection), both ending\n\n // at the intersection, and the last segment of the center line.\n\n // TODO Maybe express the two incoming PolyLines as the \"right\" and \"left\"\n\n let mut lines: Vec<(OriginalRoad, Line, PolyLine, PolyLine)> = Vec::new();\n\n for id in &i.roads {\n\n let r = &roads[id];\n\n\n\n let pl = if r.src_i == i.id {\n\n r.trimmed_center_pts.reversed()\n", "file_path": "map_model/src/make/initial/geometry.rs", "rank": 27, "score": 219637.0514887505 }, { "content": "pub fn serialize_btreemap<S: Serializer, K: Serialize, V: Serialize>(\n\n map: &BTreeMap<K, V>,\n\n s: S,\n\n) -> Result<S::Ok, S::Error> {\n\n map.iter().collect::<Vec<(_, _)>>().serialize(s)\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 
28, "score": 219192.68802021377 }, { "content": "pub fn can_edit_lane(mode: &GameplayMode, l: LaneID, app: &App) -> bool {\n\n mode.can_edit_lanes()\n\n && !app.primary.map.get_l(l).is_walkable()\n\n && app.primary.map.get_l(l).lane_type != LaneType::SharedLeftTurn\n\n && !app.primary.map.get_l(l).is_light_rail()\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 29, "score": 217520.16020132502 }, { "content": "pub fn serialized_size_bytes<T: Serialize>(obj: &T) -> usize {\n\n bincode::serialized_size(obj).unwrap() as usize\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 30, "score": 211384.71160102493 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn maybe_read_binary<T: DeserializeOwned>(\n\n path: String,\n\n _timer: &mut Timer,\n\n) -> Result<T, Error> {\n\n if let Some(raw) = SYSTEM_DATA.get_file(path.trim_start_matches(\"../data/system/\")) {\n\n let obj: T = bincode::deserialize(raw.contents())\n\n .map_err(|err| Error::new(ErrorKind::Other, err))?;\n\n Ok(obj)\n\n } else {\n\n Err(Error::new(\n\n ErrorKind::Other,\n\n format!(\"Can't maybe_read_binary {}, it doesn't exist\", path),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 31, "score": 211010.92486198334 }, { "content": "// Converts a RawMap to a Map.\n\npub fn raw_to_map(name: &str, build_ch: bool, timer: &mut Timer) -> map_model::Map {\n\n timer.start(format!(\"Raw->Map for {}\", name));\n\n let raw: map_model::raw::RawMap = abstutil::read_binary(abstutil::path_raw_map(name), timer);\n\n let map = map_model::Map::create_from_raw(raw, build_ch, timer);\n\n timer.start(\"save map\");\n\n map.save();\n\n timer.stop(\"save map\");\n\n timer.stop(format!(\"Raw->Map for {}\", name));\n\n\n\n // TODO Just sticking this here for now\n\n if map.get_name() == \"huge_seattle\" {\n\n timer.start(\"generating city manifest\");\n\n abstutil::write_binary(\n\n abstutil::path(format!(\"system/cities/{}.bin\", map.get_city_name())),\n\n &map_model::City::new(&map),\n\n 
);\n\n timer.stop(\"generating city manifest\");\n\n }\n\n\n\n map\n\n}\n", "file_path": "importer/src/utils.rs", "rank": 32, "score": 209750.09780662617 }, { "content": "pub fn to_json<T: Serialize>(obj: &T) -> String {\n\n serde_json::to_string_pretty(obj).unwrap()\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 33, "score": 203945.33474364944 }, { "content": "pub fn prettyprint_usize(x: usize) -> String {\n\n let num = format!(\"{}\", x);\n\n let mut result = String::new();\n\n let mut i = num.len();\n\n for c in num.chars() {\n\n result.push(c);\n\n i -= 1;\n\n if i > 0 && i % 3 == 0 {\n\n result.push(',');\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "abstutil/src/time.rs", "rank": 34, "score": 203397.65583686793 }, { "content": "pub fn debug(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::LaneDebug(id));\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let mut kv = Vec::new();\n\n\n\n kv.push((\"Parent\".to_string(), r.id.to_string()));\n\n\n\n if l.lane_type.is_for_moving_vehicles() {\n\n kv.push((\n\n \"Driving blackhole\".to_string(),\n\n l.driving_blackhole.to_string(),\n\n ));\n\n kv.push((\n\n \"Biking blackhole\".to_string(),\n\n l.biking_blackhole.to_string(),\n\n ));\n\n }\n", "file_path": "game/src/info/lane.rs", "rank": 35, "score": 200675.85335446912 }, { "content": "fn is_counter_clockwise(pt1: Pt2D, pt2: Pt2D, pt3: Pt2D) -> bool {\n\n (pt3.y() - pt1.y()) * (pt2.x() - pt1.x()) > (pt2.y() - pt1.y()) * (pt3.x() - pt1.x())\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct InfiniteLine(Pt2D, Pt2D);\n\n\n\nimpl InfiniteLine {\n\n // Fails for parallel lines.\n\n // https://stackoverflow.com/a/565282 by way of\n\n // https://github.com/ucarion/line_intersection/blob/master/src/lib.rs\n\n pub fn intersection(&self, other: &InfiniteLine) -> Option<Pt2D> {\n\n fn cross(a: (f64, f64), b: 
(f64, f64)) -> f64 {\n\n a.0 * b.1 - a.1 * b.0\n\n }\n\n\n\n let p = self.0;\n\n let q = other.0;\n\n let r = (self.1.x() - self.0.x(), self.1.y() - self.0.y());\n\n let s = (other.1.x() - other.0.x(), other.1.y() - other.0.y());\n", "file_path": "geom/src/line.rs", "rank": 36, "score": 197260.61219981834 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn write_json<T: Serialize>(path: String, obj: &T) {\n\n // TODO not yet\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 37, "score": 196228.46685886505 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn write_binary<T: Serialize>(path: String, obj: &T) {\n\n // TODO\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 38, "score": 196228.46685886505 }, { "content": "// Uses osmconvert to clip the input .osm (or .pbf) against a polygon and produce some output.\n\n// Skips if the output exists.\n\npub fn osmconvert(input: &str, clipping_polygon: String, output: String) {\n\n let input = abstutil::path(input);\n\n let clipping_polygon = abstutil::path(clipping_polygon);\n\n let output = abstutil::path(output);\n\n\n\n if Path::new(&output).exists() {\n\n println!(\"- {} already exists\", output);\n\n return;\n\n }\n\n println!(\"- Clipping {} to {}\", input, clipping_polygon);\n\n\n\n run(Command::new(\"osmconvert\")\n\n .arg(input)\n\n .arg(format!(\"-B={}\", clipping_polygon))\n\n .arg(\"--complete-ways\")\n\n .arg(format!(\"-o={}\", output)));\n\n}\n\n\n", "file_path": "importer/src/utils.rs", "rank": 39, "score": 196168.0475528646 }, { "content": "// Exports to https://github.com/d-wasserman/shared-row/\n\npub fn export(roads: Vec<RoadID>, map: &Map) {\n\n let geojson = GeoJson::from(FeatureCollection {\n\n bbox: None,\n\n features: roads.into_iter().map(|r| road(r, map)).collect(),\n\n foreign_members: None,\n\n });\n\n abstutil::write_json(\"shared_row_export.json\".to_string(), &geojson);\n\n}\n\n\n", "file_path": "game/src/debug/shared_row.rs", "rank": 40, "score": 
195520.59031521037 }, { "content": "fn to_pct(value: f64, (low, high): (f64, f64)) -> f64 {\n\n assert!(low <= high);\n\n assert!(value >= low);\n\n assert!(value <= high);\n\n (value - low) / (high - low)\n\n}\n\n\n", "file_path": "ezgui/src/color.rs", "rank": 41, "score": 195243.21612793603 }, { "content": "pub fn deserialize_usize<'de, D: Deserializer<'de>>(d: D) -> Result<usize, D::Error> {\n\n let x = <u32>::deserialize(d)?;\n\n Ok(x as usize)\n\n}\n\n\n\n// Just list all things from a directory, return sorted by name, with file extension removed.\n", "file_path": "abstutil/src/io.rs", "rank": 42, "score": 195039.44504203077 }, { "content": "fn infer_spots(lot_polygon: &Polygon, aisles: &Vec<Vec<Pt2D>>) -> Vec<(Pt2D, Angle)> {\n\n let mut spots = Vec::new();\n\n let mut finalized_lines = Vec::new();\n\n\n\n for aisle in aisles {\n\n let aisle_thickness = NORMAL_LANE_THICKNESS / 2.0;\n\n let pl = PolyLine::unchecked_new(aisle.clone());\n\n\n\n for rotate in vec![90.0, -90.0] {\n\n // Blindly generate all of the lines\n\n let lines = {\n\n let mut lines = Vec::new();\n\n let mut start = Distance::ZERO;\n\n while start + NORMAL_LANE_THICKNESS < pl.length() {\n\n let (pt, angle) = pl.must_dist_along(start);\n\n start += NORMAL_LANE_THICKNESS;\n\n let theta = angle.rotate_degs(rotate);\n\n lines.push(Line::must_new(\n\n pt.project_away(aisle_thickness / 2.0, theta),\n\n pt.project_away(aisle_thickness / 2.0 + PARKING_LOT_SPOT_LENGTH, theta),\n", "file_path": "map_model/src/make/parking_lots.rs", "rank": 43, "score": 193208.93274485838 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\npub fn load_all_objects<T: DeserializeOwned>(dir: String) -> Vec<(String, T)> {\n\n let mut timer = Timer::new(format!(\"load_all_objects from {}\", dir));\n\n let mut tree: BTreeMap<String, T> = BTreeMap::new();\n\n match std::fs::read_dir(&dir) {\n\n Ok(iter) => {\n\n for entry in iter {\n\n let filename = entry.unwrap().file_name();\n\n let path = Path::new(&filename);\n\n 
let path_str = path.to_string_lossy();\n\n if path_str.starts_with('.') {\n\n continue;\n\n }\n\n let full_path = format!(\"{}/{}\", dir, path_str);\n\n let name = path\n\n .file_stem()\n\n .unwrap()\n\n .to_os_string()\n\n .into_string()\n\n .unwrap();\n\n let maybe_load: Result<T, Error> = if path_str.ends_with(\".json\") {\n", "file_path": "abstutil/src/io.rs", "rank": 44, "score": 189078.29427412938 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn load_all_objects<T: DeserializeOwned>(_dir: String) -> Vec<(String, T)> {\n\n // TODO\n\n Vec::new()\n\n}\n\n\n\n// TODO I'd like to get rid of this and just use Timer.read_file, but external libraries consume\n\n// the reader. :\\\n\npub struct FileWithProgress {\n\n inner: BufReader<File>,\n\n\n\n path: String,\n\n processed_bytes: usize,\n\n total_bytes: usize,\n\n started_at: Instant,\n\n last_printed_at: Instant,\n\n}\n\n\n\nimpl FileWithProgress {\n\n // Also hands back a callback that'll add the final result to the timer. The caller must run\n\n // it.\n", "file_path": "abstutil/src/io.rs", "rank": 45, "score": 189078.29427412938 }, { "content": "pub fn read_binary<T: DeserializeOwned>(path: String, timer: &mut Timer) -> T {\n\n match maybe_read_binary(path.clone(), timer) {\n\n Ok(obj) => obj,\n\n Err(err) => panic!(\"Couldn't read_binary({}): {}\", path, err),\n\n }\n\n}\n\n\n\n// For BTreeMaps with struct keys. 
See https://github.com/serde-rs/json/issues/402.\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 46, "score": 184811.7430766955 }, { "content": "pub fn read_json<T: DeserializeOwned>(path: String, timer: &mut Timer) -> T {\n\n match maybe_read_json(path.clone(), timer) {\n\n Ok(obj) => obj,\n\n Err(err) => panic!(\"Couldn't read_json({}): {}\", path, err),\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 47, "score": 184811.7430766955 }, { "content": "fn pts_to_line_string(raw_pts: &Vec<Pt2D>) -> geo::LineString<f64> {\n\n let pts: Vec<geo::Point<f64>> = raw_pts\n\n .iter()\n\n .map(|pt| geo::Point::new(pt.x(), pt.y()))\n\n .collect();\n\n pts.into()\n\n}\n", "file_path": "geom/src/find_closest.rs", "rank": 48, "score": 183858.3095542086 }, { "content": "// TODO A squished octagon would look better\n\nfn make_octagon(center: Pt2D, radius: Distance, facing: Angle) -> Polygon {\n\n Ring::must_new(\n\n (0..=8)\n\n .map(|i| center.project_away(radius, facing.rotate_degs(22.5 + f64::from(i * 360 / 8))))\n\n .collect(),\n\n )\n\n .to_polygon()\n\n}\n\n\n", "file_path": "game/src/render/intersection.rs", "rank": 49, "score": 183737.04717236946 }, { "content": "// If the result doesn't contain a requested point, then there was no matching lane close\n\n// enough.\n\nfn match_points_to_lanes<F: Fn(&Lane) -> bool>(\n\n bounds: &Bounds,\n\n pts: HashSet<HashablePt2D>,\n\n lanes: &Vec<Lane>,\n\n filter: F,\n\n buffer: Distance,\n\n max_dist_away: Distance,\n\n timer: &mut Timer,\n\n) -> HashMap<HashablePt2D, Position> {\n\n if pts.is_empty() {\n\n return HashMap::new();\n\n }\n\n\n\n let mut closest: FindClosest<LaneID> = FindClosest::new(bounds);\n\n timer.start_iter(\"index lanes\", lanes.len());\n\n for l in lanes {\n\n timer.next();\n\n if filter(l) && l.length() > (buffer + EPSILON_DIST) * 2.0 {\n\n closest.add(\n\n l.id,\n", "file_path": "map_model/src/make/mod.rs", "rank": 50, "score": 180122.61452969443 }, { "content": "pub fn prebake_all() {\n\n let 
mut timer = Timer::new(\"prebake all challenge results\");\n\n\n\n {\n\n let map = map_model::Map::new(abstutil::path_map(\"montlake\"), &mut timer);\n\n let scenario: Scenario =\n\n abstutil::read_binary(abstutil::path_scenario(\"montlake\", \"weekday\"), &mut timer);\n\n prebake(&map, scenario, None, &mut timer);\n\n\n\n for generator in TutorialState::scenarios_to_prebake(&map) {\n\n let scenario = generator.generate(\n\n &map,\n\n &mut SimFlags::for_test(\"prebaked\").make_rng(),\n\n &mut timer,\n\n );\n\n prebake(&map, scenario, None, &mut timer);\n\n }\n\n }\n\n\n\n for name in vec![\"lakeslice\"] {\n\n let map = map_model::Map::new(abstutil::path_map(name), &mut timer);\n\n let scenario: Scenario =\n\n abstutil::read_binary(abstutil::path_scenario(name, \"weekday\"), &mut timer);\n\n prebake(&map, scenario, None, &mut timer);\n\n }\n\n}\n\n\n\n// TODO This variant will be more useful when all scenarios tend to actually complete. ;)\n", "file_path": "game/src/challenges.rs", "rank": 51, "score": 180098.3332012956 }, { "content": "pub fn load(\n\n path: &str,\n\n gps_bounds: &GPSBounds,\n\n require_all_pts_in_bounds: bool,\n\n timer: &mut Timer,\n\n) -> Result<ExtraShapes, Box<dyn Error>> {\n\n println!(\"Opening {}\", path);\n\n let (f, done) = FileWithProgress::new(path)?;\n\n // TODO FileWithProgress should implement BufRead, so we don't have to double wrap like this\n\n let mut reader = Reader::from_reader(std::io::BufReader::new(f));\n\n reader.trim_text(true);\n\n\n\n let mut buf = Vec::new();\n\n\n\n // TODO uncomfortably stateful\n\n let mut shapes = Vec::new();\n\n let mut scanned_schema = false;\n\n let mut attributes: BTreeMap<String, String> = BTreeMap::new();\n\n let mut attrib_key: Option<String> = None;\n\n\n", "file_path": "kml/src/lib.rs", "rank": 52, "score": 180098.3332012956 }, { "content": "#[cfg(not(feature = \"wasm-backend\"))]\n\nfn scroll_wheel_multiplier() -> f64 {\n\n 1.0\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, 
Ord, Hash, Debug)]\n\npub enum Key {\n\n // Case is unspecified.\n\n // TODO Would be cool to represent A and UpperA, but then release semantics get weird... hold\n\n // shift and A, release shift -- does that trigger a Release(UpperA) and a Press(A)?\n\n A,\n\n B,\n\n C,\n\n D,\n\n E,\n\n F,\n\n G,\n\n H,\n\n I,\n\n J,\n\n K,\n", "file_path": "ezgui/src/event.rs", "rank": 53, "score": 178148.68700772652 }, { "content": "pub fn schedule(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: PersonID,\n\n is_paused: bool,\n\n) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::PersonSchedule(id), is_paused);\n\n let person = app.primary.sim.get_person(id);\n\n let mut rng = XorShiftRng::seed_from_u64(id.0 as u64);\n\n\n\n // TODO Proportional 24-hour timeline would be easier to understand\n\n let mut last_t = Time::START_OF_DAY;\n\n for t in &person.trips {\n\n let trip = app.primary.sim.trip_info(*t);\n\n let at = match trip.start {\n\n TripEndpoint::Bldg(b) => {\n\n let b = app.primary.map.get_b(b);\n\n if b.amenities.is_empty() {\n\n b.address.clone()\n", "file_path": "game/src/info/person.rs", "rank": 54, "score": 177708.01953484153 }, { "content": "#[cfg(not(feature = \"profiler\"))]\n\npub fn start_profiler() {\n\n panic!(\"abstutil/profiler feature not enabled in Cargo.toml\");\n\n}\n\n\n", "file_path": "abstutil/src/time.rs", "rank": 55, "score": 177708.01953484153 }, { "content": "pub fn traffic(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: IntersectionID,\n\n opts: &DataOptions,\n\n) -> Vec<Widget> {\n\n let mut rows = header(\n\n ctx,\n\n app,\n\n details,\n\n id,\n\n Tab::IntersectionTraffic(id, opts.clone()),\n\n );\n\n\n\n let mut txt = Text::new();\n\n\n\n txt.add(Line(format!(\n\n \"Since midnight: {} agents crossed\",\n\n prettyprint_usize(\n", "file_path": "game/src/info/intersection.rs", "rank": 56, "score": 177708.01953484153 }, { "content": "pub fn ongoing(\n\n ctx: &mut 
EventCtx,\n\n app: &App,\n\n id: TripID,\n\n agent: AgentID,\n\n open_trip: &mut OpenTrip,\n\n details: &mut Details,\n\n) -> Widget {\n\n let phases = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .get_trip_phases(id, &app.primary.map);\n\n let trip = app.primary.sim.trip_info(id);\n\n\n\n let col_width = 7;\n\n let props = app.primary.sim.agent_properties(agent);\n\n let activity = agent.to_type().ongoing_verb();\n\n let time_so_far = app.primary.sim.time() - trip.departure;\n\n\n", "file_path": "game/src/info/trip.rs", "rank": 57, "score": 177708.01953484153 }, { "content": "// If the output file doesn't already exist, downloads the URL into that location. Clips .kml\n\n// files and converts to a .bin.\n\npub fn download_kml(\n\n output: &str,\n\n url: &str,\n\n bounds: &geom::GPSBounds,\n\n require_all_pts_in_bounds: bool,\n\n timer: &mut Timer,\n\n) {\n\n assert!(url.ends_with(\".kml\"));\n\n let output = abstutil::path(output);\n\n if Path::new(&output).exists() {\n\n println!(\"- {} already exists\", output);\n\n return;\n\n }\n\n // Create the directory\n\n std::fs::create_dir_all(Path::new(&output).parent().unwrap())\n\n .expect(\"Creating parent dir failed\");\n\n\n\n let tmp = \"tmp_output\";\n\n if Path::new(&output.replace(\".bin\", \".kml\")).exists() {\n\n std::fs::copy(output.replace(\".bin\", \".kml\"), tmp).unwrap();\n", "file_path": "importer/src/utils.rs", "rank": 58, "score": 177708.01953484153 }, { "content": "pub fn delay(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: IntersectionID,\n\n opts: &DataOptions,\n\n fan_chart: bool,\n\n) -> Vec<Widget> {\n\n let mut rows = header(\n\n ctx,\n\n app,\n\n details,\n\n id,\n\n Tab::IntersectionDelay(id, opts.clone(), fan_chart),\n\n );\n\n let i = app.primary.map.get_i(id);\n\n\n\n assert!(i.is_traffic_signal());\n\n rows.push(opts.to_controls(ctx, app));\n\n rows.push(Checkbox::toggle(\n", "file_path": "game/src/info/intersection.rs", "rank": 59, "score": 
177708.01953484153 }, { "content": "pub fn trips(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: PersonID,\n\n open_trips: &mut BTreeMap<TripID, OpenTrip>,\n\n is_paused: bool,\n\n) -> Vec<Widget> {\n\n let mut rows = header(\n\n ctx,\n\n app,\n\n details,\n\n id,\n\n Tab::PersonTrips(id, open_trips.clone()),\n\n is_paused,\n\n );\n\n\n\n let map = &app.primary.map;\n\n let sim = &app.primary.sim;\n\n let person = sim.get_person(id);\n", "file_path": "game/src/info/person.rs", "rank": 60, "score": 177708.01953484153 }, { "content": "#[cfg(not(feature = \"profiler\"))]\n\npub fn stop_profiler() {\n\n panic!(\"abstutil/profiler feature not enabled in Cargo.toml\");\n\n}\n", "file_path": "abstutil/src/time.rs", "rank": 61, "score": 177708.01953484153 }, { "content": "pub fn traffic(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: LaneID,\n\n opts: &DataOptions,\n\n) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::LaneTraffic(id, opts.clone()));\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n // Since this applies to the entire road, ignore lane type.\n\n let mut txt = Text::from(Line(\"Traffic over entire road, not just this lane\"));\n\n txt.add(Line(format!(\n\n \"Since midnight: {} agents crossed\",\n\n prettyprint_usize(app.primary.sim.get_analytics().road_thruput.total_for(r.id))\n\n )));\n\n rows.push(txt.draw(ctx));\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 62, "score": 177708.01953484153 }, { "content": "pub fn finished(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n person: PersonID,\n\n open_trips: &mut BTreeMap<TripID, OpenTrip>,\n\n id: TripID,\n\n details: &mut Details,\n\n) -> Widget {\n\n let trip = app.primary.sim.trip_info(id);\n\n let phases = if open_trips[&id].show_after {\n\n app.primary\n\n .sim\n\n .get_analytics()\n\n .get_trip_phases(id, &app.primary.map)\n\n } else {\n\n app.prebaked().get_trip_phases(id, 
&app.primary.map)\n\n };\n\n\n\n let mut col = Vec::new();\n\n\n", "file_path": "game/src/info/trip.rs", "rank": 63, "score": 177708.01953484153 }, { "content": "pub fn bio(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: PersonID,\n\n is_paused: bool,\n\n) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::PersonBio(id), is_paused);\n\n let person = app.primary.sim.get_person(id);\n\n let mut rng = XorShiftRng::seed_from_u64(id.0 as u64);\n\n\n\n let mut svg_data = Vec::new();\n\n svg_face::generate_face(&mut svg_data, &mut rng).unwrap();\n\n let batch = GeomBatch::from_svg_contents(svg_data).autocrop();\n\n let dims = batch.get_dims();\n\n let batch = batch.scale((200.0 / dims.width).min(200.0 / dims.height));\n\n rows.push(Widget::draw_batch(ctx, batch).centered_horiz());\n\n\n\n let nickname = petname::Petnames::default().generate(&mut rng, 2, \" \");\n\n let age = rng.gen_range(5, 100);\n", "file_path": "game/src/info/person.rs", "rank": 64, "score": 177708.01953484153 }, { "content": "pub fn crowd(\n\n ctx: &EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n members: &Vec<PedestrianID>,\n\n) -> Vec<Widget> {\n\n let mut rows = vec![];\n\n\n\n rows.push(Widget::row(vec![\n\n Line(\"Pedestrian crowd\").small_heading().draw(ctx),\n\n header_btns(ctx),\n\n ]));\n\n\n\n for (idx, id) in members.into_iter().enumerate() {\n\n let person = app\n\n .primary\n\n .sim\n\n .agent_to_person(AgentID::Pedestrian(*id))\n\n .unwrap();\n\n // TODO What other info is useful to summarize?\n", "file_path": "game/src/info/person.rs", "rank": 65, "score": 177708.01953484153 }, { "content": "pub fn future(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n id: TripID,\n\n open_trip: &mut OpenTrip,\n\n details: &mut Details,\n\n) -> Widget {\n\n let trip = app.primary.sim.trip_info(id);\n\n\n\n let mut col = Vec::new();\n\n\n\n let now = app.primary.sim.time();\n\n if now > trip.departure {\n\n col.extend(make_table(\n\n ctx,\n\n 
vec![(\"Start delayed\", (now - trip.departure).to_string())].into_iter(),\n\n ));\n\n }\n\n\n\n if let Some(estimated_trip_time) = app\n", "file_path": "game/src/info/trip.rs", "rank": 66, "score": 177708.01953484153 }, { "content": "#[allow(unused)]\n\npub fn generic_prebake_all() {\n\n let mut timer = Timer::new(\"prebake all challenge results\");\n\n\n\n let mut per_map: BTreeMap<String, Vec<Challenge>> = BTreeMap::new();\n\n for (_, list) in Challenge::all() {\n\n for c in list {\n\n per_map\n\n .entry(c.gameplay.map_path())\n\n .or_insert_with(Vec::new)\n\n .push(c);\n\n }\n\n }\n\n for (map_path, list) in per_map {\n\n timer.start(format!(\"prebake for {}\", map_path));\n\n let map = map_model::Map::new(map_path.clone(), &mut timer);\n\n\n\n let mut done_scenarios = HashSet::new();\n\n for challenge in list {\n\n // Bit of an abuse of this, but just need to fix the rng seed.\n\n if let Some(scenario) = challenge.gameplay.scenario(\n", "file_path": "game/src/challenges.rs", "rank": 67, "score": 177708.01953484153 }, { "content": "pub fn arrivals(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: IntersectionID,\n\n opts: &DataOptions,\n\n) -> Vec<Widget> {\n\n let mut rows = header(\n\n ctx,\n\n app,\n\n details,\n\n id,\n\n Tab::IntersectionArrivals(id, opts.clone()),\n\n );\n\n\n\n rows.push(throughput(\n\n ctx,\n\n app,\n\n \"Number of in-bound trips from this border\",\n\n move |_| app.primary.sim.all_arrivals_at_border(id),\n\n opts,\n\n ));\n\n\n\n rows\n\n}\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 68, "score": 177708.01953484153 }, { "content": "pub fn read(\n\n path: &str,\n\n input_gps_bounds: &GPSBounds,\n\n timer: &mut Timer,\n\n) -> Result<Document, Box<dyn Error>> {\n\n timer.start(format!(\"read {}\", path));\n\n let bytes = slurp_file(path)?;\n\n let raw_string = std::str::from_utf8(&bytes)?;\n\n let tree = roxmltree::Document::parse(raw_string)?;\n\n timer.stop(format!(\"read {}\", path));\n\n\n\n 
let mut doc = Document {\n\n gps_bounds: input_gps_bounds.clone(),\n\n nodes: BTreeMap::new(),\n\n ways: BTreeMap::new(),\n\n relations: BTreeMap::new(),\n\n };\n\n\n\n timer.start(\"scrape objects\");\n\n for obj in tree.descendants() {\n", "file_path": "convert_osm/src/reader.rs", "rank": 69, "score": 177708.01953484153 }, { "content": "// Returns a legend\n\npub fn make_heatmap(\n\n ctx: &mut EventCtx,\n\n batch: &mut GeomBatch,\n\n bounds: &Bounds,\n\n pts: Vec<Pt2D>,\n\n opts: &HeatmapOptions,\n\n) -> Widget {\n\n // 7 colors, 8 labels\n\n let num_colors = 7;\n\n let gradient = match opts.color_scheme.as_ref() {\n\n \"Turbo\" => colorous::TURBO,\n\n \"Inferno\" => colorous::INFERNO,\n\n \"Warm\" => colorous::WARM,\n\n \"Cool\" => colorous::COOL,\n\n \"Oranges\" => colorous::ORANGES,\n\n \"Spectral\" => colorous::SPECTRAL,\n\n _ => unreachable!(),\n\n };\n\n let colors: Vec<Color> = (0..num_colors)\n\n .map(|i| {\n", "file_path": "game/src/common/heatmap.rs", "rank": 70, "score": 175424.0360233253 }, { "content": "pub fn current_demand(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: IntersectionID,\n\n) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::IntersectionDemand(id));\n\n\n\n let mut total_demand = 0;\n\n let mut demand_per_group: Vec<(&PolyLine, usize)> = Vec::new();\n\n for g in app.primary.map.get_traffic_signal(id).turn_groups.values() {\n\n let demand = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .demand\n\n .get(&g.id)\n\n .cloned()\n\n .unwrap_or(0);\n\n if demand > 0 {\n", "file_path": "game/src/info/intersection.rs", "rank": 71, "score": 175424.0360233253 }, { "content": "#[cfg(feature = \"scenarios\")]\n\npub fn ensure_popdat_exists(\n\n timer: &mut abstutil::Timer,\n\n) -> (crate::soundcast::PopDat, map_model::Map) {\n\n if abstutil::file_exists(abstutil::path_popdat()) {\n\n println!(\"- {} exists, not regenerating it\", abstutil::path_popdat());\n\n return (\n\n 
abstutil::read_binary(abstutil::path_popdat(), timer),\n\n map_model::Map::new(abstutil::path_map(\"huge_seattle\"), timer),\n\n );\n\n }\n\n\n\n if !abstutil::file_exists(abstutil::path_raw_map(\"huge_seattle\")) {\n\n osm_to_raw(\"huge_seattle\", timer);\n\n }\n\n let huge_map = if abstutil::file_exists(abstutil::path_map(\"huge_seattle\")) {\n\n map_model::Map::new(abstutil::path_map(\"huge_seattle\"), timer)\n\n } else {\n\n crate::utils::raw_to_map(\"huge_seattle\", true, timer)\n\n };\n\n\n\n (crate::soundcast::import_data(&huge_map, timer), huge_map)\n\n}\n\n\n", "file_path": "importer/src/seattle.rs", "rank": 72, "score": 175424.0360233253 }, { "content": "pub fn checkbox_per_mode(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n current_state: &BTreeSet<TripMode>,\n\n) -> Widget {\n\n let mut filters = Vec::new();\n\n for m in TripMode::all() {\n\n filters.push(\n\n Checkbox::colored(\n\n ctx,\n\n m.ongoing_verb(),\n\n color_for_mode(app, m),\n\n current_state.contains(&m),\n\n )\n\n .margin_right(24),\n\n );\n\n }\n\n Widget::custom_row(filters)\n\n}\n\n\n", "file_path": "game/src/helpers.rs", "rank": 73, "score": 175424.0360233253 }, { "content": "// No offset. I'm not exactly sure how the simplification in usvg works, but this doesn't support\n\n// transforms or strokes or text, just fills. Luckily, all of the files exported from Figma so far\n\n// work just fine.\n\npub fn add_svg_inner(\n\n batch: &mut GeomBatch,\n\n svg_tree: usvg::Tree,\n\n tolerance: f32,\n\n) -> Result<Bounds, String> {\n\n let mut fill_tess = tessellation::FillTessellator::new();\n\n let mut stroke_tess = tessellation::StrokeTessellator::new();\n\n // TODO This breaks on start.svg; the order there matters. 
color1, color2, then color1 again.\n\n let mut mesh_per_color: VecMap<FancyColor, VertexBuffers<_, u16>> = VecMap::new();\n\n\n\n for node in svg_tree.root().descendants() {\n\n if let usvg::NodeKind::Path(ref p) = *node.borrow() {\n\n // TODO Handle transforms\n\n\n\n if let Some(ref fill) = p.fill {\n\n let color = convert_color(&fill.paint, fill.opacity.value(), &svg_tree);\n\n let geom = mesh_per_color.mut_or_insert(color, VertexBuffers::new);\n\n if fill_tess\n\n .tessellate(\n\n convert_path(p),\n", "file_path": "ezgui/src/svg.rs", "rank": 74, "score": 175424.0360233253 }, { "content": "pub fn extract_route(\n\n rel_id: RelationID,\n\n rel: &Relation,\n\n doc: &Document,\n\n boundary: &Polygon,\n\n timer: &mut Timer,\n\n) -> Option<RawBusRoute> {\n\n let full_name = rel.tags.get(\"name\")?.clone();\n\n let short_name = rel\n\n .tags\n\n .get(\"ref\")\n\n .cloned()\n\n .unwrap_or_else(|| full_name.clone());\n\n let is_bus = match rel.tags.get(\"route\")?.as_ref() {\n\n \"bus\" => true,\n\n \"light_rail\" => false,\n\n x => {\n\n if !vec![\"bicycle\", \"foot\", \"railway\", \"road\", \"tracks\", \"train\"].contains(&x) {\n\n // TODO Handle these at some point\n\n println!(\n", "file_path": "convert_osm/src/transit.rs", "rank": 75, "score": 175424.0360233253 }, { "content": "pub fn traffic_signal(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: IntersectionID,\n\n) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::IntersectionTrafficSignal(id));\n\n\n\n // Slightly inaccurate -- the turn rendering may slightly exceed the intersection polygon --\n\n // but this is close enough.\n\n let bounds = app.primary.map.get_i(id).polygon.get_bounds();\n\n // Pick a zoom so that we fit a fixed width in pixels\n\n let zoom = 150.0 / bounds.width();\n\n let bbox = Polygon::rectangle(zoom * bounds.width(), zoom * bounds.height());\n\n\n\n let signal = app.primary.map.get_traffic_signal(id);\n\n {\n\n let mut txt = Text::new();\n\n 
txt.add(Line(format!(\"{} phases\", signal.phases.len())).small_heading());\n\n txt.add(Line(format!(\"Signal offset: {}\", signal.offset)));\n", "file_path": "game/src/info/intersection.rs", "rank": 76, "score": 175424.0360233253 }, { "content": "pub fn parked_car(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n details: &mut Details,\n\n id: CarID,\n\n is_paused: bool,\n\n) -> Vec<Widget> {\n\n let mut rows = vec![];\n\n\n\n rows.push(Widget::row(vec![\n\n Line(format!(\"Parked car #{}\", id.0))\n\n .small_heading()\n\n .draw(ctx),\n\n Widget::row(vec![\n\n // Little indirect, but the handler of this action is actually the ContextualActions\n\n // for SandboxMode.\n\n if is_paused {\n\n Btn::svg_def(\"system/assets/tools/location.svg\").build(\n\n ctx,\n\n \"follow (run the simulation)\",\n", "file_path": "game/src/info/person.rs", "rank": 77, "score": 175424.0360233253 }, { "content": "pub fn maybe_read_json<T: DeserializeOwned>(path: String, timer: &mut Timer) -> Result<T, Error> {\n\n if !path.ends_with(\".json\") && !path.ends_with(\".geojson\") {\n\n panic!(\"read_json needs {} to end with .json or .geojson\", path);\n\n }\n\n\n\n timer.start(format!(\"parse {}\", path));\n\n // TODO timer.read_file isn't working here. 
And we need to call stop() if there's no file.\n\n let result: Result<T, Error> = slurp_file(&path).and_then(|raw| {\n\n serde_json::from_slice(&raw).map_err(|err| Error::new(ErrorKind::Other, err))\n\n });\n\n timer.stop(format!(\"parse {}\", path));\n\n result\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 78, "score": 174652.20287338793 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\npub fn maybe_read_binary<T: DeserializeOwned>(path: String, timer: &mut Timer) -> Result<T, Error> {\n\n if !path.ends_with(\".bin\") {\n\n panic!(\"read_binary needs {} to end with .bin\", path);\n\n }\n\n\n\n timer.read_file(&path)?;\n\n let obj: T =\n\n bincode::deserialize_from(timer).map_err(|err| Error::new(ErrorKind::Other, err))?;\n\n Ok(obj)\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 79, "score": 174652.20287338793 }, { "content": "pub fn snap_bus_stops(\n\n mut route: RawBusRoute,\n\n raw: &mut RawMap,\n\n pt_to_road: &HashMap<HashablePt2D, OriginalRoad>,\n\n timer: &mut Timer,\n\n) -> Result<RawBusRoute, String> {\n\n // TODO RawBusStop should have an osm_node_id()\n\n\n\n // For every stop, figure out what road segment and direction it matches up to.\n\n for stop in &mut route.stops {\n\n let idx_in_route = route\n\n .all_pts\n\n .iter()\n\n .position(|(node, _)| stop.vehicle_pos.0 == *node)\n\n .ok_or(format!(\"{} missing from route?!\", stop.vehicle_pos.0))?;\n\n\n\n let road = if raw.intersections.contains_key(&stop.vehicle_pos.0) {\n\n // Prefer to match just before an intersection, instead of just after\n\n let mut found = None;\n\n for idx in (0..idx_in_route).rev() {\n", "file_path": "convert_osm/src/transit.rs", "rank": 80, "score": 173239.44208958163 }, { "content": "pub fn try_change_lt(\n\n ctx: &mut EventCtx,\n\n map: &mut Map,\n\n l: LaneID,\n\n new_lt: LaneType,\n\n) -> Result<EditCmd, Box<dyn State>> {\n\n let orig_edits = map.get_edits().clone();\n\n\n\n let mut edits = orig_edits.clone();\n\n let cmd = 
EditCmd::ChangeLaneType {\n\n id: l,\n\n lt: new_lt,\n\n orig_lt: map.get_l(l).lane_type,\n\n };\n\n edits.commands.push(cmd.clone());\n\n map.try_apply_edits(edits, &mut Timer::throwaway());\n\n\n\n let mut errors = Vec::new();\n\n let r = map.get_parent(l);\n\n\n", "file_path": "game/src/edit/validate.rs", "rank": 81, "score": 173239.44208958163 }, { "content": "pub fn make_weekday_scenario(\n\n map: &Map,\n\n popdat: &PopDat,\n\n huge_map: &Map,\n\n timer: &mut Timer,\n\n) -> Scenario {\n\n let trips = clip_trips(map, popdat, huge_map, timer);\n\n let orig_trips = trips.len();\n\n\n\n let mut individ_trips: Vec<Option<IndividTrip>> = Vec::new();\n\n // person -> (trip seq, index into individ_trips)\n\n let mut trips_per_person: MultiMap<OrigPersonID, ((usize, bool, usize), usize)> =\n\n MultiMap::new();\n\n for (trip, depart, person, seq) in timer.parallelize(\n\n \"turn Soundcast trips into SpawnTrips\",\n\n Parallelism::Polite,\n\n trips,\n\n |trip| {\n\n (\n\n SpawnTrip::new(trip.from, trip.to, trip.orig.mode, map),\n", "file_path": "importer/src/soundcast/trips.rs", "rank": 82, "score": 173239.44208958163 }, { "content": "pub fn maybe_edit_intersection(\n\n ctx: &mut EventCtx,\n\n app: &mut App,\n\n id: IntersectionID,\n\n mode: &GameplayMode,\n\n) -> Option<Box<dyn State>> {\n\n if app.primary.map.maybe_get_stop_sign(id).is_some()\n\n && mode.can_edit_stop_signs()\n\n && app.per_obj.left_click(ctx, \"edit stop signs\")\n\n {\n\n return Some(Box::new(StopSignEditor::new(ctx, app, id, mode.clone())));\n\n }\n\n\n\n if app.primary.map.maybe_get_traffic_signal(id).is_some()\n\n && app.per_obj.left_click(ctx, \"edit traffic signal\")\n\n {\n\n return Some(TrafficSignalEditor::new(ctx, app, id, mode.clone()));\n\n }\n\n\n\n if app.primary.map.get_i(id).is_closed()\n", "file_path": "game/src/edit/mod.rs", "rank": 83, "score": 173239.44208958163 }, { "content": "// Could be caused by closing intersections\n\npub fn check_sidewalk_connectivity(\n\n ctx: &mut 
EventCtx,\n\n app: &mut App,\n\n cmd: EditCmd,\n\n) -> Option<Box<dyn State>> {\n\n let orig_edits = app.primary.map.get_edits().clone();\n\n let (_, disconnected_before) =\n\n connectivity::find_scc(&app.primary.map, PathConstraints::Pedestrian);\n\n\n\n let mut edits = orig_edits.clone();\n\n edits.commands.push(cmd);\n\n app.primary\n\n .map\n\n .try_apply_edits(edits, &mut Timer::throwaway());\n\n\n\n let (_, disconnected_after) =\n\n connectivity::find_scc(&app.primary.map, PathConstraints::Pedestrian);\n\n app.primary\n\n .map\n\n .must_apply_edits(orig_edits, &mut Timer::throwaway());\n", "file_path": "game/src/edit/validate.rs", "rank": 84, "score": 173239.44208958163 }, { "content": "pub fn make_all_buildings(\n\n input: &BTreeMap<OriginalBuilding, RawBuilding>,\n\n map: &Map,\n\n timer: &mut Timer,\n\n) -> Vec<Building> {\n\n timer.start(\"convert buildings\");\n\n let mut center_per_bldg: BTreeMap<OriginalBuilding, HashablePt2D> = BTreeMap::new();\n\n let mut query: HashSet<HashablePt2D> = HashSet::new();\n\n timer.start_iter(\"get building center points\", input.len());\n\n for (id, b) in input {\n\n timer.next();\n\n let center = b.polygon.center().to_hashable();\n\n center_per_bldg.insert(*id, center);\n\n query.insert(center);\n\n }\n\n\n\n // equiv_pos could be a little closer, so use two buffers\n\n let sidewalk_buffer = Distance::meters(7.5);\n\n let sidewalk_pts = match_points_to_lanes(\n\n map.get_bounds(),\n", "file_path": "map_model/src/make/buildings.rs", "rank": 85, "score": 173239.44208958163 }, { "content": "// Returns amenities and a mapping of all points to split road. 
(Some internal points on roads are\n\n// removed, so this mapping isn't redundant.)\n\npub fn split_up_roads(\n\n map: &mut RawMap,\n\n mut input: OsmExtract,\n\n timer: &mut Timer,\n\n) -> (\n\n Vec<(Pt2D, String, String)>,\n\n HashMap<HashablePt2D, OriginalRoad>,\n\n) {\n\n timer.start(\"splitting up roads\");\n\n\n\n let mut pt_to_intersection: HashMap<HashablePt2D, OriginalIntersection> = HashMap::new();\n\n let mut counts_per_pt = Counter::new();\n\n for (_, r) in &input.roads {\n\n for (idx, raw_pt) in r.center_points.iter().enumerate() {\n\n let pt = raw_pt.to_hashable();\n\n let count = counts_per_pt.inc(pt);\n\n\n\n // All start and endpoints of ways are also intersections.\n\n if count == 2 || idx == 0 || idx == r.center_points.len() - 1 {\n\n if !pt_to_intersection.contains_key(&pt) {\n", "file_path": "convert_osm/src/split_ways.rs", "rank": 86, "score": 173239.44208958163 }, { "content": "pub fn area(ctx: &EventCtx, app: &App, _: &mut Details, id: AreaID) -> Vec<Widget> {\n\n let mut rows = vec![];\n\n\n\n rows.push(Widget::row(vec![\n\n Line(id.to_string()).small_heading().draw(ctx),\n\n header_btns(ctx),\n\n ]));\n\n\n\n let a = app.primary.map.get_a(id);\n\n rows.extend(make_table(\n\n ctx,\n\n a.osm_tags\n\n .inner()\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string())),\n\n ));\n\n\n\n rows\n\n}\n", "file_path": "game/src/info/debug.rs", "rank": 87, "score": 173072.72592713934 }, { "content": "pub fn path_popdat() -> String {\n\n path(\"input/seattle/popdat.bin\")\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 88, "score": 172475.58047735028 }, { "content": "pub fn path_all_maps() -> String {\n\n path(\"system/maps\")\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 89, "score": 172475.58047735028 }, { "content": "// Only draws a box when time_left is present\n\npub fn draw_signal_phase(\n\n prerender: &Prerender,\n\n phase: &Phase,\n\n i: IntersectionID,\n\n time_left: Option<Duration>,\n\n batch: &mut GeomBatch,\n\n app: 
&App,\n\n signal_style: TrafficSignalStyle,\n\n) {\n\n let signal = app.primary.map.get_traffic_signal(i);\n\n\n\n match signal_style {\n\n TrafficSignalStyle::BAP => {\n\n let mut dont_walk = BTreeSet::new();\n\n let mut crossed_roads = BTreeSet::new();\n\n for g in signal.turn_groups.keys() {\n\n if g.crosswalk {\n\n dont_walk.insert(g);\n\n // TODO This is incorrect; some crosswalks hop over intermediate roads. How do\n\n // we detect or plumb that?\n", "file_path": "game/src/render/traffic_signal.rs", "rank": 90, "score": 171147.8883442144 }, { "content": "pub fn make_change_traffic(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n btn: ScreenRectangle,\n\n current: String,\n\n) -> Box<dyn State> {\n\n let mut choices = Vec::new();\n\n for name in abstutil::list_all_objects(abstutil::path_all_scenarios(app.primary.map.get_name()))\n\n {\n\n if name == \"weekday\" {\n\n choices.push(Choice::new(\"realistic weekday traffic\", name).tooltip(\n\n \"Trips will begin throughout the entire day. Midnight is usually quiet, so you \\\n\n may need to fast-forward to morning rush hour. 
Data comes from Puget Sound \\\n\n Regional Council's Soundcast model.\",\n\n ));\n\n } else {\n\n choices.push(Choice::new(name.clone(), name));\n\n }\n\n }\n\n choices.push(\n", "file_path": "game/src/sandbox/gameplay/freeform.rs", "rank": 91, "score": 171147.8883442144 }, { "content": "// TODO Figure out a nicer API to construct generic sortable tables.\n\npub fn make_table(\n\n ctx: &mut EventCtx,\n\n app: &App,\n\n headers: Vec<Widget>,\n\n rows: Vec<(String, Vec<GeomBatch>)>,\n\n total_width: f64,\n\n) -> Widget {\n\n let total_width = total_width;\n\n let mut width_per_col: Vec<f64> = headers.iter().map(|w| w.get_width_for_forcing()).collect();\n\n for (_, row) in &rows {\n\n for (col, width) in row.iter().zip(width_per_col.iter_mut()) {\n\n *width = width.max(col.get_dims().width);\n\n }\n\n }\n\n let extra_margin = ((total_width - width_per_col.clone().into_iter().sum::<f64>())\n\n / (width_per_col.len() - 1) as f64)\n\n .max(0.0);\n\n\n\n let mut col = vec![Widget::custom_row(\n\n headers\n", "file_path": "game/src/sandbox/dashboards/trip_table.rs", "rank": 92, "score": 171147.8883442144 }, { "content": "pub fn draw_selected_group(\n\n app: &App,\n\n batch: &mut GeomBatch,\n\n g: &DrawTurnGroup,\n\n tg: &TurnGroup,\n\n next_priority: Option<TurnPriority>,\n\n) {\n\n // TODO Refactor this mess. Maybe after things like \"dashed with outline\" can be expressed more\n\n // composably like SVG, using lyon.\n\n let block_color = match next_priority {\n\n Some(TurnPriority::Protected) => {\n\n let green = Color::hex(\"#72CE36\");\n\n let arrow = tg.geom.make_arrow(BIG_ARROW_THICKNESS, ArrowCap::Triangle);\n\n batch.push(green.alpha(0.5), arrow.clone());\n\n if let Ok(p) = arrow.to_outline(Distance::meters(0.1)) {\n\n batch.push(green, p);\n\n }\n\n green\n\n }\n\n Some(TurnPriority::Yield) => {\n", "file_path": "game/src/edit/traffic_signals.rs", "rank": 93, "score": 171147.8883442144 } ]
Rust
crates/witx2/src/interface.rs
yowl/witx-bindgen
9c10658073776e0ea4e3e30cef295cb1e306ecb0
use crate::{ abi::Abi, ast::interface::{Ast, Item}, rewrite_error, }; use anyhow::{bail, Context, Result}; use id_arena::{Arena, Id}; use std::collections::{HashMap, HashSet}; use std::fs; use std::path::{Path, PathBuf}; #[derive(Debug, Clone)] pub struct Interface { pub name: String, pub types: Arena<TypeDef>, pub type_lookup: HashMap<String, TypeId>, pub resources: Arena<Resource>, pub resource_lookup: HashMap<String, ResourceId>, pub interfaces: Arena<Interface>, pub interface_lookup: HashMap<String, InterfaceId>, pub functions: Vec<Function>, pub globals: Vec<Global>, } pub type TypeId = Id<TypeDef>; pub type ResourceId = Id<Resource>; pub type InterfaceId = Id<Interface>; #[derive(Debug, Clone)] pub struct TypeDef { pub docs: Docs, pub kind: TypeDefKind, pub name: Option<String>, pub foreign_module: Option<String>, } #[derive(Debug, Clone)] pub enum TypeDefKind { Record(Record), Variant(Variant), List(Type), Pointer(Type), ConstPointer(Type), PushBuffer(Type), PullBuffer(Type), Type(Type), } #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub enum Type { U8, U16, U32, U64, S8, S16, S32, S64, F32, F64, Char, CChar, Usize, Handle(ResourceId), Id(TypeId), } #[derive(PartialEq, Debug, Copy, Clone)] pub enum Int { U8, U16, U32, U64, } #[derive(Debug, Clone)] pub struct Record { pub fields: Vec<Field>, pub kind: RecordKind, } #[derive(Copy, Clone, Debug)] pub enum RecordKind { Other, Flags(Option<Int>), Tuple, } #[derive(Debug, Clone)] pub struct Field { pub docs: Docs, pub name: String, pub ty: Type, } impl Record { pub fn is_tuple(&self) -> bool { matches!(self.kind, RecordKind::Tuple) } pub fn is_flags(&self) -> bool { matches!(self.kind, RecordKind::Flags(_)) } pub fn num_i32s(&self) -> usize { (self.fields.len() + 31) / 32 } } impl RecordKind { pub fn infer(types: &Arena<TypeDef>, fields: &[Field]) -> RecordKind { if fields.is_empty() { return RecordKind::Other; } if fields.iter().all(|t| is_bool(&t.ty, types)) { return RecordKind::Flags(None); } if fields 
.iter() .enumerate() .all(|(i, m)| m.name.as_str().parse().ok() == Some(i)) { return RecordKind::Tuple; } return RecordKind::Other; fn is_bool(t: &Type, types: &Arena<TypeDef>) -> bool { match t { Type::Id(v) => match &types[*v].kind { TypeDefKind::Variant(v) => v.is_bool(), TypeDefKind::Type(t) => is_bool(t, types), _ => false, }, _ => false, } } } } #[derive(Debug, Clone)] pub struct Variant { pub cases: Vec<Case>, pub tag: Int, } #[derive(Debug, Clone)] pub struct Case { pub docs: Docs, pub name: String, pub ty: Option<Type>, } impl Variant { pub fn infer_tag(cases: usize) -> Int { match cases { n if n <= u8::max_value() as usize => Int::U8, n if n <= u16::max_value() as usize => Int::U16, n if n <= u32::max_value() as usize => Int::U32, n if n <= u64::max_value() as usize => Int::U64, _ => panic!("too many cases to fit in a repr"), } } pub fn is_bool(&self) -> bool { self.cases.len() == 2 && self.cases[0].name == "false" && self.cases[1].name == "true" && self.cases[0].ty.is_none() && self.cases[1].ty.is_none() } pub fn is_enum(&self) -> bool { self.cases.iter().all(|c| c.ty.is_none()) } pub fn as_option(&self) -> Option<&Type> { if self.cases.len() != 2 { return None; } if self.cases[0].name != "none" || self.cases[0].ty.is_some() { return None; } if self.cases[1].name != "some" { return None; } self.cases[1].ty.as_ref() } pub fn as_expected(&self) -> Option<(Option<&Type>, Option<&Type>)> { if self.cases.len() != 2 { return None; } if self.cases[0].name != "ok" { return None; } if self.cases[1].name != "err" { return None; } Some((self.cases[0].ty.as_ref(), self.cases[1].ty.as_ref())) } } #[derive(Clone, Default, Debug)] pub struct Docs { pub contents: Option<String>, } impl<'a, T, U> From<T> for Docs where T: ExactSizeIterator<Item = U>, U: AsRef<str>, { fn from(iter: T) -> Self { if iter.len() == 0 { return Self { contents: None }; } let mut docs = String::new(); for doc in iter { let doc = doc.as_ref(); if let Some(doc) = doc.strip_prefix("//") { 
docs.push_str(doc.trim_start_matches('/').trim()); } else { assert!(doc.starts_with("/*")); assert!(doc.ends_with("*/")); for line in doc[2..doc.len() - 2].lines() { docs.push_str(line); docs.push('\n'); } } } Self { contents: Some(docs), } } } #[derive(Debug, Clone)] pub struct Resource { pub docs: Docs, pub name: String, pub foreign_module: Option<String>, } #[derive(Debug, Clone)] pub struct Global { pub docs: Docs, pub name: String, pub ty: Type, } #[derive(Debug, Clone)] pub struct Function { pub abi: Abi, pub is_async: bool, pub docs: Docs, pub name: String, pub kind: FunctionKind, pub params: Vec<(String, Type)>, pub results: Vec<(String, Type)>, } #[derive(Debug, Clone)] pub enum FunctionKind { Freestanding, Static { resource: ResourceId, name: String }, Method { resource: ResourceId, name: String }, } impl Function { pub fn item_name(&self) -> &str { match &self.kind { FunctionKind::Freestanding => &self.name, FunctionKind::Static { name, .. } => name, FunctionKind::Method { name, .. 
} => name, } } } impl Interface { pub fn parse(name: &str, input: &str) -> Result<Interface> { Interface::parse_with(name, input, |name| { bail!("cannot load interface `{}`", name) }) } pub fn parse_file(path: impl AsRef<Path>) -> Result<Interface> { let path = path.as_ref(); let parent = path.parent().unwrap(); let contents = std::fs::read_to_string(&path) .with_context(|| format!("failed to read interface `{}`", path.display()))?; Interface::parse_with(path, &contents, |name| load_fs(parent, name)) } pub fn parse_with( path: impl AsRef<Path>, contents: &str, mut load: impl FnMut(&str) -> Result<(PathBuf, String)>, ) -> Result<Interface> { Interface::_parse_with( path.as_ref(), contents, &mut load, &mut HashSet::new(), &mut HashMap::new(), ) } fn _parse_with( path: &Path, contents: &str, load: &mut dyn FnMut(&str) -> Result<(PathBuf, String)>, visiting: &mut HashSet<PathBuf>, map: &mut HashMap<String, Interface>, ) -> Result<Interface> { let ast = match Ast::parse(contents) { Ok(ast) => ast, Err(mut e) => { let file = path.display().to_string(); rewrite_error(&mut e, &file, contents); return Err(e); } }; if !visiting.insert(path.to_path_buf()) { bail!("file `{}` recursively imports itself", path.display()) } for item in ast.items.iter() { let u = match item { Item::Use(u) => u, _ => continue, }; if map.contains_key(&*u.from[0].name) { continue; } let (path, contents) = load(&u.from[0].name) ?; let instance = Self::_parse_with(&path, &contents, load, visiting, map)?; map.insert(u.from[0].name.to_string(), instance); } visiting.remove(path); let name = path.file_stem().unwrap().to_str().unwrap(); match ast.resolve(name, map) { Ok(i) => Ok(i), Err(mut e) => { let file = path.display().to_string(); rewrite_error(&mut e, &file, contents); Err(e) } } } pub fn topological_types(&self) -> Vec<TypeId> { let mut ret = Vec::new(); let mut visited = HashSet::new(); for (id, _) in self.types.iter() { self.topo_visit(id, &mut ret, &mut visited); } ret } fn topo_visit(&self, id: 
TypeId, list: &mut Vec<TypeId>, visited: &mut HashSet<TypeId>) { if !visited.insert(id) { return; } match &self.types[id].kind { TypeDefKind::Type(t) | TypeDefKind::List(t) | TypeDefKind::PushBuffer(t) | TypeDefKind::PullBuffer(t) | TypeDefKind::Pointer(t) | TypeDefKind::ConstPointer(t) => self.topo_visit_ty(t, list, visited), TypeDefKind::Record(r) => { for f in r.fields.iter() { self.topo_visit_ty(&f.ty, list, visited); } } TypeDefKind::Variant(v) => { for v in v.cases.iter() { if let Some(ty) = &v.ty { self.topo_visit_ty(ty, list, visited); } } } } list.push(id); } fn topo_visit_ty(&self, ty: &Type, list: &mut Vec<TypeId>, visited: &mut HashSet<TypeId>) { if let Type::Id(id) = ty { self.topo_visit(*id, list, visited); } } pub fn all_bits_valid(&self, ty: &Type) -> bool { match ty { Type::U8 | Type::S8 | Type::U16 | Type::S16 | Type::U32 | Type::S32 | Type::U64 | Type::S64 | Type::F32 | Type::F64 | Type::CChar | Type::Usize => true, Type::Char | Type::Handle(_) => false, Type::Id(id) => match &self.types[*id].kind { TypeDefKind::List(_) | TypeDefKind::Variant(_) | TypeDefKind::PushBuffer(_) | TypeDefKind::PullBuffer(_) => false, TypeDefKind::Type(t) => self.all_bits_valid(t), TypeDefKind::Record(r) => r.fields.iter().all(|f| self.all_bits_valid(&f.ty)), TypeDefKind::Pointer(_) | TypeDefKind::ConstPointer(_) => true, }, } } pub fn has_preview1_pointer(&self, ty: &Type) -> bool { match ty { Type::Id(id) => match &self.types[*id].kind { TypeDefKind::List(t) | TypeDefKind::PushBuffer(t) | TypeDefKind::PullBuffer(t) => { self.has_preview1_pointer(t) } TypeDefKind::Type(t) => self.has_preview1_pointer(t), TypeDefKind::Pointer(_) | TypeDefKind::ConstPointer(_) => true, TypeDefKind::Record(r) => r.fields.iter().any(|f| self.has_preview1_pointer(&f.ty)), TypeDefKind::Variant(v) => v.cases.iter().any(|c| match &c.ty { Some(ty) => self.has_preview1_pointer(ty), None => false, }), }, _ => false, } } } fn load_fs(root: &Path, name: &str) -> Result<(PathBuf, String)> { let 
path = root.join(name).with_extension("witx"); let contents = fs::read_to_string(&path).context(format!("failed to read `{}`", path.display()))?; Ok((path, contents)) }
use crate::{ abi::Abi, ast::interface::{Ast, Item}, rewrite_error, }; use anyhow::{bail, Context, Result}; use id_arena::{Arena, Id}; use std::collections::{HashMap, HashSet}; use std::fs; use std::path::{Path, PathBuf}; #[derive(Debug, Clone)] pub struct Interface { pub name: String, pub types: Arena<TypeDef>, pub type_lookup: HashMap<String, TypeId>, pub resources: Arena<Resource>, pub resource_lookup: HashMap<String, ResourceId>, pub interfaces: Arena<Interface>, pub interface_lookup: HashMap<String, InterfaceId>, pub functions: Vec<Function>, pub globals: Vec<Global>, } pub type TypeId = Id<TypeDef>; pub type ResourceId = Id<Resource>; pub type InterfaceId = Id<Interface>; #[derive(Debug, Clone)] pub struct TypeDef { pub docs: Docs, pub kind: TypeDefKind, pub name: Option<String>, pub foreign_module: Option<String>, } #[derive(Debug, Clone)] pub enum TypeDefKind { Record(Record), Variant(Variant), List(Type), Pointer(Type), ConstPointer(Type), PushBuffer(Type), PullBuffer(Type), Type(Type), } #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub enum Type { U8, U16, U32, U64, S8, S16, S32, S64, F32, F64, Char, CChar, Usize, Handle(ResourceId), Id(TypeId), } #[derive(PartialEq, Debug, Copy, Clone)] pub enum Int { U8, U16, U32, U64, } #[derive(Debug, Clone)] pub struct Record { pub fields: Vec<Field>, pub kind: RecordKind, } #[derive(Copy, Clone, Debug)] pub enum RecordKind { Other, Flags(Option<Int>), Tuple, } #[derive(Debug, Clone)] pub struct Field { pub docs: Docs, pub name: String, pub ty: Type, } impl Record { pub fn is_tuple(&self) -> bool { matches!(self.kind, RecordKind::Tuple) } pub fn is_flags(&self) -> bool { matches!(self.kind, RecordKind::Flags(_)) } pub fn num_i32s(&self) -> usize { (self.fields.len() + 31) / 32 } } impl RecordKind { pub fn infer(types: &Arena<TypeDef>, fields: &[Field]) -> RecordKind { if fields.is_empty() { return RecordKind::Other; } if fields.iter().all(|t| is_bool(&t.ty, types)) { return RecordKind::Flags(None); } if fields 
.iter() .enumerate() .all(|(i, m)| m.name.as_str().parse().ok() == Some(i)) { return RecordKind::Tuple; } return RecordKind::Other; fn is_bool(t: &Type, types: &Arena<TypeDef>) -> bool { match t { Type::Id(v) => match &types[*v].kind { TypeDefKind::Variant(v) => v.is_bool(), TypeDefKind::Type(t) => is_bool(t, types), _ => false, }, _ => false, } } } } #[derive(Debug, Clone)] pub struct Variant { pub cases: Vec<Case>, pub tag: Int, } #[derive(Debug, Clone)] pub struct Case { pub docs: Docs, pub name: String, pub ty: Option<Type>, } impl Variant { pub fn infer_tag(cases: usize) -> Int { match cases { n if n <= u8::max_value() as usize => Int::U8, n if n <= u16::max_value() as usize => Int::U16, n if n <= u32::max_value() as usize => Int::U32, n if n <= u64::max_value() as usize => Int::U64, _ => panic!("too many cases to fit in a repr"), } } pub fn is_bool(&self) -> bool { self.cases.len() == 2 && self.cases[0].name == "false" && self.cases[1].name == "true" && self.cases[0].ty.is_none() && self.cases[1].ty.is_none() } pub fn is_enum(&self) -> bool { self.cases.iter().all(|c| c.ty.is_none()) } pub fn as_option(&self) -> Option<&Type> { if self.cases.len() != 2 { return None; } if self.cases[0].name != "none" || self.cases[0].ty.is_some() { return None; } if self.cases[1].name != "some" { return None; } self.cases[1].ty.as_ref() } pub fn as_expected(&self) -> Option<(Option<&Type>, Option<&Type>)> { if self.cases.len() != 2 { return None; } if self.cases[0].name != "ok" { return None; } if self.cases[1].name != "err" { return None; } Some((self.cases[0].ty.as_ref(), self.cases[1].ty.as_ref())) } } #[derive(Clone, Default, Debug)] pub struct Docs { pub contents: Option<String>, } impl<'a, T, U> From<T> for Docs where T: ExactSizeIterator<Item = U>, U: AsRef<str>, { fn from(iter: T) -> Self { if iter.len() == 0 { return Self { contents: None }; } let mut docs = String::new(); for doc in iter { let doc = doc.as_ref(); if let Some(doc) = doc.strip_prefix("//") { 
docs.push_str(doc.trim_start_matches('/').trim()); } else { assert!(doc.starts_with("/*")); assert!(doc.ends_with("*/")); for line in doc[2..doc.len() - 2].lines() { docs.push_str(line); docs.push('\n'); } } } Self { contents: Some(docs), } } } #[derive(Debug, Clone)] pub struct Resource { pub docs: Docs, pub name: String, pub foreign_module: Option<String>, } #[derive(Debug, Clone)] pub struct Global { pub docs: Docs, pub name: String, pub ty: Type, } #[derive(Debug, Clone)] pub struct Function { pub abi: Abi, pub is_async: bool, pub docs: Docs, pub name: String, pub kind: FunctionKind, pub params: Vec<(String, Type)>, pub results: Vec<(String, Type)>, } #[derive(Debug, Clone)] pub enum FunctionKind { Freestanding, Static { resource: ResourceId, name: String }, Method { resource: ResourceId, name: String }, } impl Function { pub fn item_name(&self) -> &str { match &self.kind { FunctionKind::Freestanding => &self.name, FunctionKind::Static { name, .. } => name, FunctionKind::Method { name, .. } => name, } } } impl Interface { pub fn parse(name: &str, input: &str) -> Result<Interface> { Interface::parse_with(name, input, |name| { bail!("cannot load interface `{}`", name) }) } pub fn parse_file(path: impl AsRef<Path>) -> Result<Interface> {
pub fn parse_with( path: impl AsRef<Path>, contents: &str, mut load: impl FnMut(&str) -> Result<(PathBuf, String)>, ) -> Result<Interface> { Interface::_parse_with( path.as_ref(), contents, &mut load, &mut HashSet::new(), &mut HashMap::new(), ) } fn _parse_with( path: &Path, contents: &str, load: &mut dyn FnMut(&str) -> Result<(PathBuf, String)>, visiting: &mut HashSet<PathBuf>, map: &mut HashMap<String, Interface>, ) -> Result<Interface> { let ast = match Ast::parse(contents) { Ok(ast) => ast, Err(mut e) => { let file = path.display().to_string(); rewrite_error(&mut e, &file, contents); return Err(e); } }; if !visiting.insert(path.to_path_buf()) { bail!("file `{}` recursively imports itself", path.display()) } for item in ast.items.iter() { let u = match item { Item::Use(u) => u, _ => continue, }; if map.contains_key(&*u.from[0].name) { continue; } let (path, contents) = load(&u.from[0].name) ?; let instance = Self::_parse_with(&path, &contents, load, visiting, map)?; map.insert(u.from[0].name.to_string(), instance); } visiting.remove(path); let name = path.file_stem().unwrap().to_str().unwrap(); match ast.resolve(name, map) { Ok(i) => Ok(i), Err(mut e) => { let file = path.display().to_string(); rewrite_error(&mut e, &file, contents); Err(e) } } } pub fn topological_types(&self) -> Vec<TypeId> { let mut ret = Vec::new(); let mut visited = HashSet::new(); for (id, _) in self.types.iter() { self.topo_visit(id, &mut ret, &mut visited); } ret } fn topo_visit(&self, id: TypeId, list: &mut Vec<TypeId>, visited: &mut HashSet<TypeId>) { if !visited.insert(id) { return; } match &self.types[id].kind { TypeDefKind::Type(t) | TypeDefKind::List(t) | TypeDefKind::PushBuffer(t) | TypeDefKind::PullBuffer(t) | TypeDefKind::Pointer(t) | TypeDefKind::ConstPointer(t) => self.topo_visit_ty(t, list, visited), TypeDefKind::Record(r) => { for f in r.fields.iter() { self.topo_visit_ty(&f.ty, list, visited); } } TypeDefKind::Variant(v) => { for v in v.cases.iter() { if let Some(ty) = 
&v.ty { self.topo_visit_ty(ty, list, visited); } } } } list.push(id); } fn topo_visit_ty(&self, ty: &Type, list: &mut Vec<TypeId>, visited: &mut HashSet<TypeId>) { if let Type::Id(id) = ty { self.topo_visit(*id, list, visited); } } pub fn all_bits_valid(&self, ty: &Type) -> bool { match ty { Type::U8 | Type::S8 | Type::U16 | Type::S16 | Type::U32 | Type::S32 | Type::U64 | Type::S64 | Type::F32 | Type::F64 | Type::CChar | Type::Usize => true, Type::Char | Type::Handle(_) => false, Type::Id(id) => match &self.types[*id].kind { TypeDefKind::List(_) | TypeDefKind::Variant(_) | TypeDefKind::PushBuffer(_) | TypeDefKind::PullBuffer(_) => false, TypeDefKind::Type(t) => self.all_bits_valid(t), TypeDefKind::Record(r) => r.fields.iter().all(|f| self.all_bits_valid(&f.ty)), TypeDefKind::Pointer(_) | TypeDefKind::ConstPointer(_) => true, }, } } pub fn has_preview1_pointer(&self, ty: &Type) -> bool { match ty { Type::Id(id) => match &self.types[*id].kind { TypeDefKind::List(t) | TypeDefKind::PushBuffer(t) | TypeDefKind::PullBuffer(t) => { self.has_preview1_pointer(t) } TypeDefKind::Type(t) => self.has_preview1_pointer(t), TypeDefKind::Pointer(_) | TypeDefKind::ConstPointer(_) => true, TypeDefKind::Record(r) => r.fields.iter().any(|f| self.has_preview1_pointer(&f.ty)), TypeDefKind::Variant(v) => v.cases.iter().any(|c| match &c.ty { Some(ty) => self.has_preview1_pointer(ty), None => false, }), }, _ => false, } } } fn load_fs(root: &Path, name: &str) -> Result<(PathBuf, String)> { let path = root.join(name).with_extension("witx"); let contents = fs::read_to_string(&path).context(format!("failed to read `{}`", path.display()))?; Ok((path, contents)) }
let path = path.as_ref(); let parent = path.parent().unwrap(); let contents = std::fs::read_to_string(&path) .with_context(|| format!("failed to read interface `{}`", path.display()))?; Interface::parse_with(path, &contents, |name| load_fs(parent, name)) }
function_block-function_prefix_line
[ { "content": "pub fn case_name(id: &str) -> String {\n\n if id.chars().next().unwrap().is_alphabetic() {\n\n id.to_camel_case()\n\n } else {\n\n format!(\"V{}\", id)\n\n }\n\n}\n\n\n", "file_path": "crates/gen-rust/src/lib.rs", "rank": 0, "score": 443626.0483821046 }, { "content": "pub fn int_repr(repr: Int) -> &'static str {\n\n match repr {\n\n Int::U8 => \"u8\",\n\n Int::U16 => \"u16\",\n\n Int::U32 => \"u32\",\n\n Int::U64 => \"u64\",\n\n }\n\n}\n\n\n", "file_path": "crates/gen-rust/src/lib.rs", "rank": 1, "score": 432125.57869076764 }, { "content": "fn int_repr(ty: Int) -> &'static str {\n\n match ty {\n\n Int::U8 => \"uint8_t\",\n\n Int::U16 => \"uint16_t\",\n\n Int::U32 => \"uint32_t\",\n\n Int::U64 => \"uint64_t\",\n\n }\n\n}\n\n\n", "file_path": "crates/gen-c/src/lib.rs", "rank": 2, "score": 420055.90952802874 }, { "content": "pub fn wasm_type(ty: WasmType) -> &'static str {\n\n match ty {\n\n WasmType::I32 => \"i32\",\n\n WasmType::I64 => \"i64\",\n\n WasmType::F32 => \"f32\",\n\n WasmType::F64 => \"f64\",\n\n }\n\n}\n\n\n", "file_path": "crates/gen-rust/src/lib.rs", "rank": 4, "score": 392553.6229359857 }, { "content": "pub fn rewrite_error(err: &mut anyhow::Error, file: &str, contents: &str) {\n\n let lex = match err.downcast_mut::<Error>() {\n\n Some(err) => err,\n\n None => return,\n\n };\n\n let pos = match lex {\n\n Error::Unexpected(at, _)\n\n | Error::UnterminatedComment(at)\n\n | Error::Wanted { at, .. 
}\n\n | Error::UnterminatedString(at)\n\n | Error::NewlineInString(at)\n\n | Error::InvalidCharInString(at, _)\n\n | Error::InvalidEscape(at, _) => *at,\n\n };\n\n\n\n let msg = highlight_err(pos, None, file, contents, lex);\n\n *err = anyhow::anyhow!(\"{}\", msg);\n\n}\n\n\n", "file_path": "crates/witx2/src/lex.rs", "rank": 5, "score": 381719.9477981034 }, { "content": "fn is_char(interface: &witx2::Interface, ty: &Type) -> bool {\n\n match ty {\n\n Type::Char => true,\n\n Type::Id(id) => match &interface.types[*id].kind {\n\n TypeDefKind::Type(t) => is_char(interface, t),\n\n _ => false,\n\n },\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "crates/wasmlink/src/adapter/call.rs", "rank": 6, "score": 370894.6881340808 }, { "content": "fn import_kind(ty: ImportSectionEntryType) -> &'static str {\n\n match ty {\n\n ImportSectionEntryType::Function(_) => \"function\",\n\n ImportSectionEntryType::Table(_) => \"table\",\n\n ImportSectionEntryType::Memory(_) => \"memory\",\n\n ImportSectionEntryType::Event(_) => {\n\n unimplemented!(\"event imports are not implemented\")\n\n }\n\n ImportSectionEntryType::Global(_) => \"global\",\n\n ImportSectionEntryType::Module(_) => \"module\",\n\n ImportSectionEntryType::Instance(_) => \"instance\",\n\n }\n\n}\n\n\n\npub(crate) fn export_kind(kind: ExternalKind) -> &'static str {\n\n match kind {\n\n ExternalKind::Function => \"function\",\n\n ExternalKind::Table => \"table\",\n\n ExternalKind::Memory => \"memory\",\n\n ExternalKind::Event => unimplemented!(\"event exports are not implemented\"),\n\n ExternalKind::Global => \"global\",\n\n ExternalKind::Type => unimplemented!(\"type exports are not implemented\"),\n\n ExternalKind::Module => \"module\",\n\n ExternalKind::Instance => \"instance\",\n\n }\n\n}\n\n\n", "file_path": "crates/wasmlink/src/module.rs", "rank": 7, "score": 366305.05040766695 }, { "content": "pub fn load(path: impl AsRef<Path>) -> Result<Interface> {\n\n 
Interface::parse_file(path)\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Source {\n\n s: String,\n\n indent: usize,\n\n}\n\n\n\nimpl Source {\n\n pub fn push_str(&mut self, src: &str) {\n\n let lines = src.lines().collect::<Vec<_>>();\n\n for (i, line) in lines.iter().enumerate() {\n\n let trimmed = line.trim();\n\n if trimmed.starts_with(\"}\") && self.s.ends_with(\" \") {\n\n self.s.pop();\n\n self.s.pop();\n\n }\n\n self.s.push_str(if lines.len() == 1 {\n", "file_path": "crates/gen-core/src/lib.rs", "rank": 8, "score": 365247.4347112315 }, { "content": "pub fn to_rust_ident(name: &str) -> String {\n\n match name {\n\n \"in\" => \"in_\".into(),\n\n \"type\" => \"type_\".into(),\n\n \"where\" => \"where_\".into(),\n\n \"yield\" => \"yield_\".into(),\n\n \"async\" => \"async_\".into(),\n\n \"self\" => \"self_\".into(),\n\n s => s.to_snake_case(),\n\n }\n\n}\n\n\n", "file_path": "crates/gen-rust/src/lib.rs", "rank": 9, "score": 359255.02804284357 }, { "content": "fn case_field_name(case: &Case) -> String {\n\n if case.name.parse::<u32>().is_ok() {\n\n format!(\"f{}\", case.name)\n\n } else {\n\n case.name.to_snake_case()\n\n }\n\n}\n", "file_path": "crates/gen-c/src/lib.rs", "rank": 10, "score": 354233.30899599305 }, { "content": "fn wasm_type(ty: WasmType) -> &'static str {\n\n match ty {\n\n WasmType::I32 => \"int32_t\",\n\n WasmType::I64 => \"int64_t\",\n\n WasmType::F32 => \"float\",\n\n WasmType::F64 => \"double\",\n\n }\n\n}\n\n\n", "file_path": "crates/gen-c/src/lib.rs", "rank": 11, "score": 348481.07885006827 }, { "content": "fn wasm_ty_typing(ty: WasmType) -> &'static str {\n\n match ty {\n\n WasmType::I32 => \"int\",\n\n WasmType::I64 => \"int\",\n\n WasmType::F32 => \"float\",\n\n WasmType::F64 => \"float\",\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Source;\n\n\n\n #[test]\n\n fn simple_append() {\n\n let mut s = Source::default();\n\n s.push_str(\"x\");\n\n assert_eq!(s.s, \"x\");\n\n s.push_str(\"y\");\n\n assert_eq!(s.s, \"xy\");\n", 
"file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 12, "score": 346389.09993452614 }, { "content": "fn parse_interface(s: &str) -> Result<(String, PathBuf)> {\n\n match s.split_once('=') {\n\n Some((name, path)) => Ok((name.into(), path.into())),\n\n None => bail!(\"expected a value with format `NAME=INTERFACE`\"),\n\n }\n\n}\n\n\n\n/// WebAssembly module linker.\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"wasmlink\", version = env!(\"CARGO_PKG_VERSION\"), global_settings = &[\n\n AppSettings::VersionlessSubcommands,\n\n AppSettings::ColoredHelp,\n\n AppSettings::ArgRequiredElseHelp,\n\n])]\n\npub struct App {\n\n /// A transitive imported module to the module being linked.\n\n #[structopt(long = \"module\", short = \"m\", value_name = \"NAME=MODULE\", parse(try_from_str = parse_module), required = true, min_values = 1)]\n\n pub modules: Vec<(String, PathBuf)>,\n\n\n\n /// The path to an interface definition file for an imported module.\n", "file_path": "crates/wasmlink-cli/src/lib.rs", "rank": 13, "score": 344610.2486400001 }, { "content": "fn rewrite_error(err: &mut anyhow::Error, file: &str, contents: &str) {\n\n #[cfg(feature = \"old-witx-compat\")]\n\n if let Some(err) = err.downcast_mut::<wast::Error>() {\n\n err.set_path(file.as_ref());\n\n err.set_text(contents);\n\n return;\n\n }\n\n let parse = match err.downcast_mut::<Error>() {\n\n Some(err) => err,\n\n None => return lex::rewrite_error(err, file, contents),\n\n };\n\n let msg = crate::lex::highlight_err(\n\n parse.span.start as usize,\n\n Some(parse.span.end as usize),\n\n file,\n\n contents,\n\n &parse.msg,\n\n );\n\n *err = anyhow::anyhow!(\"{}\", msg);\n\n}\n", "file_path": "crates/witx2/src/lib.rs", "rank": 14, "score": 337016.2289110194 }, { "content": "fn wasm_ty_ctor(ty: WasmType) -> &'static str {\n\n match ty {\n\n WasmType::I32 => \"wasmtime.ValType.i32()\",\n\n WasmType::I64 => \"wasmtime.ValType.i64()\",\n\n WasmType::F32 => \"wasmtime.ValType.f32()\",\n\n WasmType::F64 => 
\"wasmtime.ValType.f64()\",\n\n }\n\n}\n\n\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 15, "score": 333738.3114758625 }, { "content": "fn has_list(interface: &witx2::Interface, ty: &witx2::Type) -> bool {\n\n use witx2::{Type, TypeDefKind};\n\n\n\n match ty {\n\n Type::Id(id) => match &interface.types[*id].kind {\n\n TypeDefKind::List(_) => true,\n\n TypeDefKind::Type(t) => has_list(interface, t),\n\n TypeDefKind::Record(r) => r.fields.iter().any(|f| has_list(interface, &f.ty)),\n\n TypeDefKind::Variant(v) => v.cases.iter().any(|c| {\n\n c.ty.as_ref()\n\n .map(|t| has_list(interface, t))\n\n .unwrap_or(false)\n\n }),\n\n _ => false,\n\n },\n\n _ => false,\n\n }\n\n}\n\n\n\npub(crate) struct FunctionInfo {\n", "file_path": "crates/wasmlink/src/module.rs", "rank": 16, "score": 324204.24714359397 }, { "content": "fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {\n\n match tokens.next()? {\n\n Some((span, Token::Id)) => Ok(Id {\n\n name: tokens.get_span(span).into(),\n\n span,\n\n }),\n\n Some((span, Token::StrLit)) => Ok(Id {\n\n name: tokens.parse_str(span).into(),\n\n span,\n\n }),\n\n other => {\n\n let (span, msg) = tokens.format_expected_error(\"an identifier or string\", other);\n\n bail!(Error { span, msg })\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/witx2/src/ast/interface.rs", "rank": 17, "score": 319412.34737411304 }, { "content": "pub fn to_js_ident(name: &str) -> &str {\n\n match name {\n\n \"in\" => \"in_\",\n\n \"import\" => \"import_\",\n\n s => s,\n\n }\n\n}\n\n\n", "file_path": "crates/gen-js/src/lib.rs", "rank": 18, "score": 316843.5713419871 }, { "content": "pub fn bitcast(casts: &[Bitcast], operands: &[String], results: &mut Vec<String>) {\n\n for (cast, operand) in casts.iter().zip(operands) {\n\n results.push(match cast {\n\n Bitcast::None => operand.clone(),\n\n Bitcast::F32ToF64 => format!(\"f64::from({})\", operand),\n\n Bitcast::I32ToI64 => format!(\"i64::from({})\", operand),\n\n Bitcast::F32ToI32 => 
format!(\"({}).to_bits() as i32\", operand),\n\n Bitcast::F64ToI64 => format!(\"({}).to_bits() as i64\", operand),\n\n Bitcast::F64ToF32 => format!(\"{} as f32\", operand),\n\n Bitcast::I64ToI32 => format!(\"{} as i32\", operand),\n\n Bitcast::I32ToF32 => format!(\"f32::from_bits({} as u32)\", operand),\n\n Bitcast::I64ToF64 => format!(\"f64::from_bits({} as u64)\", operand),\n\n Bitcast::F32ToI64 => format!(\"i64::from(({}).to_bits())\", operand),\n\n Bitcast::I64ToF32 => format!(\"f32::from_bits({} as u32)\", operand),\n\n });\n\n }\n\n}\n", "file_path": "crates/gen-rust/src/lib.rs", "rank": 19, "score": 310712.80354646745 }, { "content": "fn parse_opt_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Option<Id<'a>>> {\n\n let mut other = tokens.clone();\n\n match other.next()? {\n\n Some((span, Token::Id)) => {\n\n *tokens = other;\n\n Ok(Some(Id {\n\n name: tokens.get_span(span).into(),\n\n span,\n\n }))\n\n }\n\n Some((span, Token::StrLit)) => {\n\n *tokens = other;\n\n Ok(Some(Id {\n\n name: tokens.parse_str(span).into(),\n\n span,\n\n }))\n\n }\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "crates/witx2/src/ast/interface.rs", "rank": 20, "score": 305629.77050642646 }, { "content": "fn parse_module(s: &str) -> Result<(String, PathBuf)> {\n\n match s.split_once('=') {\n\n Some((name, path)) => Ok((name.into(), path.into())),\n\n None => bail!(\"expected a value with format `NAME=MODULE`\"),\n\n }\n\n}\n\n\n", "file_path": "crates/wasmlink-cli/src/lib.rs", "rank": 21, "score": 303571.7235416267 }, { "content": "fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {\n\n let mut docs = Docs::default();\n\n let mut clone = tokens.clone();\n\n while let Some((span, token)) = clone.next_raw()? 
{\n\n match token {\n\n Token::Whitespace => {}\n\n Token::Comment => docs.docs.push(tokens.get_span(span).into()),\n\n _ => break,\n\n };\n\n *tokens = clone.clone();\n\n }\n\n Ok(docs)\n\n}\n\n\n\nimpl<'a> Type<'a> {\n\n fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {\n\n match tokens.next()? {\n\n Some((_span, Token::U8)) => Ok(Type::U8),\n\n Some((_span, Token::U16)) => Ok(Type::U16),\n\n Some((_span, Token::U32)) => Ok(Type::U32),\n", "file_path": "crates/witx2/src/ast/interface.rs", "rank": 22, "score": 302165.8929498628 }, { "content": "pub trait Token: Eq + PartialEq + Copy + Clone {\n\n fn whitespace() -> Self;\n\n fn comment() -> Self;\n\n fn string() -> Self;\n\n fn parse(start: usize, ch: char, tokenizer: &mut Tokenizer<'_, Self>) -> Result<Self, Error>;\n\n fn ignored(&self) -> bool;\n\n fn describe(&self) -> &'static str;\n\n}\n\n\n", "file_path": "crates/witx2/src/lex.rs", "rank": 23, "score": 300138.6056678456 }, { "content": "fn adapt(name: &str, bytes: &[u8], witx_path: &Path) -> Result<wasm_encoder::Module> {\n\n let module = Module::new(\n\n name,\n\n bytes,\n\n if witx_path.is_file() {\n\n vec![witx2::Interface::parse_file(witx_path)?]\n\n } else {\n\n Vec::new()\n\n },\n\n )?;\n\n\n\n let mut next_resource_id = 0;\n\n let adapter = ModuleAdapter::new(&module, &mut next_resource_id);\n\n\n\n adapter.adapt()\n\n}\n\n\n", "file_path": "crates/wasmlink/tests/run.rs", "rank": 24, "score": 300036.9153115255 }, { "content": "pub fn to_val_type(ty: &Type) -> wasm_encoder::ValType {\n\n match ty {\n\n Type::I32 => wasm_encoder::ValType::I32,\n\n Type::I64 => wasm_encoder::ValType::I64,\n\n Type::F32 => wasm_encoder::ValType::F32,\n\n Type::F64 => wasm_encoder::ValType::F64,\n\n Type::V128 => wasm_encoder::ValType::V128,\n\n Type::FuncRef => wasm_encoder::ValType::FuncRef,\n\n Type::ExternRef => wasm_encoder::ValType::ExternRef,\n\n Type::ExnRef | Type::Func | Type::EmptyBlockType => {\n\n unimplemented!(\"unsupported value type\")\n\n }\n\n 
}\n\n}\n\n\n\n/// Represents a linked module built from a dependency graph.\n", "file_path": "crates/wasmlink/src/linker.rs", "rank": 25, "score": 296789.66468874103 }, { "content": "/// Runs the `future` provided to completion, polling the future whenever its\n\n/// waker receives a call to `wake`.\n\npub fn execute(future: impl Future<Output = ()> + 'static) {\n\n let waker = Arc::new(PollingWaker {\n\n state: RefCell::new(State::Waiting(Box::pin(future))),\n\n });\n\n waker.wake()\n\n}\n\n\n\nimpl Wake for PollingWaker {\n\n fn wake(self: Arc<Self>) {\n\n let mut state = self.state.borrow_mut();\n\n let mut future = match mem::replace(&mut *state, State::Polling) {\n\n // We are the first wake to come in to wake-up this future. This\n\n // means that we need to actually poll the future, so leave the\n\n // `Polling` state in place.\n\n State::Waiting(future) => future,\n\n\n\n // Otherwise the future is either already polling or it was already\n\n // woken while it was being polled, in both instances we reset the\n\n // state back to `Woken` and then we return. 
This means that the\n\n // future is owned by some previous stack frame and will drive the\n", "file_path": "crates/rust-wasm/src/futures.rs", "rank": 26, "score": 276024.9969323339 }, { "content": "#[proc_macro]\n\npub fn import(input: TokenStream) -> TokenStream {\n\n run(input, Direction::Import)\n\n}\n\n\n\n/// Generate code to support implementing the given interfaces and exporting\n\n/// them to wasm modules.\n", "file_path": "crates/wasmtime-impl/src/lib.rs", "rank": 27, "score": 266375.734493443 }, { "content": "#[proc_macro]\n\npub fn export(input: TokenStream) -> TokenStream {\n\n run(input, Direction::Export)\n\n}\n\n\n", "file_path": "crates/wasmtime-impl/src/lib.rs", "rank": 28, "score": 266375.734493443 }, { "content": "#[proc_macro]\n\npub fn import(input: TokenStream) -> TokenStream {\n\n run(input, Direction::Import)\n\n}\n\n\n", "file_path": "crates/rust-wasm-impl/src/lib.rs", "rank": 29, "score": 262206.6529196223 }, { "content": "#[proc_macro]\n\npub fn export(input: TokenStream) -> TokenStream {\n\n run(input, Direction::Export)\n\n}\n\n\n", "file_path": "crates/rust-wasm-impl/src/lib.rs", "rank": 30, "score": 262206.6529196223 }, { "content": "fn verify(dir: &str, name: &str) {\n\n let dir = Path::new(dir);\n\n let path = PathBuf::from(env::var_os(\"WASI_SDK_PATH\").unwrap());\n\n let mut cmd = Command::new(path.join(\"bin/clang\"));\n\n cmd.arg(\"--sysroot\").arg(path.join(\"share/wasi-sysroot\"));\n\n cmd.arg(dir.join(format!(\"{}.c\", name)));\n\n cmd.arg(\"-I\").arg(dir);\n\n cmd.arg(\"-Wall\")\n\n .arg(\"-Wextra\")\n\n .arg(\"-Werror\")\n\n .arg(\"-Wno-unused-parameter\");\n\n cmd.arg(\"-c\");\n\n cmd.arg(\"-o\").arg(dir.join(\"obj.o\"));\n\n\n\n println!(\"{:?}\", cmd);\n\n let output = match cmd.output() {\n\n Ok(output) => output,\n\n Err(e) => panic!(\"failed to spawn compiler: {}\", e),\n\n };\n\n\n", "file_path": "crates/gen-c/tests/codegen.rs", "rank": 31, "score": 258716.41799422738 }, { "content": "fn sorted_iter<K: Ord, V>(map: 
&HashMap<K, V>) -> impl Iterator<Item = (&K, &V)> {\n\n let mut list = map.into_iter().collect::<Vec<_>>();\n\n list.sort_by_key(|p| p.0);\n\n list.into_iter()\n\n}\n", "file_path": "crates/gen-wasmtime/src/lib.rs", "rank": 32, "score": 257572.81363875518 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum LoadType {\n\n I32_8U,\n\n I32_16U,\n\n I32,\n\n I64,\n\n}\n\n\n\nimpl From<witx2::Int> for LoadType {\n\n fn from(i: witx2::Int) -> Self {\n\n match i {\n\n witx2::Int::U8 => Self::I32_8U,\n\n witx2::Int::U16 => Self::I32_16U,\n\n witx2::Int::U32 => Self::I32,\n\n witx2::Int::U64 => Self::I64,\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/wasmlink/src/adapter/call.rs", "rank": 33, "score": 257475.63218719605 }, { "content": "enum ConfigField {\n\n Interfaces(Vec<witx2::Interface>),\n\n Async(witx_bindgen_gen_wasmtime::Async),\n\n CustomError(bool),\n\n}\n\n\n\nimpl Parse for ConfigField {\n\n fn parse(input: ParseStream<'_>) -> Result<Self> {\n\n let l = input.lookahead1();\n\n if l.peek(kw::src) {\n\n input.parse::<kw::src>()?;\n\n let name;\n\n syn::bracketed!(name in input);\n\n let name = name.parse::<syn::LitStr>()?;\n\n input.parse::<Token![:]>()?;\n\n let s = input.parse::<syn::LitStr>()?;\n\n let interface = witx2::Interface::parse(&name.value(), &s.value())\n\n .map_err(|e| Error::new(s.span(), e))?;\n\n Ok(ConfigField::Interfaces(vec![interface]))\n\n } else if l.peek(kw::paths) {\n", "file_path": "crates/wasmtime-impl/src/lib.rs", "rank": 34, "score": 257445.45594243787 }, { "content": "// Files written in this proc-macro are loaded as source code in Rust. This is\n\n// done to assist with compiler error messages so there's an actual file to go\n\n// look at, but this causes issues with mtime-tracking in Cargo since it appears\n\n// to Cargo that a file was modified after the build started, which causes Cargo\n\n// to rebuild on subsequent builds. 
All our dependencies are tracked via the\n\n// inputs to the proc-macro itself, so there's no need for Cargo to track these\n\n// files, so we specifically set the mtime of the file to something older to\n\n// prevent triggering rebuilds.\n\nfn write_old_file(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) {\n\n let path = path.as_ref();\n\n fs::write(path, contents).unwrap();\n\n let now = filetime::FileTime::from_system_time(SystemTime::now() - Duration::from_secs(600));\n\n filetime::set_file_mtime(path, now).unwrap();\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 35, "score": 255934.03424771453 }, { "content": "enum Input {\n\n Bytes(*const u8, usize),\n\n General {\n\n shim: unsafe fn([usize; 2], *const u8, &Memory, i32, u32, &mut u32) -> Result<(), Trap>,\n\n iterator: [usize; 2],\n\n serialize: *const u8,\n\n },\n\n}\n\n\n", "file_path": "crates/wasmtime/src/imports.rs", "rank": 36, "score": 255817.00189066643 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n exports.test_imports(&mut store)?;\n\n assert_eq!(exports.multiple_results(&mut store,)?, (100, 200));\n\n assert_eq!(exports.swap_tuple(&mut store, (1u8, 2u32))?, (2u32, 1u8));\n\n assert_eq!(exports.roundtrip_flags1(&mut store, F1::A)?, F1::A);\n\n assert_eq!(\n\n exports.roundtrip_flags1(&mut store, F1::empty())?,\n\n F1::empty()\n\n );\n\n assert_eq!(exports.roundtrip_flags1(&mut store, F1::B)?, F1::B);\n\n assert_eq!(\n\n exports.roundtrip_flags1(&mut store, F1::A | F1::B)?,\n", "file_path": "tests/runtime/records/host.rs", "rank": 37, "score": 255267.6368123567 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n 
wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n exports.test_imports(&mut store)?;\n\n\n\n assert_eq!(exports.roundtrip_option(&mut store, Some(1.0))?, Some(1));\n\n assert_eq!(exports.roundtrip_option(&mut store, None)?, None);\n\n assert_eq!(exports.roundtrip_option(&mut store, Some(2.0))?, Some(2));\n\n assert_eq!(exports.roundtrip_result(&mut store, Ok(2))?, Ok(2.0));\n\n assert_eq!(exports.roundtrip_result(&mut store, Ok(4))?, Ok(4.0));\n\n assert_eq!(exports.roundtrip_result(&mut store, Err(5.3))?, Err(5));\n\n\n\n assert_eq!(exports.roundtrip_enum(&mut store, E1::A)?, E1::A);\n\n assert_eq!(exports.roundtrip_enum(&mut store, E1::B)?, E1::B);\n", "file_path": "tests/runtime/variants/host.rs", "rank": 38, "score": 255242.57956161557 }, { "content": "fn verify(dir: &str, name: &str) {\n\n let (cmd, args) = if cfg!(windows) {\n\n (\"cmd.exe\", &[\"/c\", \"npx.cmd\"] as &[&str])\n\n } else {\n\n (\"npx\", &[] as &[&str])\n\n };\n\n\n\n let status = Command::new(cmd)\n\n .args(args)\n\n .arg(\"eslint\")\n\n .arg(\"-c\")\n\n .arg(\".eslintrc.js\")\n\n .arg(Path::new(dir).join(&format!(\"{}.js\", name)))\n\n .status()\n\n .unwrap();\n\n assert!(status.success());\n\n}\n", "file_path": "crates/gen-js/tests/codegen.rs", "rank": 39, "score": 255134.4158552073 }, { "content": "fn to_trap(err: impl std::error::Error + Send + Sync + 'static) -> Trap {\n\n Trap::from(Box::new(err) as Box<dyn std::error::Error + Send + Sync>)\n\n}\n\n\n\nimpl<'a> BorrowChecker<'a> {\n\n pub fn new(data: &'a mut [u8]) -> BorrowChecker<'a> {\n\n BorrowChecker {\n\n ptr: data.as_mut_ptr(),\n\n len: data.len(),\n\n shared_borrows: Default::default(),\n\n mut_borrows: Default::default(),\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n\n\n pub fn slice<T: AllBytesValid>(&mut self, ptr: i32, len: i32) -> Result<&'a [T], Trap> {\n\n let 
(ret, r) = self.get_slice(ptr, len)?;\n\n // SAFETY: We're promoting the valid lifetime of `ret` from a temporary\n\n // borrow on `self` to `'a` on this `BorrowChecker`. At the same time\n\n // we're recording that this is a persistent shared borrow (until this\n", "file_path": "crates/wasmtime/src/region.rs", "rank": 40, "score": 253767.6734255264 }, { "content": "enum ConfigField {\n\n Interfaces(Vec<witx2::Interface>),\n\n Unchecked,\n\n MultiModule,\n\n}\n\n\n\nimpl Parse for ConfigField {\n\n fn parse(input: ParseStream<'_>) -> Result<Self> {\n\n let l = input.lookahead1();\n\n if l.peek(kw::src) {\n\n input.parse::<kw::src>()?;\n\n let name;\n\n syn::bracketed!(name in input);\n\n let name = name.parse::<syn::LitStr>()?;\n\n input.parse::<Token![:]>()?;\n\n let s = input.parse::<syn::LitStr>()?;\n\n let interface = witx2::Interface::parse(&name.value(), &s.value())\n\n .map_err(|e| Error::new(s.span(), e))?;\n\n Ok(ConfigField::Interfaces(vec![interface]))\n\n } else if l.peek(kw::paths) {\n", "file_path": "crates/rust-wasm-impl/src/lib.rs", "rank": 41, "score": 252214.10588606683 }, { "content": " def clone(self) -> '{name}':\n\n self._refcnt += 1\n\n return self\n\n\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 42, "score": 252184.96814978553 }, { "content": "fn is_keylike(ch: char) -> bool {\n\n ch == '_'\n\n || ch == '-'\n\n || ('A'..='Z').contains(&ch)\n\n || ('a'..='z').contains(&ch)\n\n || ('0'..='9').contains(&ch)\n\n}\n\n\n", "file_path": "crates/witx2/src/lex.rs", "rank": 43, "score": 247016.01239782033 }, { "content": "fn run(wasm: &str) -> anyhow::Result<()> {\n\n let (exports, mut store) = crate::instantiate_smw(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut Host { &mut cx.imports }),\n\n |store, module, linker| {\n\n exports::Exports::instantiate(store, module, linker, |cx| &mut cx.exports)\n\n },\n\n )?;\n\n\n\n // Test that the import instance called the functions we made available with\n\n // the expected 
arguments.\n\n\n\n exports.test_imports(&mut store)?;\n\n\n\n assert_eq!(store.data().imports.f1_s, \"Hello, WITX!\");\n\n\n\n assert!(store.data().imports.f2_called, \"JS should have called `f2`\");\n\n\n\n assert_eq!(store.data().imports.f3_a, \"\");\n\n assert_eq!(store.data().imports.f3_b, \"🚀\");\n", "file_path": "tests/runtime/smw_strings/host.rs", "rank": 44, "score": 240665.48775177664 }, { "content": "fn run(wasm: &str) -> anyhow::Result<()> {\n\n let (exports, mut store) = crate::instantiate_smw(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut Host { &mut cx.imports }),\n\n |store, module, linker| {\n\n exports::Exports::instantiate(store, module, linker, |cx| &mut cx.exports)\n\n },\n\n )?;\n\n\n\n // Test that the import instance called the functions we made available with\n\n // the expected arguments.\n\n\n\n exports.test_imports(&mut store)?;\n\n\n\n assert!(\n\n store.data().imports.f1_called,\n\n \"top-level JS imported and called `f1`\",\n\n );\n\n\n\n assert_eq!(\n", "file_path": "tests/runtime/smw_functions/host.rs", "rank": 45, "score": 240641.95799575263 }, { "content": "pub fn guard() -> impl Drop {\n\n struct A(usize);\n\n\n\n impl Drop for A {\n\n fn drop(&mut self) {\n\n assert_eq!(get(), self.0);\n\n }\n\n }\n\n\n\n A(get())\n\n}\n", "file_path": "crates/test-rust-wasm/src/lib.rs", "rank": 46, "score": 238905.4246861328 }, { "content": "pub fn highlight_err(\n\n start: usize,\n\n end: Option<usize>,\n\n file: &str,\n\n input: &str,\n\n err: impl fmt::Display,\n\n) -> String {\n\n let (line, col) = linecol_in(start, input);\n\n let snippet = input.lines().nth(line).unwrap_or(\"\");\n\n let mut msg = format!(\n\n \"\\\n\n{err}\n\n --> {file}:{line}:{col}\n\n |\n\n {line:4} | {snippet}\n\n | {marker:>0$}\",\n\n col + 1,\n\n file = file,\n\n line = line + 1,\n\n col = col + 1,\n", "file_path": "crates/witx2/src/lex.rs", "rank": 47, "score": 237166.37734939164 }, { "content": "fn int_size_align(i: Int) -> (usize, usize) 
{\n\n match i {\n\n Int::U8 => (1, 1),\n\n Int::U16 => (2, 2),\n\n Int::U32 => (4, 4),\n\n Int::U64 => (8, 8),\n\n }\n\n}\n\n\n", "file_path": "crates/witx2/src/sizealign.rs", "rank": 48, "score": 233396.79711304686 }, { "content": "fn verify(dir: &str, _name: &str) {\n\n let wasm = std::fs::read(Path::new(dir).join(\"foo.wasm\")).unwrap();\n\n let mut validator = wasmparser::Validator::new();\n\n validator.wasm_features(wasmparser::WasmFeatures {\n\n bulk_memory: true,\n\n module_linking: true,\n\n multi_memory: true,\n\n ..wasmparser::WasmFeatures::default()\n\n });\n\n validator.validate_all(&wasm).expect(\"wasm isn't valid\");\n\n}\n", "file_path": "crates/gen-spidermonkey/tests/codegen-smw.rs", "rank": 49, "score": 228440.59497213387 }, { "content": "fn verify(dir: &str, _name: &str) {\n\n let output = Command::new(\"mypy\")\n\n .arg(Path::new(dir).join(\"bindings.py\"))\n\n .arg(\"--config-file\")\n\n .arg(\"mypy.ini\")\n\n .output()\n\n .expect(\"failed to run `mypy`; do you have it installed?\");\n\n if output.status.success() {\n\n return;\n\n }\n\n panic!(\n\n \"mypy failed\n\n\n\nstatus: {status}\n\n\n\nstdout ---\n\n{stdout}\n\n\n\nstderr ---\n\n{stderr}\",\n\n status = output.status,\n\n stdout = String::from_utf8_lossy(&output.stdout).replace(\"\\n\", \"\\n\\t\"),\n\n stderr = String::from_utf8_lossy(&output.stderr).replace(\"\\n\", \"\\n\\t\"),\n\n );\n\n}\n", "file_path": "crates/gen-wasmtime-py/tests/codegen.rs", "rank": 50, "score": 228440.59497213387 }, { "content": "struct ResourceFunction {\n\n name: String,\n\n ty: &'static FuncType,\n\n type_index: u32,\n\n exported: bool,\n\n}\n\n\n", "file_path": "crates/wasmlink/src/resources.rs", "rank": 51, "score": 226269.51101262233 }, { "content": "fn default_config() -> Result<Config> {\n\n // Create an engine with caching enabled to assist with iteration in this\n\n // project.\n\n let mut config = Config::new();\n\n config.cache_config_load_default()?;\n\n 
config.wasm_backtrace_details(wasmtime::WasmBacktraceDetails::Enable);\n\n Ok(config)\n\n}\n\n\n", "file_path": "crates/gen-wasmtime/tests/runtime.rs", "rank": 52, "score": 222593.57011635078 }, { "content": "pub fn get() -> usize {\n\n ALLOC_AMT.load(SeqCst)\n\n}\n\n\n", "file_path": "crates/test-rust-wasm/src/lib.rs", "rank": 53, "score": 222529.3855624793 }, { "content": " def __init__(self, ptr: int, len: int, size: int, write: Callable) -> None:\n\n self.ptr = ptr\n\n self.len = len\n\n self.size = size\n\n self.write = write\n\n\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 54, "score": 222180.3924444762 }, { "content": " def __init__(self, ptr: int, len: int, size: int, read: Callable) -> None:\n\n self.len = len\n\n self.ptr = ptr\n\n self.size = size\n\n self.read = read\n\n\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 55, "score": 222180.3924444762 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-c\")]\n\npub fn codegen_c_export(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Export, \"export\", || {\n\n witx_bindgen_gen_c::Opts::default().build()\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 56, "score": 221879.61941486638 }, { "content": "#[proc_macro]\n\npub fn runtime_tests(input: TokenStream) -> TokenStream {\n\n let host_extension = input.to_string();\n\n let host_extension = host_extension.trim_matches('\"');\n\n let host_file = format!(\"host.{}\", host_extension);\n\n let mut tests = Vec::new();\n\n let cwd = std::env::current_dir().unwrap();\n\n for entry in std::fs::read_dir(cwd.join(\"tests/runtime\")).unwrap() {\n\n let entry = entry.unwrap().path();\n\n if !entry.join(&host_file).exists() {\n\n continue;\n\n }\n\n let name_str = entry.file_name().unwrap().to_str().unwrap();\n\n for (lang, name, wasm) in WASMS {\n\n if *name != name_str {\n\n continue;\n\n }\n\n let name_str = format!(\"{}_{}\", name_str, lang);\n\n let name = 
quote::format_ident!(\"{}\", name_str);\n\n let host_file = entry.join(&host_file).to_str().unwrap().to_string();\n\n let import_witx = entry.join(\"imports.witx\").to_str().unwrap().to_string();\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 57, "score": 221879.61941486638 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-c\")]\n\npub fn codegen_c_import(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Import, \"import\", || {\n\n witx_bindgen_gen_c::Opts::default().build()\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 58, "score": 221879.61941486638 }, { "content": "fn convert_ty(ty: WasmType) -> wasm_encoder::ValType {\n\n match ty {\n\n WasmType::I32 => wasm_encoder::ValType::I32,\n\n WasmType::I64 => wasm_encoder::ValType::I64,\n\n WasmType::F32 => wasm_encoder::ValType::F32,\n\n WasmType::F64 => wasm_encoder::ValType::F64,\n\n }\n\n}\n\n\n", "file_path": "crates/gen-spidermonkey/src/lib.rs", "rank": 59, "score": 221048.47055630945 }, { "content": "fn to_json(i: &witx2::Interface) -> String {\n\n #[derive(Serialize)]\n\n struct Interface {\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n resources: Vec<Resource>,\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n types: Vec<TypeDef>,\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n functions: Vec<Function>,\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n globals: Vec<Global>,\n\n }\n\n\n\n #[derive(Serialize)]\n\n struct Resource {\n\n name: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n foreign_module: Option<String>,\n\n }\n\n\n", "file_path": "crates/witx2/tests/all.rs", "rank": 60, "score": 220944.16572432808 }, { "content": "fn pop_wasm(operands: &mut Vec<Operand>) -> u32 {\n\n match operands.pop() {\n\n Some(op) => op.unwrap_wasm(),\n\n None => panic!(\"`pop_wasm` with an empty stack\"),\n\n }\n\n}\n\n\n", "file_path": "crates/gen-spidermonkey/src/lib.rs", "rank": 61, 
"score": 220010.46255133167 }, { "content": "fn pop_js(operands: &mut Vec<Operand>) -> u32 {\n\n match operands.pop() {\n\n Some(op) => op.unwrap_js(),\n\n None => panic!(\"`pop_js` with an empty stack\"),\n\n }\n\n}\n\n\n", "file_path": "crates/gen-spidermonkey/src/lib.rs", "rank": 62, "score": 220010.46255133167 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-wasmtime\")]\n\npub fn codegen_wasmtime_export(input: TokenStream) -> TokenStream {\n\n gen_rust(\n\n input,\n\n Direction::Export,\n\n &[\n\n (\n\n \"export\",\n\n || witx_bindgen_gen_wasmtime::Opts::default().build(),\n\n |_| quote::quote!(),\n\n ),\n\n (\n\n \"export-tracing-and-custom-error\",\n\n || {\n\n let mut opts = witx_bindgen_gen_wasmtime::Opts::default();\n\n opts.tracing = true;\n\n opts.custom_error = true;\n\n opts.build()\n\n },\n\n |_| quote::quote!(),\n\n ),\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 63, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-spidermonkey\")]\n\npub fn codegen_spidermonkey_import(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Import, \"import\", || {\n\n let mut gen = witx_bindgen_gen_spidermonkey::SpiderMonkeyWasm::new(\"foo.js\", \"\");\n\n gen.import_spidermonkey(true);\n\n gen\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 64, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-wasmtime-py\")]\n\npub fn codegen_py_export(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Export, \"export\", || {\n\n witx_bindgen_gen_wasmtime_py::Opts::default().build()\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 65, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-js\")]\n\npub fn codegen_js_import(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Import, \"import\", || {\n\n 
witx_bindgen_gen_js::Opts::default().build()\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 66, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-spidermonkey\")]\n\npub fn codegen_spidermonkey_export(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Export, \"export\", || {\n\n let mut gen = witx_bindgen_gen_spidermonkey::SpiderMonkeyWasm::new(\"foo.js\", \"\");\n\n gen.import_spidermonkey(true);\n\n gen\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 67, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-wasmtime-py\")]\n\npub fn codegen_py_import(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Import, \"import\", || {\n\n witx_bindgen_gen_wasmtime_py::Opts::default().build()\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 68, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-js\")]\n\npub fn codegen_js_export(input: TokenStream) -> TokenStream {\n\n gen_verify(input, Direction::Export, \"export\", || {\n\n witx_bindgen_gen_js::Opts::default().build()\n\n })\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 69, "score": 218716.11864622677 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-wasmtime\")]\n\npub fn codegen_wasmtime_import(input: TokenStream) -> TokenStream {\n\n gen_rust(\n\n input,\n\n Direction::Import,\n\n &[\n\n (\n\n \"import\",\n\n || witx_bindgen_gen_wasmtime::Opts::default().build(),\n\n |_| quote::quote!(),\n\n ),\n\n (\n\n \"import-async\",\n\n || {\n\n let mut opts = witx_bindgen_gen_wasmtime::Opts::default();\n\n opts.async_ = witx_bindgen_gen_wasmtime::Async::All;\n\n opts.build()\n\n },\n\n |_| quote::quote!(),\n\n ),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 70, "score": 218716.11864622677 }, { 
"content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-rust-wasm\")]\n\npub fn codegen_rust_wasm_import(input: TokenStream) -> TokenStream {\n\n gen_rust(\n\n input,\n\n Direction::Import,\n\n &[\n\n (\n\n \"import\",\n\n || witx_bindgen_gen_rust_wasm::Opts::default().build(),\n\n |_| quote::quote!(),\n\n ),\n\n (\n\n \"import-unchecked\",\n\n || {\n\n let mut opts = witx_bindgen_gen_rust_wasm::Opts::default();\n\n opts.unchecked = true;\n\n opts.build()\n\n },\n\n |_| quote::quote!(),\n\n ),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 71, "score": 215680.70750343753 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-rust-wasm\")]\n\npub fn codegen_rust_wasm_export(input: TokenStream) -> TokenStream {\n\n use heck::*;\n\n use std::collections::BTreeMap;\n\n use witx2::{FunctionKind, Type, TypeDefKind};\n\n\n\n return gen_rust(\n\n input,\n\n Direction::Export,\n\n &[\n\n (\n\n \"export\",\n\n || witx_bindgen_gen_rust_wasm::Opts::default().build(),\n\n gen_extra,\n\n ),\n\n (\n\n \"export-unchecked\",\n\n || {\n\n let mut opts = witx_bindgen_gen_rust_wasm::Opts::default();\n\n opts.unchecked = true;\n\n opts.symbol_namespace = \"unchecked\".to_string();\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 72, "score": 215680.70750343753 }, { "content": "/// This is the direction from the user's perspective. 
Are we importing\n\n/// functions to call, or defining functions and exporting them to be called?\n\n///\n\n/// In the wasmtime bindings, this is the opposite of the witx2::abi::Direction\n\n/// value, because these bindings work differently from other bindings:\n\n///\n\n/// In a wasm-calling-wasm use case, one wasm module would use the `Import`\n\n/// ABI, the other would use the `Export` ABI, and there would be an adapter\n\n/// layer between the two that translates from one ABI to the other.\n\n///\n\n/// With wasm-calling-host, we don't go through a separate adapter layer;\n\n/// the binding code we generate on the host side just does everything\n\n/// itself. So when the host is conceptually \"exporting\" a function to\n\n/// wasm, it uses the `Import` ABI so that wasm can also use the `Import`\n\n/// ABI and import it directly from the host.\n\n///\n\n/// These are all implementation details; from the user perspective, it's\n\n/// just: `export` means I'm exporting functions to be called, `import`\n\n/// means I'm importing functions that I'm going to call, in both wasm\n\n/// modules and host code. The enum here represents this user perspective.\n\nenum Direction {\n\n Import,\n\n Export,\n\n}\n\n\n\n/// Generate code to support consuming the given interfaces, importaing them\n\n/// from wasm modules.\n", "file_path": "crates/wasmtime-impl/src/lib.rs", "rank": 73, "score": 215576.25058545673 }, { "content": " def __init__(self, val: int, obj: '{iface}') -> None:\n\n self._wasm_val = val\n\n self._refcnt = 1\n\n self._obj = obj\n\n self._destroyed = False\n\n\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 74, "score": 212665.67853276076 }, { "content": "enum FunctionRet {\n\n /// The function return is normal and needs to extra handling.\n\n Normal,\n\n /// The function return was wrapped in a `Result` in Rust. 
The `Ok` variant\n\n /// is the actual value that will be lowered, and the `Err`, if present,\n\n /// means that a trap has occurred.\n\n CustomToTrap,\n\n /// The function returns a `Result` in both wasm and in Rust, but the\n\n /// Rust error type is a custom error and must be converted to `err`. The\n\n /// `ok` variant payload is provided here too.\n\n CustomToError { ok: Option<Type>, err: String },\n\n}\n\n\n\nimpl Wasmtime {\n\n pub fn new() -> Wasmtime {\n\n Wasmtime::default()\n\n }\n\n\n\n fn print_intrinsics(&mut self) {\n\n if self.needs_raw_mem {\n", "file_path": "crates/gen-wasmtime/src/lib.rs", "rank": 75, "score": 211211.46525139242 }, { "content": "enum NeededFunction {\n\n Realloc,\n\n Free,\n\n}\n\n\n", "file_path": "crates/gen-wasmtime/src/lib.rs", "rank": 76, "score": 211211.46525139242 }, { "content": " def _validate_guest_char(i: int) -> str:\n\n if i > 0x10ffff or (i >= 0xd800 and i <= 0xdfff):\n\n raise TypeError('not a valid char');\n\n return chr(i)\n\n \",\n\n );\n\n }\n\n if self.needs_expected {\n\n self.pyimport(\"dataclasses\", \"dataclass\");\n\n self.pyimport(\"typing\", \"TypeVar\");\n\n self.pyimport(\"typing\", \"Generic\");\n\n self.pyimport(\"typing\", \"Union\");\n\n self.needs_t_typevar = true;\n\n self.src.push_str(\n\n \"\n\n @dataclass\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 77, "score": 210762.62322532362 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n exports.test_imports(&mut store)?;\n\n\n\n exports.list_in_record1(\n\n &mut store,\n\n ListInRecord1 {\n\n a: \"list_in_record1\",\n\n },\n\n )?;\n\n assert_eq!(exports.list_in_record2(&mut store)?.a, \"list_in_record2\");\n\n\n\n assert_eq!(\n", "file_path": 
"tests/runtime/flavorful/host.rs", "rank": 78, "score": 210259.05986669846 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n exports.test_imports(&mut store)?;\n\n // fn buffers(wasm: &Wasm<Context>) -> Result<()> {\n\n // let mut out = [0; 10];\n\n // let n = wasm.buffer_u8(&[0u8], &mut out)? as usize;\n\n // assert_eq!(n, 3);\n\n // assert_eq!(&out[..n], [1, 2, 3]);\n\n // assert!(out[n..].iter().all(|x| *x == 0));\n\n\n\n // let mut out = [0; 10];\n\n // let n = wasm.buffer_u32(&[0], &mut out)? as usize;\n\n // assert_eq!(n, 3);\n", "file_path": "tests/runtime/buffers/host.rs", "rank": 79, "score": 210259.05986669846 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| {\n\n exports::Exports::instantiate(store, module, linker, |cx| &mut cx.exports)\n\n },\n\n )?;\n\n\n\n exports.test_imports(&mut store)?;\n\n assert_eq!(exports.roundtrip_u8(&mut store, 1)?, 1);\n\n assert_eq!(\n\n exports.roundtrip_u8(&mut store, u8::min_value())?,\n\n u8::min_value()\n\n );\n\n assert_eq!(\n\n exports.roundtrip_u8(&mut store, u8::max_value())?,\n\n u8::max_value()\n\n );\n\n\n", "file_path": "tests/runtime/numbers/host.rs", "rank": 80, "score": 210259.05986669846 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| {\n\n imports::add_to_linker(\n\n linker,\n\n |cx: &mut crate::Context<(MyImports, imports::ImportsTables<MyImports>), _>| {\n\n (&mut cx.imports.0, &mut cx.imports.1)\n\n },\n\n )\n\n },\n\n |store, module, linker| 
Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n exports.test_imports(&mut store)?;\n\n\n\n let s: WasmState = exports.wasm_state_create(&mut store)?;\n\n assert_eq!(exports.wasm_state_get_val(&mut store, &s)?, 100);\n", "file_path": "tests/runtime/handles/host.rs", "rank": 81, "score": 210259.05986669846 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| {\n\n imports::add_to_linker(\n\n linker,\n\n |cx: &mut crate::Context<(MyImports, imports::ImportsTables<MyImports>), _>| {\n\n (&mut cx.imports.0, &mut cx.imports.1)\n\n },\n\n )\n\n },\n\n |store, module, linker| Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n assert_err(\n\n exports.invalid_bool(&mut store),\n\n \"invalid discriminant for `bool`\",\n\n )?;\n", "file_path": "tests/runtime/invalid/host.rs", "rank": 82, "score": 210259.05986669846 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| {\n\n exports::Exports::instantiate(store, module, linker, |cx| &mut cx.exports)\n\n },\n\n )?;\n\n\n\n exports.thunk(&mut store)?;\n\n\n\n assert!(store.data().imports.hit);\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/runtime/smoke/host.rs", "rank": 83, "score": 210259.05986669846 }, { "content": "fn run(wasm: &str) -> Result<()> {\n\n use exports::*;\n\n\n\n let (exports, mut store) = crate::instantiate(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),\n\n |store, module, linker| Exports::instantiate(store, module, linker, |cx| &mut cx.exports),\n\n )?;\n\n\n\n let bytes = exports.allocated_bytes(&mut store)?;\n\n exports.test_imports(&mut store)?;\n\n exports.list_param(&mut store, &[1, 2, 3, 4])?;\n\n exports.list_param2(&mut 
store, \"foo\")?;\n\n exports.list_param3(&mut store, &[\"foo\", \"bar\", \"baz\"])?;\n\n exports.list_param4(&mut store, &[&[\"foo\", \"bar\"], &[\"baz\"]])?;\n\n assert_eq!(exports.list_result(&mut store)?, [1, 2, 3, 4, 5]);\n\n assert_eq!(exports.list_result2(&mut store)?, \"hello!\");\n\n assert_eq!(exports.list_result3(&mut store)?, [\"hello,\", \"world!\"]);\n\n assert_eq!(exports.string_roundtrip(&mut store, \"x\")?, \"x\");\n\n assert_eq!(exports.string_roundtrip(&mut store, \"\")?, \"\");\n\n assert_eq!(\n\n exports.string_roundtrip(&mut store, \"hello ⚑ world\")?,\n\n \"hello ⚑ world\"\n\n );\n\n // Ensure that we properly called `free` everywhere in all the glue that we\n\n // needed to.\n\n assert_eq!(bytes, exports.allocated_bytes(&mut store)?);\n\n Ok(())\n\n}\n", "file_path": "tests/runtime/lists/host.rs", "rank": 84, "score": 210259.05986669846 }, { "content": "#[derive(Debug)]\n\nstruct Return {\n\n splat_tuple: bool,\n\n scalar: Option<Scalar>,\n\n retptrs: Vec<Type>,\n\n}\n\n\n", "file_path": "crates/gen-c/src/lib.rs", "rank": 85, "score": 207105.17467113066 }, { "content": "#[test]\n\nfn ok() {}\n\n\n\n#[rustfmt::skip]\n\nmod imports {\n\n test_helpers::codegen_rust_wasm_import!(\n\n \"*.witx\"\n\n\n\n // If you want to exclude a specific test you can include it here with\n\n // gitignore glob syntax:\n\n //\n\n // \"!wasm.witx\"\n\n // \"!host.witx\"\n\n //\n\n //\n\n // Similarly you can also just remove the `*.witx` glob and list tests\n\n // individually if you're debugging.\n\n );\n\n}\n\n\n\nmod exports {\n", "file_path": "crates/gen-rust-wasm/tests/codegen.rs", "rank": 86, "score": 199718.83750807052 }, { "content": "fn unify(a: WasmType, b: WasmType) -> WasmType {\n\n use WasmType::*;\n\n\n\n match (a, b) {\n\n (I64, _) | (_, I64) | (I32, F64) | (F64, I32) => I64,\n\n\n\n (I32, I32) | (I32, F32) | (F32, I32) => I32,\n\n\n\n (F32, F32) => F32,\n\n (F64, F64) | (F32, F64) | (F64, F32) => F64,\n\n }\n\n}\n\n\n\nimpl From<Int> for WasmType 
{\n\n fn from(i: Int) -> WasmType {\n\n match i {\n\n Int::U8 | Int::U16 | Int::U32 => WasmType::I32,\n\n Int::U64 => WasmType::I64,\n\n }\n\n }\n", "file_path": "crates/witx2/src/abi.rs", "rank": 87, "score": 198431.56627100686 }, { "content": "fn run(wasm: &str) -> anyhow::Result<()> {\n\n let (exports, mut store) = crate::instantiate_smw(\n\n wasm,\n\n |linker| imports::add_to_linker(linker, |cx| -> &mut Host { &mut cx.imports }),\n\n |store, module, linker| {\n\n exports::Exports::instantiate(store, module, linker, |cx| &mut cx.exports)\n\n },\n\n )?;\n\n\n\n // Test that the import instance called the functions we made available with\n\n // the expected arguments.\n\n\n\n exports.test_imports(&mut store)?;\n\n\n\n assert_eq!(store.data().imports.f1_l, vec![1, 2, 3]);\n\n\n\n assert!(store.data().imports.f2_called);\n\n\n\n assert_eq!(store.data().imports.f3_a, vec![]);\n\n assert_eq!(store.data().imports.f3_b, vec![1, 2, 3]);\n", "file_path": "tests/runtime/smw_lists/host.rs", "rank": 88, "score": 197584.5852153899 }, { "content": "fn cast(from: WasmType, to: WasmType) -> Bitcast {\n\n use WasmType::*;\n\n\n\n match (from, to) {\n\n (I32, I32) | (I64, I64) | (F32, F32) | (F64, F64) => Bitcast::None,\n\n\n\n (I32, I64) => Bitcast::I32ToI64,\n\n (F32, F64) => Bitcast::F32ToF64,\n\n (F32, I32) => Bitcast::F32ToI32,\n\n (F64, I64) => Bitcast::F64ToI64,\n\n\n\n (I64, I32) => Bitcast::I64ToI32,\n\n (F64, F32) => Bitcast::F64ToF32,\n\n (I32, F32) => Bitcast::I32ToF32,\n\n (I64, F64) => Bitcast::I64ToF64,\n\n\n\n (F32, I64) => Bitcast::F32ToI64,\n\n (I64, F32) => Bitcast::I64ToF32,\n\n (F64, I32) | (I32, F64) => unreachable!(),\n\n }\n\n}\n", "file_path": "crates/witx2/src/abi.rs", "rank": 89, "score": 196016.16028742067 }, { "content": "#[proc_macro]\n\n#[cfg(feature = \"witx-bindgen-gen-wasmtime\")]\n\npub fn runtime_tests_wasmtime(_input: TokenStream) -> TokenStream {\n\n let mut tests = Vec::new();\n\n let cwd = std::env::current_dir().unwrap();\n\n for entry in 
std::fs::read_dir(cwd.join(\"tests/runtime\")).unwrap() {\n\n let entry = entry.unwrap().path();\n\n if !entry.join(\"host.rs\").exists() {\n\n continue;\n\n }\n\n let name_str = entry.file_name().unwrap().to_str().unwrap();\n\n for (lang, name, wasm) in WASMS {\n\n if *name != name_str {\n\n continue;\n\n }\n\n let name = quote::format_ident!(\"{}_{}\", name_str, lang);\n\n let host_file = entry.join(\"host.rs\").to_str().unwrap().to_string();\n\n tests.push(quote::quote! {\n\n mod #name {\n\n include!(#host_file);\n\n\n\n #[test]\n", "file_path": "crates/test-helpers/src/lib.rs", "rank": 90, "score": 195528.82080937352 }, { "content": "fn run(input: TokenStream, dir: Direction) -> TokenStream {\n\n let input = syn::parse_macro_input!(input as Opts);\n\n let mut gen = input.opts.build();\n\n let mut files = Files::default();\n\n let (imports, exports) = match dir {\n\n Direction::Import => (input.interfaces, vec![]),\n\n Direction::Export => (vec![], input.interfaces),\n\n };\n\n gen.generate_all(&imports, &exports, &mut files);\n\n\n\n let (_, contents) = files.iter().next().unwrap();\n\n\n\n let contents = std::str::from_utf8(contents).unwrap();\n\n let mut contents = contents.parse::<TokenStream>().unwrap();\n\n\n\n // Include a dummy `include_str!` for any files we read so rustc knows that\n\n // we depend on the contents of those files.\n\n let cwd = std::env::current_dir().unwrap();\n\n for file in input.files.iter() {\n\n contents.extend(\n", "file_path": "crates/wasmtime-impl/src/lib.rs", "rank": 91, "score": 195156.78647559314 }, { "content": " def _store(ty: Any, mem: wasmtime.Memory, store: wasmtime.Storelike, base: int, offset: int, val: Any) -> None:\n\n ptr = (base & 0xffffffff) + offset\n\n if ptr + ctypes.sizeof(ty) > mem.data_len(store):\n\n raise IndexError('out-of-bounds store')\n\n raw_base = mem.data_ptr(store)\n\n c_ptr = ctypes.POINTER(ty)(\n\n ty.from_address(ctypes.addressof(raw_base.contents) + ptr)\n\n )\n\n c_ptr[0] = val\n\n \",\n\n 
);\n\n }\n\n if self.needs_load {\n\n // TODO: this uses native endianness\n\n self.pyimport(\"ctypes\", None);\n\n self.src.push_str(\n\n \"\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 92, "score": 194225.10517612286 }, { "content": "fn run(input: TokenStream, dir: Direction) -> TokenStream {\n\n let input = syn::parse_macro_input!(input as Opts);\n\n let mut gen = input.opts.build();\n\n let mut files = Files::default();\n\n let (imports, exports) = match dir {\n\n Direction::Import => (input.interfaces, vec![]),\n\n Direction::Export => (vec![], input.interfaces),\n\n };\n\n gen.generate_all(&imports, &exports, &mut files);\n\n let (_, contents) = files.iter().next().unwrap();\n\n let mut contents = std::str::from_utf8(contents).unwrap().to_string();\n\n\n\n // Include a dummy `include_str!` for any files we read so rustc knows that\n\n // we depend on the contents of those files.\n\n let cwd = std::env::current_dir().unwrap();\n\n for file in input.files.iter() {\n\n contents.push_str(&format!(\n\n \"const _: &str = include_str!(r#\\\"{}\\\"#);\\n\",\n\n cwd.join(file).display()\n\n ));\n\n }\n\n\n\n contents.parse().unwrap()\n\n}\n\n\n", "file_path": "crates/rust-wasm-impl/src/lib.rs", "rank": 93, "score": 191992.72634814717 }, { "content": " def _load(ty: Any, mem: wasmtime.Memory, store: wasmtime.Storelike, base: int, offset: int) -> Any:\n\n ptr = (base & 0xffffffff) + offset\n\n if ptr + ctypes.sizeof(ty) > mem.data_len(store):\n\n raise IndexError('out-of-bounds store')\n\n raw_base = mem.data_ptr(store)\n\n c_ptr = ctypes.POINTER(ty)(\n\n ty.from_address(ctypes.addressof(raw_base.contents) + ptr)\n\n )\n\n return c_ptr[0]\n\n \",\n\n );\n\n }\n\n if self.needs_validate_guest_char {\n\n self.src.push_str(\n\n \"\n", "file_path": "crates/gen-wasmtime-py/src/lib.rs", "rank": 94, "score": 189542.97462877436 }, { "content": "fn execute(name: &str, wasm: &Path, ts: &Path, imports: &Path, exports: &Path) {\n\n let mut dir = 
PathBuf::from(env!(\"OUT_DIR\"));\n\n dir.push(name);\n\n drop(fs::remove_dir_all(&dir));\n\n fs::create_dir_all(&dir).unwrap();\n\n\n\n println!(\"OUT_DIR = {:?}\", dir);\n\n println!(\"Generating bindings...\");\n\n let imports = witx_bindgen_gen_core::witx2::Interface::parse_file(imports).unwrap();\n\n let exports = witx_bindgen_gen_core::witx2::Interface::parse_file(exports).unwrap();\n\n let mut files = Default::default();\n\n witx_bindgen_gen_js::Opts::default()\n\n .build()\n\n .generate_all(&[imports], &[exports], &mut files);\n\n for (file, contents) in files.iter() {\n\n fs::write(dir.join(file), contents).unwrap();\n\n }\n\n\n\n let (cmd, args) = if cfg!(windows) {\n\n (\"cmd.exe\", &[\"/c\", \"npx.cmd\"] as &[&str])\n", "file_path": "crates/gen-js/tests/runtime.rs", "rank": 95, "score": 189141.35950659885 }, { "content": "#[test]\n\nfn wasmlink_file_tests() -> Result<()> {\n\n for entry in fs::read_dir(\"tests\")? {\n\n let entry = entry?;\n\n\n\n let path = entry.path();\n\n\n\n match (\n\n path.file_stem().and_then(OsStr::to_str),\n\n path.extension().and_then(OsStr::to_str),\n\n ) {\n\n (Some(stem), Some(\"wat\")) => {\n\n let bytes = parse_file(&path)?;\n\n\n\n let mut witx_path = path.clone();\n\n assert!(witx_path.set_extension(\"witx\"));\n\n\n\n let output = match adapt(stem, &bytes, &witx_path) {\n\n Ok(adapted) => print_bytes(&adapted.finish())?,\n\n Err(e) => e.to_string(),\n\n };\n", "file_path": "crates/wasmlink/tests/run.rs", "rank": 96, "score": 187967.07057559 }, { "content": "fn execute(name: &str, wasm: &Path, py: &Path, imports: &Path, exports: &Path) {\n\n let out_dir = PathBuf::from(env!(\"OUT_DIR\"));\n\n let dir = out_dir.join(name);\n\n drop(fs::remove_dir_all(&dir));\n\n fs::create_dir_all(&dir).unwrap();\n\n fs::create_dir_all(&dir.join(\"imports\")).unwrap();\n\n fs::create_dir_all(&dir.join(\"exports\")).unwrap();\n\n\n\n println!(\"OUT_DIR = {:?}\", dir);\n\n println!(\"Generating bindings...\");\n\n let iface = 
witx_bindgen_gen_core::witx2::Interface::parse_file(imports).unwrap();\n\n let mut files = Default::default();\n\n witx_bindgen_gen_wasmtime_py::Opts::default()\n\n .build()\n\n .generate_all(&[iface], &[], &mut files);\n\n for (file, contents) in files.iter() {\n\n fs::write(dir.join(\"imports\").join(file), contents).unwrap();\n\n }\n\n fs::write(dir.join(\"imports\").join(\"__init__.py\"), \"\").unwrap();\n\n\n", "file_path": "crates/gen-wasmtime-py/tests/runtime.rs", "rank": 97, "score": 186339.62141891778 }, { "content": "fn instantiate<I: Default, E: Default, T>(\n\n wasm: &str,\n\n add_imports: impl FnOnce(&mut Linker<Context<I, E>>) -> Result<()>,\n\n mk_exports: impl FnOnce(\n\n &mut Store<Context<I, E>>,\n\n &Module,\n\n &mut Linker<Context<I, E>>,\n\n ) -> Result<(T, Instance)>,\n\n) -> Result<(T, Store<Context<I, E>>)> {\n\n let engine = Engine::new(&default_config()?)?;\n\n let module = Module::from_file(&engine, wasm)?;\n\n\n\n let mut linker = Linker::new(&engine);\n\n add_imports(&mut linker)?;\n\n wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?;\n\n\n\n let mut store = Store::new(\n\n &engine,\n\n Context {\n\n wasi: default_wasi(),\n\n imports: I::default(),\n\n exports: E::default(),\n\n },\n\n );\n\n let (exports, _instance) = mk_exports(&mut store, &module, &mut linker)?;\n\n Ok((exports, store))\n\n}\n\n\n", "file_path": "crates/gen-wasmtime/tests/runtime.rs", "rank": 98, "score": 185368.64689576748 }, { "content": "struct Context<I, E> {\n\n wasi: wasmtime_wasi::WasiCtx,\n\n imports: I,\n\n exports: E,\n\n}\n\n\n", "file_path": "crates/gen-wasmtime/tests/runtime.rs", "rank": 99, "score": 185338.02527068893 } ]
Rust
kernel/src/drivers/keyboard/codes.rs
andrewimm/imm-dos-nx
2716a4954eae779cdf0596e061d16484d06c3563
#[derive(Copy, Clone)] #[repr(u8)] pub enum KeyCode { None = 0x00, Delete = 0x07, Backspace = 0x08, Tab = 0x09, Enter = 0x0d, Caps = 0x10, Shift = 0x11, Control = 0x12, Menu = 0x13, Alt = 0x14, Escape = 0x1b, Space = 0x20, ArrowLeft = 0x21, ArrowUp = 0x22, ArrowRight = 0x23, ArrowDown = 0x24, Comma = 0x2c, Minus = 0x2d, Period = 0x2e, Slash = 0x2f, Num0 = 0x30, Num1 = 0x31, Num2 = 0x32, Num3 = 0x33, Num4 = 0x34, Num5 = 0x35, Num6 = 0x36, Num7 = 0x37, Num8 = 0x38, Num9 = 0x39, Semicolon = 0x3a, Quote = 0x3b, LessThan = 0x3c, Equals = 0x3d, GreaterThan = 0x3e, A = 0x41, B = 0x42, C = 0x43, D = 0x44, E = 0x45, F = 0x46, G = 0x47, H = 0x48, I = 0x49, J = 0x4a, K = 0x4b, L = 0x4c, M = 0x4d, N = 0x4e, O = 0x4f, P = 0x50, Q = 0x51, R = 0x52, S = 0x53, T = 0x54, U = 0x55, V = 0x56, W = 0x57, X = 0x58, Y = 0x59, Z = 0x5a, BracketLeft = 0x5b, Backslash = 0x5c, BracketRight = 0x5d, Backtick = 0x5f, } pub const US_LAYOUT: [(u8, u8); 0x60] = [ (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0x7f, 0x7f), (0x08, 0x08), (0x09, 0x09), (0, 0), (0, 0), (0, 0), (0x0a, 0x0a), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0x1b, 0x1b), (0, 0), (0, 0), (0, 0), (0, 0), (0x20, 0x20), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0x2c, 0x3c), (0x2d, 0x5f), (0x2e, 0x3e), (0x2f, 0x3f), (0x30, 0x29), (0x31, 0x21), (0x32, 0x40), (0x33, 0x23), (0x34, 0x24), (0x35, 0x25), (0x36, 0x5e), (0x37, 0x26), (0x38, 0x2a), (0x39, 0x28), (0x3b, 0x3a), (0x27, 0x22), (0, 0), (0x3d, 0x2b), (0, 0), (0, 0), (0, 0), (0x61, 0x41), (0x62, 0x42), (0x63, 0x43), (0x64, 0x44), (0x65, 0x45), (0x66, 0x46), (0x67, 0x47), (0x68, 0x48), (0x69, 0x49), (0x6a, 0x4a), (0x6b, 0x4b), (0x6c, 0x4c), (0x6d, 0x4d), (0x6e, 0x4e), (0x6f, 0x4f), (0x70, 0x50), (0x71, 0x51), (0x72, 0x52), (0x73, 0x53), (0x74, 0x54), (0x75, 0x55), (0x76, 0x56), (0x77, 0x57), (0x78, 0x58), (0x79, 0x59), (0x7a, 0x5a), (0x5b, 0x7b), (0x5c, 0x7c), 
(0x5d, 0x7d), (0, 0), (0x60, 0x7e), ]; pub const SCANCODES_TO_KEYCODES: [KeyCode; 60] = [ KeyCode::None, KeyCode::Escape, KeyCode::Num1, KeyCode::Num2, KeyCode::Num3, KeyCode::Num4, KeyCode::Num5, KeyCode::Num6, KeyCode::Num7, KeyCode::Num8, KeyCode::Num9, KeyCode::Num0, KeyCode::Minus, KeyCode::Equals, KeyCode::Backspace, KeyCode::Tab, KeyCode::Q, KeyCode::W, KeyCode::E, KeyCode::R, KeyCode::T, KeyCode::Y, KeyCode::U, KeyCode::I, KeyCode::O, KeyCode::P, KeyCode::BracketLeft, KeyCode::BracketRight, KeyCode::Enter, KeyCode::Control, KeyCode::A, KeyCode::S, KeyCode::D, KeyCode::F, KeyCode::G, KeyCode::H, KeyCode::J, KeyCode::K, KeyCode::L, KeyCode::Semicolon, KeyCode::Quote, KeyCode::Backtick, KeyCode::Shift, KeyCode::Backslash, KeyCode::Z, KeyCode::X, KeyCode::C, KeyCode::V, KeyCode::B, KeyCode::N, KeyCode::M, KeyCode::Comma, KeyCode::Period, KeyCode::Slash, KeyCode::Shift, KeyCode::None, KeyCode::Alt, KeyCode::Space, KeyCode::Caps, KeyCode::None, ]; pub fn get_keycode(scan_code: u8) -> KeyCode { if scan_code < 60 { SCANCODES_TO_KEYCODES[scan_code as usize] } else { KeyCode::None } } pub fn get_extended_keycode(scan_code: u8) -> KeyCode { match scan_code { 0x1c => KeyCode::Enter, 0x48 => KeyCode::ArrowUp, 0x4b => KeyCode::ArrowLeft, 0x4d => KeyCode::ArrowRight, 0x50 => KeyCode::ArrowDown, _ => KeyCode::None, } }
#[derive(Copy, Clone)] #[repr(u8)] pub enum KeyCode { None = 0x00, Delete = 0x07, Backspace = 0x08, Tab = 0x09, Enter = 0x0d, Caps = 0x10, Shift = 0x11, Control = 0x12, Menu = 0x13, Alt = 0x14, Escape = 0x1b, Space = 0x20, ArrowLeft = 0x21, ArrowUp = 0x22, ArrowRight = 0x23, ArrowDown = 0x24, Comma = 0x2c, Minus = 0x2d, Period = 0x2e, Slash = 0x2f, Num0 = 0x30, Num1 = 0x31, Num2 = 0x32, Num3 = 0x33, Num4 = 0x34, Num5 = 0x35, Num6 = 0x36, Num7 = 0x37, Num8 = 0x38, Num9 = 0x39, Semicolon = 0x3a, Quote = 0x3b, LessThan = 0x3c, Equals = 0x3d, GreaterThan = 0x3e, A = 0x41, B = 0x42, C = 0x43, D = 0x44, E = 0x45, F = 0x46, G = 0x47, H = 0x48, I = 0x49, J = 0x4a, K = 0x4b, L = 0x4c, M = 0x4d, N = 0x4e, O = 0x4f, P = 0x50, Q = 0x51, R = 0x52, S = 0x53, T = 0x54, U = 0x55, V = 0x56, W = 0x57, X = 0x58, Y = 0x59, Z = 0x5a, BracketLeft = 0x5b, Backslash = 0x5c, BracketRight = 0x5d, Backtick = 0x5f, } pub const US_LAYOUT: [(u8, u8); 0x60] = [ (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0x7f, 0x7f), (0x08, 0x08), (0x09, 0x09), (0, 0), (0, 0), (0, 0), (0x0a, 0x0a), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0x1b, 0x1b), (0, 0), (0, 0), (0, 0), (0, 0), (0x20, 0x20), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0x2c, 0x3c), (0x2d, 0x5f), (0x2e, 0x3e), (0x2f, 0x3f), (0x30, 0x29), (0x31, 0x21), (0x32, 0x40), (0x33, 0x23), (0x34, 0x24), (0x35, 0x25), (0x36, 0x5e), (0x37, 0x26), (0x38, 0x2a), (0x39, 0x28), (0x3b, 0x3a), (0x27, 0x22), (0, 0), (0x3d, 0x2b), (0, 0), (0, 0), (0, 0), (0x61, 0x41), (0x62, 0x42), (0x63, 0x43), (0x64, 0x44), (0x65, 0x45), (0x66, 0x46), (0x67, 0x47), (0x68, 0x48), (0x69, 0x49), (0x6a, 0x4a), (0x6b, 0x4b), (0x6c, 0x4c), (0x6d, 0x4d), (0x6e, 0x4e), (0x6f, 0x4f), (0x70, 0x50), (0x71, 0x51), (0x72, 0x52), (0x73, 0x53), (0x74, 0x54), (0x75, 0x55), (0x76, 0x56), (0x77, 0x57), (0x78, 0x58), (0x79, 0x59), (0x7a, 0x5a), (0x5b, 0x7b), (0x5c, 0x7c), 
(0x5d, 0x7d), (0, 0), (0x60, 0x7e), ];
eyCode::Enter, 0x48 => KeyCode::ArrowUp, 0x4b => KeyCode::ArrowLeft, 0x4d => KeyCode::ArrowRight, 0x50 => KeyCode::ArrowDown, _ => KeyCode::None, } }
pub const SCANCODES_TO_KEYCODES: [KeyCode; 60] = [ KeyCode::None, KeyCode::Escape, KeyCode::Num1, KeyCode::Num2, KeyCode::Num3, KeyCode::Num4, KeyCode::Num5, KeyCode::Num6, KeyCode::Num7, KeyCode::Num8, KeyCode::Num9, KeyCode::Num0, KeyCode::Minus, KeyCode::Equals, KeyCode::Backspace, KeyCode::Tab, KeyCode::Q, KeyCode::W, KeyCode::E, KeyCode::R, KeyCode::T, KeyCode::Y, KeyCode::U, KeyCode::I, KeyCode::O, KeyCode::P, KeyCode::BracketLeft, KeyCode::BracketRight, KeyCode::Enter, KeyCode::Control, KeyCode::A, KeyCode::S, KeyCode::D, KeyCode::F, KeyCode::G, KeyCode::H, KeyCode::J, KeyCode::K, KeyCode::L, KeyCode::Semicolon, KeyCode::Quote, KeyCode::Backtick, KeyCode::Shift, KeyCode::Backslash, KeyCode::Z, KeyCode::X, KeyCode::C, KeyCode::V, KeyCode::B, KeyCode::N, KeyCode::M, KeyCode::Comma, KeyCode::Period, KeyCode::Slash, KeyCode::Shift, KeyCode::None, KeyCode::Alt, KeyCode::Space, KeyCode::Caps, KeyCode::None, ]; pub fn get_keycode(scan_code: u8) -> KeyCode { if scan_code < 60 { SCANCODES_TO_KEYCODES[scan_code as usize] } else { KeyCode::None } } pub fn get_extended_keycode(scan_code: u8) -> KeyCode { match scan_code { 0x1c => K
random
[]
Rust
futures-util/src/compat/compat03as01.rs
EkardNT/futures-rs
90c83b8faca107e6c4db63d10b0d4f2ea36d6628
use futures_01::{ task as task01, Async as Async01, Future as Future01, Poll as Poll01, Stream as Stream01, }; #[cfg(feature = "sink")] use futures_01::{ AsyncSink as AsyncSink01, Sink as Sink01, StartSend as StartSend01, }; use futures_core::{ task::{RawWaker, RawWakerVTable}, TryFuture as TryFuture03, TryStream as TryStream03, }; #[cfg(feature = "sink")] use futures_sink::Sink as Sink03; use crate::task::{ self as task03, ArcWake as ArcWake03, WakerRef, }; #[cfg(feature = "sink")] use std::marker::PhantomData; use std::{ mem, pin::Pin, sync::Arc, task::Context, }; #[derive(Debug, Clone, Copy)] #[must_use = "futures do nothing unless you `.await` or poll them"] pub struct Compat<T> { pub(crate) inner: T, } #[cfg(feature = "sink")] #[derive(Debug)] #[must_use = "sinks do nothing unless polled"] pub struct CompatSink<T, Item> { inner: T, _phantom: PhantomData<fn(Item)>, } impl<T> Compat<T> { pub fn new(inner: T) -> Compat<T> { Compat { inner } } pub fn get_ref(&self) -> &T { &self.inner } pub fn get_mut(&mut self) -> &mut T { &mut self.inner } pub fn into_inner(self) -> T { self.inner } } #[cfg(feature = "sink")] impl<T, Item> CompatSink<T, Item> { pub fn new(inner: T) -> Self { CompatSink { inner, _phantom: PhantomData, } } pub fn get_ref(&self) -> &T { &self.inner } pub fn get_mut(&mut self) -> &mut T { &mut self.inner } pub fn into_inner(self) -> T { self.inner } } fn poll_03_to_01<T, E>(x: task03::Poll<Result<T, E>>) -> Result<Async01<T>, E> { match x? 
{ task03::Poll::Ready(t) => Ok(Async01::Ready(t)), task03::Poll::Pending => Ok(Async01::NotReady), } } impl<Fut> Future01 for Compat<Fut> where Fut: TryFuture03 + Unpin, { type Item = Fut::Ok; type Error = Fut::Error; fn poll(&mut self) -> Poll01<Self::Item, Self::Error> { with_context(self, |inner, cx| poll_03_to_01(inner.try_poll(cx))) } } impl<St> Stream01 for Compat<St> where St: TryStream03 + Unpin, { type Item = St::Ok; type Error = St::Error; fn poll(&mut self) -> Poll01<Option<Self::Item>, Self::Error> { with_context(self, |inner, cx| match inner.try_poll_next(cx)? { task03::Poll::Ready(None) => Ok(Async01::Ready(None)), task03::Poll::Ready(Some(t)) => Ok(Async01::Ready(Some(t))), task03::Poll::Pending => Ok(Async01::NotReady), }) } } #[cfg(feature = "sink")] impl<T, Item> Sink01 for CompatSink<T, Item> where T: Sink03<Item> + Unpin, { type SinkItem = Item; type SinkError = T::Error; fn start_send( &mut self, item: Self::SinkItem, ) -> StartSend01<Self::SinkItem, Self::SinkError> { with_sink_context(self, |mut inner, cx| { match inner.as_mut().poll_ready(cx)? 
{ task03::Poll::Ready(()) => { inner.start_send(item).map(|()| AsyncSink01::Ready) } task03::Poll::Pending => Ok(AsyncSink01::NotReady(item)), } }) } fn poll_complete(&mut self) -> Poll01<(), Self::SinkError> { with_sink_context(self, |inner, cx| poll_03_to_01(inner.poll_flush(cx))) } fn close(&mut self) -> Poll01<(), Self::SinkError> { with_sink_context(self, |inner, cx| poll_03_to_01(inner.poll_close(cx))) } } #[derive(Clone)] struct Current(task01::Task); impl Current { fn new() -> Current { Current(task01::current()) } fn as_waker(&self) -> WakerRef<'_> { unsafe fn ptr_to_current<'a>(ptr: *const ()) -> &'a Current { &*(ptr as *const Current) } fn current_to_ptr(current: &Current) -> *const () { current as *const Current as *const () } unsafe fn clone(ptr: *const ()) -> RawWaker { mem::transmute::<task03::Waker, RawWaker>( task03::waker(Arc::new(ptr_to_current(ptr).clone())) ) } unsafe fn drop(_: *const ()) {} unsafe fn wake(ptr: *const ()) { ptr_to_current(ptr).0.notify() } let ptr = current_to_ptr(self); let vtable = &RawWakerVTable::new(clone, wake, wake, drop); WakerRef::new_unowned(std::mem::ManuallyDrop::new(unsafe { task03::Waker::from_raw(RawWaker::new(ptr, vtable)) })) } } impl ArcWake03 for Current { fn wake_by_ref(arc_self: &Arc<Self>) { arc_self.0.notify(); } } fn with_context<T, R, F>(compat: &mut Compat<T>, f: F) -> R where T: Unpin, F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> R, { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); f(Pin::new(&mut compat.inner), &mut cx) } #[cfg(feature = "sink")] fn with_sink_context<T, Item, R, F>(compat: &mut CompatSink<T, Item>, f: F) -> R where T: Unpin, F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> R, { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); f(Pin::new(&mut compat.inner), &mut cx) } #[cfg(feature = "io-compat")] mod io { use super::*; use futures_io::{AsyncRead as AsyncRead03, AsyncWrite as 
AsyncWrite03}; use tokio_io::{AsyncRead as AsyncRead01, AsyncWrite as AsyncWrite01}; fn poll_03_to_io<T>(x: task03::Poll<Result<T, std::io::Error>>) -> Result<T, std::io::Error> { match x { task03::Poll::Ready(Ok(t)) => Ok(t), task03::Poll::Pending => Err(std::io::ErrorKind::WouldBlock.into()), task03::Poll::Ready(Err(e)) => Err(e), } } impl<R: AsyncRead03 + Unpin> std::io::Read for Compat<R> { fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_io(Pin::new(&mut self.inner).poll_read(&mut cx, buf)) } } impl<R: AsyncRead03 + Unpin> AsyncRead01 for Compat<R> { unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [u8]) -> bool { let initializer = self.inner.initializer(); let does_init = initializer.should_initialize(); if does_init { initializer.initialize(buf); } does_init } } impl<W: AsyncWrite03 + Unpin> std::io::Write for Compat<W> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_io(Pin::new(&mut self.inner).poll_write(&mut cx, buf)) } fn flush(&mut self) -> std::io::Result<()> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_io(Pin::new(&mut self.inner).poll_flush(&mut cx)) } } impl<W: AsyncWrite03 + Unpin> AsyncWrite01 for Compat<W> { fn shutdown(&mut self) -> std::io::Result<Async01<()>> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_01(Pin::new(&mut self.inner).poll_close(&mut cx)) } } }
use futures_01::{ task as task01, Async as Async01, Future as Future01, Poll as Poll01, Stream as Stream01, }; #[cfg(feature = "sink")] use futures_01::{ AsyncSink as AsyncSink01, Sink as Sink01, StartSend as StartSend01, }; use futures_core::{ task::{RawWaker, RawWakerVTable}, TryFuture as TryFuture03, TryStream as TryStream03, }; #[cfg(feature = "sink")] use futures_sink::Sink as Sink03; use crate::task::{ self as task03, ArcWake as ArcWake03, WakerRef, }; #[cfg(feature = "sink")] use std::marker::PhantomData; use std::{ mem, pin::Pin, sync::Arc, task::Context, }; #[derive(Debug, Clone, Copy)] #[must_use = "futures do nothing unless you `.await` or poll them"] pub struct Compat<T> { pub(crate) inner: T, } #[cfg(feature = "sink")] #[derive(Debug)] #[must_use = "sinks do nothing unless polled"] pub struct CompatSink<T, Item> { inner: T, _phantom: PhantomData<fn(Item)>, } impl<T> Compat<T> { pub fn new(inner: T) -> Compat<T> { Compat { inner } } pub fn get_ref(&self) -> &T { &self.inner } pub fn get_mut(&mut self) -> &mut T { &mut self.inner } pub fn into_inner(self) -> T { self.inner } } #[cfg(feature = "sink")] impl<T, Item> CompatSink<T, Item> { pub fn new(inner: T) -> Self { CompatSink { inner, _phantom: PhantomData, } } pub fn get_ref(&self) -> &T { &self.inner } pub fn get_mut(&mut self) -> &mut T { &mut self.inner } pub fn into_inner(self) -> T { self.inner } } fn poll_03_to_01<T, E>(x: task03::Poll<Result<T, E>>) -> Result<Async01<T>, E> { match x? 
{ task03::Poll::Ready(t) => Ok(Async01::Ready(t)), task03::Poll::Pending => Ok(Async01::NotReady), } } impl<Fut> Future01 for Compat<Fut> where Fut: TryFuture03 + Unpin, { type Item = Fut::Ok; type Error = Fut::Error; fn poll(&mut self) -> Poll01<Self::Item, Self::Error> { with_context(self, |inner, cx| poll_03_to_01(inner.try_poll(cx))) } } impl<St> Stream01 for Compat<St> where St: TryStream03 + Unpin, { type Item = St::Ok; type Error = St::Error; fn poll(&mut self) -> Poll01<Option<Self::Item>, Self::Error> { with_context(self, |inner, cx| match inner.try_poll_next(cx)? { task03::Poll::Ready(None) => Ok(Async01::Ready(None)), task03::Poll::Ready(Some(t)) => Ok(Async01::Ready(Some(t))), task03::Poll::Pending => Ok(Async01::NotReady), }) } } #[cfg(feature = "sink")] impl<T, Item> Sink01 for CompatSink<T, Item> where T: Sink03<Item> + Unpin, { type SinkItem = Item; type SinkError = T::Error; fn start_send( &mut self, item: Self::SinkItem, ) -> StartSend01<Self::SinkItem, Self::SinkError> { with_sink_context(self, |mut inner, cx| { match inner.as_mut().poll_ready(cx)? 
{ task03::Poll::Ready(()) => { inner.start_send(item).map(|()| AsyncSink01::Ready) } task03::Poll::Pending => Ok(AsyncSink01::NotReady(item)), } }) } fn poll_complete(&mut self) -> Poll01<(), Self::SinkError> { with_sink_context(self, |inner, cx| poll_03_to_01(inner.poll_flush(cx))) } fn close(&mut self) -> Poll01<(), Self::SinkError> { with_sink_context(self, |inner, cx| poll_03_to_01(inner.poll_close(cx))) } } #[derive(Clone)] struct Current(task01::Task); impl Current { fn new() -> Current { Current(task01::current()) } fn as_waker(&self) -> WakerRef<'_> { unsafe fn ptr_to_current<'a>(ptr: *const ()) -> &'a Current { &*(ptr as *const Current) } fn current_to_ptr(current: &Current) -> *const () { current as *const Current as *const () } unsafe fn clone(ptr: *const ()) -> RawWaker { mem::transmute::<task03::Waker, RawWaker>( task03::waker(Arc::new(ptr_to_current(ptr).clone())) ) } unsafe fn drop(_: *const ()) {} unsafe fn wake(ptr: *const ()) { ptr_to_current(ptr).0.notify() } let ptr = current_to_ptr(self); let vtable = &RawWakerVTable::new(clone, wake, wake, drop); WakerRef::new_unowned(std::mem::ManuallyDrop::new(unsafe { task03::Waker::from_raw(RawWaker::new(ptr, vtable)) })) } } impl ArcWake03 for Current { fn wake_by_ref(arc_self: &Arc<Self>) { arc_self.0.notify(); } } fn with_context<T, R, F>(compat: &mut Compat<T>, f: F) -> R where T: Unpin, F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> R, { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); f(Pin::new(&mut compat.inner), &mut cx) } #[cfg(feature = "sink")] fn with_sink_context<T, Item, R, F>(compat: &mut CompatSink<T, Item>, f: F) -> R where T: Unpin, F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> R, { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); f(Pin::new(&mut compat.inner), &mut cx) } #[cfg(feature = "io-compat")] mod io { use super::*; use futures_io::{AsyncRead as AsyncRead03, AsyncWrite as 
AsyncWrite03}; use tokio_io::{AsyncRead as AsyncRead01, AsyncWrite as AsyncWrite01}; fn poll_03_to_io<T>(x: task03::Poll<Result<T, std::io::Error>>) -> Result<T, std::io::Error> { match x { task03::Poll::Ready(Ok(t)) => Ok(t), task03::Poll::Pending => Err(std::io::ErrorKind::WouldBlock.into()), task03::Poll::Ready(Err(e)) => Err(e), } } impl<R: AsyncRead03 + Unpin> std::io::Read for Compat<R> { fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_io(Pin::new(&mut self.inner).poll_read(&mut cx, buf)) } } impl<R: AsyncRead03 + Unpin> AsyncRead01 for Compat<R> { unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [u8]) -> bool { let initializer = self.inner.initializer(); let does_init = initializer.should_initialize(); if does_init { initializer.initialize(buf); } does_init } } impl<W: AsyncWrite03 + Unpin> std::io::Write for Compat<W> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_io(Pin::new(&mut self.inner).poll_write(&mut cx, buf)) } fn flush(&mut self) -> std::io::Result<()> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_io(Pin::new(&mut self.inner).poll_flush(&mut cx)) } } impl<W: AsyncWrite03 + Unpin> AsyncWrite01 for Compat<W> { fn shutdown(&mut
} }
self) -> std::io::Result<Async01<()>> { let current = Current::new(); let waker = current.as_waker(); let mut cx = Context::from_waker(&waker); poll_03_to_01(Pin::new(&mut self.inner).poll_close(&mut cx)) }
function_block-function_prefixed
[ { "content": "#[doc(hidden)]\n\npub fn poll<F: Future + Unpin>(future: F) -> PollOnce<F> {\n\n PollOnce { future }\n\n}\n\n\n\n#[allow(missing_debug_implementations)]\n\n#[doc(hidden)]\n\npub struct PollOnce<F: Future + Unpin> {\n\n future: F,\n\n}\n\n\n\nimpl<F: Future + Unpin> Future for PollOnce<F> {\n\n type Output = Poll<F::Output>;\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n Poll::Ready(self.future.poll_unpin(cx))\n\n }\n\n}\n", "file_path": "futures-util/src/async_await/poll.rs", "rank": 1, "score": 467939.7447178153 }, { "content": "fn poll_01_to_03<T, E>(x: Result<Async01<T>, E>) -> task03::Poll<Result<T, E>> {\n\n match x? {\n\n Async01::Ready(t) => task03::Poll::Ready(Ok(t)),\n\n Async01::NotReady => task03::Poll::Pending,\n\n }\n\n}\n\n\n\nimpl<Fut: Future01> Future03 for Compat01As03<Fut> {\n\n type Output = Result<Fut::Item, Fut::Error>;\n\n\n\n fn poll(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> task03::Poll<Self::Output> {\n\n poll_01_to_03(self.in_notify(cx, Future01::poll))\n\n }\n\n}\n\n\n\nimpl<St: Stream01> Stream03 for Compat01As03<St> {\n\n type Item = Result<St::Item, St::Error>;\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 2, "score": 410793.71593046567 }, { "content": "fn run<F: Future + Unpin>(mut f: F) -> F::Output {\n\n let mut cx = noop_context();\n\n loop {\n\n if let Poll::Ready(x) = f.poll_unpin(&mut cx) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "futures/tests/io_buf_writer.rs", "rank": 4, "score": 395846.82431146555 }, { "content": "fn run<F: Future + Unpin>(mut f: F) -> F::Output {\n\n let mut cx = noop_context();\n\n loop {\n\n if let Poll::Ready(x) = f.poll_unpin(&mut cx) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "futures/tests/io_buf_reader.rs", "rank": 5, "score": 395846.8243114655 }, { "content": "/// Creates a `Stream` from a seed and a closure returning a `Future`.\n\n///\n\n/// This function is the dual for 
the `Stream::fold()` adapter: while\n\n/// `Stream::fold()` reduces a `Stream` to one single value, `unfold()` creates a\n\n/// `Stream` from a seed value.\n\n///\n\n/// `unfold()` will call the provided closure with the provided seed, then wait\n\n/// for the returned `Future` to complete with `(a, b)`. It will then yield the\n\n/// value `a`, and use `b` as the next internal state.\n\n///\n\n/// If the closure returns `None` instead of `Some(Future)`, then the `unfold()`\n\n/// will stop producing items and return `Poll::Ready(None)` in future\n\n/// calls to `poll()`.\n\n///\n\n/// In case of error generated by the returned `Future`, the error will be\n\n/// returned by the `Stream`. The `Stream` will then yield\n\n/// `Poll::Ready(None)` in future calls to `poll()`.\n\n///\n\n/// This function can typically be used when wanting to go from the \"world of\n\n/// futures\" to the \"world of streams\": the provided closure can build a\n\n/// `Future` using other library functions working on futures, and `unfold()`\n\n/// will turn it into a `Stream` by repeating the operation.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::unfold(0, |state| async move {\n\n/// if state <= 2 {\n\n/// let next_state = state + 1;\n\n/// let yielded = state * 2;\n\n/// Some((yielded, next_state))\n\n/// } else {\n\n/// None\n\n/// }\n\n/// });\n\n///\n\n/// let result = stream.collect::<Vec<i32>>().await;\n\n/// assert_eq!(result, vec![0, 2, 4]);\n\n/// # });\n\n/// ```\n\npub fn unfold<T, F, Fut, Item>(init: T, f: F) -> Unfold<T, F, Fut>\n\n where F: FnMut(T) -> Fut,\n\n Fut: Future<Output = Option<(Item, T)>>,\n\n{\n\n Unfold {\n\n f,\n\n state: Some(init),\n\n fut: None,\n\n }\n\n}\n\n\n\n/// Stream for the [`unfold`] function.\n\n#[must_use = \"streams do nothing unless polled\"]\n\npub struct Unfold<T, F, Fut> {\n\n f: F,\n\n state: Option<T>,\n\n fut: 
Option<Fut>,\n\n}\n\n\n\nimpl<T, F, Fut: Unpin> Unpin for Unfold<T, F, Fut> {}\n", "file_path": "futures-util/src/stream/unfold.rs", "rank": 6, "score": 395526.56012896396 }, { "content": "struct StreamSink<T, E, Item>(PhantomData<(T, E, Item)>);\n\n\n\nimpl<T, E, Item> Stream for StreamSink<T, E, Item> {\n\n type Item = Result<T, E>;\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n panic!()\n\n }\n\n}\n\n\n\nimpl<T, E, Item> Sink<Item> for StreamSink<T, E, Item> {\n\n type Error = E;\n\n fn poll_ready(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n panic!()\n\n }\n\n fn start_send(self: Pin<&mut Self>, _: Item) -> Result<(), Self::Error> {\n\n panic!()\n\n }\n\n fn poll_flush(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n panic!()\n\n }\n\n fn poll_close(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "futures/tests/future_try_flatten_stream.rs", "rank": 9, "score": 370206.23492305237 }, { "content": "// Sends a value on an i32 channel sink\n\nstruct StartSendFut<S: Sink<Item> + Unpin, Item: Unpin>(Option<S>, Option<Item>);\n\n\n\nimpl<S: Sink<Item> + Unpin, Item: Unpin> StartSendFut<S, Item> {\n\n fn new(sink: S, item: Item) -> Self {\n\n Self(Some(sink), Some(item))\n\n }\n\n}\n\n\n\nimpl<S: Sink<Item> + Unpin, Item: Unpin> Future for StartSendFut<S, Item> {\n\n type Output = Result<S, S::Error>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let Self(inner, item) = self.get_mut();\n\n {\n\n let mut inner = inner.as_mut().unwrap();\n\n ready!(Pin::new(&mut inner).poll_ready(cx))?;\n\n Pin::new(&mut inner).start_send(item.take().unwrap())?;\n\n }\n\n Poll::Ready(Ok(inner.take().unwrap()))\n\n }\n\n}\n\n\n\n// Test that `start_send` on an `mpsc` channel does indeed block when the\n\n// channel is full\n", "file_path": 
"futures/tests/sink.rs", "rank": 10, "score": 367921.6957348726 }, { "content": "/// Create a new [`Waker`] that counts the number of times it's awoken.\n\n///\n\n/// [`Waker`]: futures_core::task::Waker\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures_test::task::new_count_waker;\n\n///\n\n/// let (waker, count) = new_count_waker();\n\n///\n\n/// assert_eq!(count, 0);\n\n///\n\n/// waker.wake_by_ref();\n\n/// waker.wake();\n\n///\n\n/// assert_eq!(count, 2);\n\n/// ```\n\npub fn new_count_waker() -> (Waker, AwokenCount) {\n\n let inner = Arc::new(WakerInner { count: AtomicUsize::new(0) });\n\n (task::waker(inner.clone()), AwokenCount { inner })\n\n}\n", "file_path": "futures-test/src/task/wake_counter.rs", "rank": 11, "score": 367240.5837836401 }, { "content": "fn flag_cx<F, R>(f: F) -> R\n\nwhere\n\n F: FnOnce(Arc<Flag>, &mut Context<'_>) -> R,\n\n{\n\n let flag = Flag::new();\n\n let waker = task::waker_ref(&flag);\n\n let cx = &mut Context::from_waker(&waker);\n\n f(flag.clone(), cx)\n\n}\n\n\n", "file_path": "futures/tests/sink.rs", "rank": 12, "score": 366441.8934923264 }, { "content": "#[doc(hidden)]\n\npub fn assert_is_unpin_stream<S: Stream + Unpin>(_: &mut S) {}\n\n\n\n/// Assert that the next poll to the provided stream will return\n\n/// [`Poll::Pending`](futures_core::task::Poll::Pending).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::stream;\n\n/// use futures_test::future::FutureTestExt;\n\n/// use futures_test::{\n\n/// assert_stream_pending, assert_stream_next, assert_stream_done,\n\n/// };\n\n/// use futures::pin_mut;\n\n///\n\n/// let stream = stream::once((async { 5 }).pending_once());\n\n/// pin_mut!(stream);\n\n///\n\n/// assert_stream_pending!(stream);\n\n/// assert_stream_next!(stream, 5);\n", "file_path": "futures-test/src/assert.rs", "rank": 13, "score": 362223.055624273 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn assert_unpin<T: Unpin>(_: &T) {}\n\n\n", "file_path": 
"futures-util/src/async_await/mod.rs", "rank": 14, "score": 359844.5215381355 }, { "content": "/// Creates a new stream wrapping a function returning `Poll<Option<T>>`.\n\n///\n\n/// Polling the returned stream calls the wrapped function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::stream::poll_fn;\n\n/// use futures::task::Poll;\n\n///\n\n/// let mut counter = 1usize;\n\n///\n\n/// let read_stream = poll_fn(move |_| -> Poll<Option<String>> {\n\n/// if counter == 0 { return Poll::Ready(None); }\n\n/// counter -= 1;\n\n/// Poll::Ready(Some(\"Hello, World!\".to_owned()))\n\n/// });\n\n/// ```\n\npub fn poll_fn<T, F>(f: F) -> PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<Option<T>>,\n\n{\n\n PollFn { f }\n\n}\n\n\n\nimpl<T, F> Stream for PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<Option<T>>,\n\n{\n\n type Item = T;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {\n\n (&mut self.f)(cx)\n\n }\n\n}\n", "file_path": "futures-util/src/stream/poll_fn.rs", "rank": 15, "score": 359238.83631514135 }, { "content": "fn run<F: Future + Unpin>(mut f: F) -> F::Output {\n\n let mut cx = noop_context();\n\n loop {\n\n if let Poll::Ready(x) = f.poll_unpin(&mut cx) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "futures/tests/io_read_until.rs", "rank": 16, "score": 355551.99054971314 }, { "content": "fn run<F: Future + Unpin>(mut f: F) -> F::Output {\n\n let mut cx = noop_context();\n\n loop {\n\n if let Poll::Ready(x) = f.poll_unpin(&mut cx) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! 
run_next {\n\n ($expr:expr) => {\n\n run($expr.next()).unwrap().unwrap()\n\n };\n\n}\n\n\n", "file_path": "futures/tests/io_lines.rs", "rank": 17, "score": 355551.99054971314 }, { "content": "/// Creates a [`Waker`] from an `Arc<impl ArcWake>`.\n\n///\n\n/// The returned [`Waker`] will call\n\n/// [`ArcWake.wake()`](ArcWake::wake) if awoken.\n\npub fn waker<W>(wake: Arc<W>) -> Waker\n\nwhere\n\n W: ArcWake,\n\n{\n\n let ptr = Arc::into_raw(wake) as *const ();\n\n\n\n unsafe {\n\n Waker::from_raw(RawWaker::new(ptr, waker_vtable::<W>()))\n\n }\n\n}\n\n\n\n// FIXME: panics on Arc::clone / refcount changes could wreak havoc on the\n\n// code here. We should guard against this by aborting.\n\n\n\nunsafe fn increase_refcount<T: ArcWake>(data: *const ()) {\n\n // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop\n\n let arc = mem::ManuallyDrop::new(Arc::<T>::from_raw(data as *const T));\n\n // Now increase refcount, but don't drop new refcount either\n\n let _arc_clone: mem::ManuallyDrop<_> = arc.clone();\n\n}\n", "file_path": "futures-util/src/task/waker.rs", "rank": 18, "score": 353618.51402460516 }, { "content": "fn run<F: Future + Unpin>(mut f: F) -> F::Output {\n\n let mut cx = noop_context();\n\n loop {\n\n if let Poll::Ready(x) = f.poll_unpin(&mut cx) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "futures/tests/io_read_line.rs", "rank": 19, "score": 352138.98066405056 }, { "content": "fn run<F: Future + Unpin>(mut f: F) -> F::Output {\n\n let mut cx = noop_context();\n\n loop {\n\n if let Poll::Ready(x) = f.poll_unpin(&mut cx) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "futures/tests/io_read_to_string.rs", "rank": 20, "score": 352138.98066405056 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn assert_fused_stream<T: Stream + FusedStream>(_: &T) {}\n", "file_path": "futures-util/src/async_await/mod.rs", "rank": 21, "score": 345910.60468877916 }, { "content": "#[test]\n\nfn test_into_async_bufread() -> 
std::io::Result<()> {\n\n let stream = stream::iter((1..=2).flat_map(|_| vec![Ok(vec![]), Ok(vec![1, 2, 3, 4, 5])]));\n\n let mut reader = stream.interleave_pending().into_async_read();\n\n\n\n let mut reader = Pin::new(&mut reader);\n\n\n\n assert_fill_buf!(reader, &[1, 2, 3, 4, 5][..]);\n\n reader.as_mut().consume(3);\n\n\n\n assert_fill_buf!(reader, &[4, 5][..]);\n\n reader.as_mut().consume(2);\n\n\n\n assert_fill_buf!(reader, &[1, 2, 3, 4, 5][..]);\n\n reader.as_mut().consume(2);\n\n\n\n assert_fill_buf!(reader, &[3, 4, 5][..]);\n\n reader.as_mut().consume(3);\n\n\n\n assert_fill_buf!(reader, &[][..]);\n\n\n\n Ok(())\n\n}\n", "file_path": "futures/tests/stream_into_async_read.rs", "rank": 22, "score": 329203.4917080116 }, { "content": "#[inline]\n\npub fn waker_ref<W>(wake: &Arc<W>) -> WakerRef<'_>\n\nwhere\n\n W: ArcWake\n\n{\n\n // simply copy the pointer instead of using Arc::into_raw,\n\n // as we don't actually keep a refcount by using ManuallyDrop.<\n\n let ptr = (&**wake as *const W) as *const ();\n\n\n\n let waker = ManuallyDrop::new(unsafe {\n\n Waker::from_raw(RawWaker::new(ptr, waker_vtable::<W>()))\n\n });\n\n WakerRef::new_unowned(waker)\n\n}\n", "file_path": "futures-util/src/task/waker_ref.rs", "rank": 23, "score": 327644.691029583 }, { "content": "fn err_list() -> Box<Stream<Item=i32, Error=u32> + Send> {\n\n let (tx, rx) = mpsc::channel(1);\n\n tx.send(Ok(1))\n\n .and_then(|tx| tx.send(Ok(2)))\n\n .and_then(|tx| tx.send(Err(3)))\n\n .forget();\n\n Box::new(rx.then(|r| r.unwrap()))\n\n}\n\n\n", "file_path": "futures/tests_disabled/stream.rs", "rank": 24, "score": 327459.4417772783 }, { "content": "/// Creates a new future wrapping around a function returning [`Poll`].\n\n///\n\n/// Polling the returned future delegates to the wrapped function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future::poll_fn;\n\n/// use futures::task::{Context, Poll};\n\n///\n\n/// fn read_line(_cx: &mut 
Context<'_>) -> Poll<String> {\n\n/// Poll::Ready(\"Hello, World!\".into())\n\n/// }\n\n///\n\n/// let read_future = poll_fn(read_line);\n\n/// assert_eq!(read_future.await, \"Hello, World!\".to_owned());\n\n/// # });\n\n/// ```\n\npub fn poll_fn<T, F>(f: F) -> PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<T>\n\n{\n\n PollFn { f }\n\n}\n\n\n\nimpl<F> fmt::Debug for PollFn<F> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"PollFn\").finish()\n\n }\n\n}\n\n\n\nimpl<T, F> Future for PollFn<F>\n\n where F: FnMut(&mut Context<'_>) -> Poll<T>,\n\n{\n\n type Output = T;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n (&mut self.f)(cx)\n\n }\n\n}\n", "file_path": "futures-util/src/future/poll_fn.rs", "rank": 25, "score": 320872.119979834 }, { "content": "fn assert_stream_sink<S: Stream + Sink<Item>, Item>(_: &S) {}\n\n\n", "file_path": "futures/tests/future_try_flatten_stream.rs", "rank": 26, "score": 319472.3406003565 }, { "content": "struct Guard<'a> { buf: &'a mut Vec<u8>, len: usize }\n\n\n\nimpl Drop for Guard<'_> {\n\n fn drop(&mut self) {\n\n unsafe { self.buf.set_len(self.len); }\n\n }\n\n}\n\n\n\n// This uses an adaptive system to extend the vector when it fills. We want to\n\n// avoid paying to allocate and zero a huge chunk of memory if the reader only\n\n// has 4 bytes while still making large reads if the reader does have a ton\n\n// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every\n\n// time is 4,500 times (!) 
slower than this if the reader has a very small\n\n// amount of data to return.\n\n//\n\n// Because we're extending the buffer with uninitialized data for trusted\n\n// readers, we need to make sure to truncate that if any of this panics.\n\npub(super) fn read_to_end_internal<R: AsyncRead + ?Sized>(\n\n mut rd: Pin<&mut R>,\n\n cx: &mut Context<'_>,\n", "file_path": "futures-util/src/io/read_to_end.rs", "rank": 27, "score": 318265.6510227433 }, { "content": "/// Create a future that is immediately ready with an error value.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = future::err::<i32, i32>(1);\n\n/// assert_eq!(a.await, Err(1));\n\n/// # });\n\n/// ```\n\npub fn err<T, E>(err: E) -> Ready<Result<T, E>> {\n\n Ready(Some(Err(err)))\n\n}\n", "file_path": "futures-util/src/future/ready.rs", "rank": 28, "score": 317525.5336281662 }, { "content": "/// Creates a new future that allows delayed execution of a closure.\n\n///\n\n/// The provided closure is only run once the future is polled.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = future::lazy(|_| 1);\n\n/// assert_eq!(a.await, 1);\n\n///\n\n/// let b = future::lazy(|_| -> i32 {\n\n/// panic!(\"oh no!\")\n\n/// });\n\n/// drop(b); // closure is never run\n\n/// # });\n\n/// ```\n\npub fn lazy<F, R>(f: F) -> Lazy<F>\n\n where F: FnOnce(&mut Context<'_>) -> R,\n\n{\n\n Lazy { f: Some(f) }\n\n}\n\n\n\nimpl<F, R> FusedFuture for Lazy<F>\n\n where F: FnOnce(&mut Context<'_>) -> R,\n\n{\n\n fn is_terminated(&self) -> bool { self.f.is_none() }\n\n}\n\n\n\nimpl<F, R> Future for Lazy<F>\n\n where F: FnOnce(&mut Context<'_>) -> R,\n\n{\n\n type Output = R;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<R> {\n\n Poll::Ready((self.f.take().unwrap())(cx))\n\n }\n\n}\n", "file_path": "futures-util/src/future/lazy.rs", 
"rank": 29, "score": 315719.86150060716 }, { "content": "/// Creates an instance of a writer which will successfully consume all data.\n\n///\n\n/// All calls to `poll_write` on the returned instance will return `Poll::Ready(Ok(buf.len()))`\n\n/// and the contents of the buffer will not be inspected.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncWriteExt};\n\n///\n\n/// let buffer = vec![1, 2, 3, 5, 8];\n\n/// let mut writer = io::sink();\n\n/// let num_bytes = writer.write(&buffer).await?;\n\n/// assert_eq!(num_bytes, 5);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn sink() -> Sink {\n\n Sink { _priv: () }\n\n}\n\n\n\nimpl AsyncWrite for Sink {\n\n #[inline]\n\n fn poll_write(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Poll::Ready(Ok(buf.len()))\n\n }\n\n\n\n #[inline]\n\n fn poll_write_vectored(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n bufs: &[IoSlice<'_>],\n\n ) -> Poll<io::Result<usize>> {\n", "file_path": "futures-util/src/io/sink.rs", "rank": 30, "score": 314732.5690457227 }, { "content": "/// An extension trait for `Sink`s that provides a variety of convenient\n\n/// combinator functions.\n\npub trait SinkExt<Item>: Sink<Item> {\n\n /// Composes a function *in front of* the sink.\n\n ///\n\n /// This adapter produces a new sink that passes each value through the\n\n /// given function `f` before sending it to `self`.\n\n ///\n\n /// To process each value, `f` produces a *future*, which is then polled to\n\n /// completion before passing its result down to the underlying sink. 
If the\n\n /// future produces an error, that error is returned by the new sink.\n\n ///\n\n /// Note that this function consumes the given sink, returning a wrapped\n\n /// version, much like `Iterator::map`.\n\n fn with<U, Fut, F, E>(self, f: F) -> With<Self, Item, U, Fut, F>\n\n where F: FnMut(U) -> Fut,\n\n Fut: Future<Output = Result<Item, E>>,\n\n E: From<Self::Error>,\n\n Self: Sized\n\n {\n\n With::new(self, f)\n\n }\n", "file_path": "futures-util/src/sink/mod.rs", "rank": 31, "score": 312069.69772541913 }, { "content": "// Set up and run a basic single-threaded spawner loop, invoking `f` on each\n\n// turn.\n\nfn run_executor<T, F: FnMut(&mut Context<'_>) -> Poll<T>>(mut f: F) -> T {\n\n let _enter = enter()\n\n .expect(\"cannot execute `LocalPool` executor from within \\\n\n another executor\");\n\n\n\n CURRENT_THREAD_NOTIFY.with(|thread_notify| {\n\n let waker = waker_ref(thread_notify);\n\n let mut cx = Context::from_waker(&waker);\n\n loop {\n\n if let Poll::Ready(t) = f(&mut cx) {\n\n return t;\n\n }\n\n thread::park();\n\n }\n\n })\n\n}\n\n\n", "file_path": "futures-executor/src/local_pool.rs", "rank": 32, "score": 309284.68892959005 }, { "content": "struct NotifyWaker(task03::Waker);\n\n\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 33, "score": 306935.77501793415 }, { "content": "#[doc(hidden)]\n\npub fn shuffle<T>(slice: &mut [T]) {\n\n for i in (1..slice.len()).rev() {\n\n slice.swap(i, gen_index(i + 1));\n\n }\n\n}\n\n\n", "file_path": "futures-util/src/async_await/random.rs", "rank": 34, "score": 304762.8214186396 }, { "content": "/// Create a new [`Waker`](futures_core::task::Waker) which will\n\n/// panic when `wake()` is called on it. 
The [`Waker`] can be converted\n\n/// into a [`Waker`] which will behave the same way.\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures_test::task::panic_waker;\n\n///\n\n/// let waker = panic_waker();\n\n/// waker.wake(); // Will panic\n\n/// ```\n\npub fn panic_waker() -> Waker {\n\n unsafe { Waker::from_raw(raw_panic_waker()) }\n\n}\n\n\n", "file_path": "futures-test/src/task/panic_waker.rs", "rank": 35, "score": 299715.92485947534 }, { "content": "#[inline]\n\npub fn noop_waker() -> Waker {\n\n unsafe {\n\n Waker::from_raw(noop_raw_waker())\n\n }\n\n}\n\n\n\n/// Get a static reference to a [`Waker`] which\n\n/// does nothing when `wake()` is called on it.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::task::noop_waker_ref;\n\n/// let waker = noop_waker_ref();\n\n/// waker.wake_by_ref();\n\n/// ```\n", "file_path": "futures-util/src/task/noop_waker.rs", "rank": 36, "score": 299679.28352179704 }, { "content": "fn poll_executor<T, F: FnMut(&mut Context<'_>) -> T>(mut f: F) -> T {\n\n let _enter = enter()\n\n .expect(\"cannot execute `LocalPool` executor from within \\\n\n another executor\");\n\n\n\n CURRENT_THREAD_NOTIFY.with(|thread_notify| {\n\n let waker = waker_ref(thread_notify);\n\n let mut cx = Context::from_waker(&waker);\n\n f(&mut cx)\n\n })\n\n}\n\n\n\nimpl LocalPool {\n\n /// Create a new, empty pool of tasks.\n\n pub fn new() -> LocalPool {\n\n LocalPool {\n\n pool: FuturesUnordered::new(),\n\n incoming: Default::default(),\n\n }\n\n }\n", "file_path": "futures-executor/src/local_pool.rs", "rank": 37, "score": 294970.20419553225 }, { "content": "#[allow(missing_debug_implementations)] // false positive: this is private type\n\n#[derive(Clone)]\n\nstruct WakerToHandle<'a>(&'a task03::Waker);\n\n\n\nimpl From<WakerToHandle<'_>> for NotifyHandle01 {\n\n fn from(handle: WakerToHandle<'_>) -> NotifyHandle01 {\n\n let ptr = Box::new(NotifyWaker(handle.0.clone()));\n\n\n\n unsafe { 
NotifyHandle01::new(Box::into_raw(ptr)) }\n\n }\n\n}\n\n\n\nimpl Notify01 for NotifyWaker {\n\n fn notify(&self, _: usize) {\n\n self.0.wake_by_ref();\n\n }\n\n}\n\n\n\nunsafe impl UnsafeNotify01 for NotifyWaker {\n\n unsafe fn clone_raw(&self) -> NotifyHandle01 {\n\n WakerToHandle(&self.0).into()\n\n }\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 38, "score": 293860.440622546 }, { "content": "/// Creates a stream of a single element.\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::once(async { 17 });\n\n/// let collected = stream.collect::<Vec<i32>>().await;\n\n/// assert_eq!(collected, vec![17]);\n\n/// # });\n\n/// ```\n\npub fn once<Fut: Future>(future: Fut) -> Once<Fut> {\n\n Once { future: Some(future) }\n\n}\n\n\n\n/// A stream which emits single element and then EOF.\n\n///\n\n/// This stream will never block and is always ready.\n\n#[derive(Debug)]\n\n#[must_use = \"streams do nothing unless polled\"]\n\npub struct Once<Fut> {\n\n future: Option<Fut>\n\n}\n\n\n\nimpl<Fut: Unpin> Unpin for Once<Fut> {}\n\n\n\nimpl<Fut> Once<Fut> {\n\n unsafe_pinned!(future: Option<Fut>);\n\n}\n\n\n\nimpl<Fut: Future> Stream for Once<Fut> {\n", "file_path": "futures-util/src/stream/once.rs", "rank": 39, "score": 292242.10753516294 }, { "content": "/// Turn a stream into a blocking iterator.\n\n///\n\n/// When `next` is called on the resulting `BlockingStream`, the caller\n\n/// will be blocked until the next element of the `Stream` becomes available.\n\npub fn block_on_stream<S: Stream + Unpin>(stream: S) -> BlockingStream<S> {\n\n BlockingStream { stream }\n\n}\n\n\n\n/// An iterator which blocks on values from a stream until they become available.\n\n#[derive(Debug)]\n\npub struct BlockingStream<S: Stream + Unpin> { stream: S }\n\n\n\nimpl<S: Stream + Unpin> Deref for BlockingStream<S> {\n\n type Target = S;\n\n fn deref(&self) -> &Self::Target {\n\n 
&self.stream\n\n }\n\n}\n\n\n\nimpl<S: Stream + Unpin> DerefMut for BlockingStream<S> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.stream\n\n }\n\n}\n", "file_path": "futures-executor/src/local_pool.rs", "rank": 40, "score": 289096.2035778469 }, { "content": "fn assert_sink<S: Sink<Item>, Item>(_: &S) {}\n", "file_path": "futures/tests/future_try_flatten_stream.rs", "rank": 41, "score": 287584.12519064284 }, { "content": "/// Get a global reference to a\n\n/// [`Waker`](futures_core::task::Waker) referencing a singleton\n\n/// instance of a [`Waker`] which panics when woken.\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures_test::task::panic_waker_ref;\n\n///\n\n/// let waker = panic_waker_ref();\n\n/// waker.wake_by_ref(); // Will panic\n\n/// ```\n\npub fn panic_waker_ref() -> &'static Waker {\n\n thread_local! {\n\n static PANIC_WAKER_INSTANCE: UnsafeCell<Waker> =\n\n UnsafeCell::new(panic_waker());\n\n }\n\n PANIC_WAKER_INSTANCE.with(|l| unsafe { &*l.get() })\n\n}\n", "file_path": "futures-test/src/task/panic_waker.rs", "rank": 42, "score": 287527.05653544964 }, { "content": "#[inline]\n\n#[cfg(feature = \"std\")]\n\npub fn noop_waker_ref() -> &'static Waker {\n\n thread_local! 
{\n\n static NOOP_WAKER_INSTANCE: UnsafeCell<Waker> =\n\n UnsafeCell::new(noop_waker());\n\n }\n\n NOOP_WAKER_INSTANCE.with(|l| unsafe { &*l.get() })\n\n}\n", "file_path": "futures-util/src/task/noop_waker.rs", "rank": 43, "score": 287501.3544490946 }, { "content": "fn list() -> Box<Stream<Item=i32, Error=u32> + Send> {\n\n let (tx, rx) = mpsc::channel(1);\n\n tx.send(Ok(1))\n\n .and_then(|tx| tx.send(Ok(2)))\n\n .and_then(|tx| tx.send(Ok(3)))\n\n .forget();\n\n Box::new(rx.then(|r| r.unwrap()))\n\n}\n\n\n", "file_path": "futures/tests_disabled/stream.rs", "rank": 44, "score": 286513.397694571 }, { "content": "#[derive(Debug)]\n\nstruct WakerInner {\n\n count: AtomicUsize,\n\n}\n\n\n\nimpl ArcWake for WakerInner {\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n let _ = arc_self.count.fetch_add(1, Ordering::SeqCst);\n\n }\n\n}\n\n\n", "file_path": "futures-test/src/task/wake_counter.rs", "rank": 45, "score": 285907.1458408278 }, { "content": "fn unwrap<T, E: fmt::Debug>(x: Poll<Result<T, E>>) -> T {\n\n match x {\n\n Poll::Ready(Ok(x)) => x,\n\n Poll::Ready(Err(_)) => panic!(\"Poll::Ready(Err(_))\"),\n\n Poll::Pending => panic!(\"Poll::Pending\"),\n\n }\n\n}\n\n\n", "file_path": "futures/tests/sink.rs", "rank": 46, "score": 283272.3187871391 }, { "content": "/// An extension trait which adds utility methods to `AsyncBufRead` types.\n\npub trait AsyncBufReadExt: AsyncBufRead {\n\n /// Creates a future which copies all the bytes from one object to another.\n\n ///\n\n /// The returned future will copy all the bytes read from this `AsyncBufRead` into the\n\n /// `writer` specified. 
This future will only complete once the `reader` has hit\n\n /// EOF and all bytes have been written to and flushed from the `writer`\n\n /// provided.\n\n ///\n\n /// On success the number of bytes is returned.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::io::{AsyncBufReadExt, AsyncWriteExt};\n\n /// use std::io::Cursor;\n\n ///\n\n /// let reader = Cursor::new([1, 2, 3, 4]);\n\n /// let mut writer = Cursor::new([0u8; 5]);\n\n ///\n", "file_path": "futures-util/src/io/mod.rs", "rank": 47, "score": 280412.4034901873 }, { "content": "fn sassert_next<S>(s: &mut S, item: S::Item)\n\nwhere\n\n S: Stream + Unpin,\n\n S::Item: Eq + fmt::Debug,\n\n{\n\n match s.poll_next_unpin(&mut panic_context()) {\n\n Poll::Ready(None) => panic!(\"stream is at its end\"),\n\n Poll::Ready(Some(e)) => assert_eq!(e, item),\n\n Poll::Pending => panic!(\"stream wasn't ready\"),\n\n }\n\n}\n\n\n", "file_path": "futures/tests/sink.rs", "rank": 48, "score": 277593.0709275762 }, { "content": "#[allow(bad_style)]\n\nfn SplitSink<S: Sink<Item>, Item>(lock: BiLock<S>) -> SplitSink<S, Item> {\n\n SplitSink {\n\n lock,\n\n slot: None,\n\n }\n\n}\n\n\n\n/// A `Sink` part of the split pair\n\n#[derive(Debug)]\n\n#[must_use = \"sinks do nothing unless polled\"]\n\npub struct SplitSink<S: Sink<Item>, Item> {\n\n lock: BiLock<S>,\n\n slot: Option<Item>,\n\n}\n\n\n\nimpl<S: Sink<Item>, Item> Unpin for SplitSink<S, Item> {}\n\n\n\nimpl<S: Sink<Item> + Unpin, Item> SplitSink<S, Item> {\n\n /// Attempts to put the two \"halves\" of a split `Stream + Sink` back\n\n /// together. 
Succeeds only if the `SplitStream<S>` and `SplitSink<S>` are\n", "file_path": "futures-util/src/stream/split.rs", "rank": 49, "score": 275100.897677022 }, { "content": "#[derive(Debug)]\n\nstruct Block<Item> {\n\n offset: usize,\n\n bytes: Item,\n\n}\n\n\n\n/// Sink for the [`into_sink`](super::AsyncWriteExt::into_sink) method.\n\n#[must_use = \"sinks do nothing unless polled\"]\n\n#[derive(Debug)]\n\npub struct IntoSink<W, Item> {\n\n writer: W,\n\n /// An outstanding block for us to push into the underlying writer, along with an offset of how\n\n /// far into this block we have written already.\n\n buffer: Option<Block<Item>>,\n\n}\n\n\n\nimpl<W: Unpin, Item> Unpin for IntoSink<W, Item> {}\n\n\n\nimpl<W: AsyncWrite, Item: AsRef<[u8]>> IntoSink<W, Item> {\n\n unsafe_pinned!(writer: W);\n\n unsafe_unpinned!(buffer: Option<Block<Item>>);\n", "file_path": "futures-util/src/io/into_sink.rs", "rank": 50, "score": 273576.77371590876 }, { "content": "fn iter_pin_mut<T>(slice: Pin<&mut [T]>) -> impl Iterator<Item = Pin<&mut T>> {\n\n // Safety: `std` _could_ make this unsound if it were to decide Pin's\n\n // invariants aren't required to transmit through slices. 
Otherwise this has\n\n // the same safety as a normal field pin projection.\n\n unsafe { slice.get_unchecked_mut() }\n\n .iter_mut()\n\n .map(|t| unsafe { Pin::new_unchecked(t) })\n\n}\n\n\n\n/// Future for the [`join_all`] function.\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct JoinAll<F>\n\nwhere\n\n F: Future,\n\n{\n\n elems: Pin<Box<[ElemState<F>]>>,\n\n}\n\n\n\nimpl<F> fmt::Debug for JoinAll<F>\n\nwhere\n\n F: Future + fmt::Debug,\n\n F::Output: fmt::Debug,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"JoinAll\")\n\n .field(\"elems\", &self.elems)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "futures-util/src/future/join_all.rs", "rank": 51, "score": 272433.3869978888 }, { "content": "fn lock_and_then<T, U, E, F>(\n\n lock: &BiLock<T>,\n\n cx: &mut Context<'_>,\n\n f: F\n\n) -> Poll<Result<U, E>>\n\n where F: FnOnce(Pin<&mut T>, &mut Context<'_>) -> Poll<Result<U, E>>\n\n{\n\n let mut l = ready!(lock.poll_lock(cx));\n\n f(l.as_pin_mut(), cx)\n\n}\n\n\n\npub(super) fn split<T: AsyncRead + AsyncWrite>(t: T) -> (ReadHalf<T>, WriteHalf<T>) {\n\n let (a, b) = BiLock::new(t);\n\n (ReadHalf { handle: a }, WriteHalf { handle: b })\n\n}\n\n\n\nimpl<R: AsyncRead> AsyncRead for ReadHalf<R> {\n\n fn poll_read(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8])\n\n -> Poll<io::Result<usize>>\n\n {\n", "file_path": "futures-util/src/io/split.rs", "rank": 52, "score": 272001.1482081725 }, { "content": "/// Creates an instance of a reader that infinitely repeats one byte.\n\n///\n\n/// All reads from this reader will succeed by filling the specified buffer with\n\n/// the given byte.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncReadExt};\n\n///\n\n/// let mut buffer = [0; 3];\n\n/// let mut reader = io::repeat(0b101);\n\n/// reader.read_exact(&mut buffer).await.unwrap();\n\n/// assert_eq!(buffer, [0b101, 
0b101, 0b101]);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn repeat(byte: u8) -> Repeat {\n\n Repeat { byte }\n\n}\n\n\n\nimpl AsyncRead for Repeat {\n\n #[inline]\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n _: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n for slot in &mut *buf {\n\n *slot = self.byte;\n\n }\n\n Poll::Ready(Ok(buf.len()))\n\n }\n\n\n\n #[inline]\n\n fn poll_read_vectored(\n\n mut self: Pin<&mut Self>,\n", "file_path": "futures-util/src/io/repeat.rs", "rank": 53, "score": 271131.9117245164 }, { "content": "/// Get a reference to a singleton instance of [`PanicSpawner`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use futures::task::SpawnExt;\n\n/// use futures_test::task::panic_spawner_mut;\n\n///\n\n/// let spawner = panic_spawner_mut();\n\n/// spawner.spawn(async { })?; // Will panic\n\n/// # Ok::<(), Box<dyn std::error::Error>>(())\n\n/// ```\n\npub fn panic_spawner_mut() -> &'static mut PanicSpawner {\n\n Box::leak(Box::new(PanicSpawner::new()))\n\n}\n", "file_path": "futures-test/src/task/panic_spawner.rs", "rank": 54, "score": 270946.5583164716 }, { "content": "/// Get a reference to a singleton instance of [`NoopSpawner`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use futures::task::SpawnExt;\n\n/// use futures_test::task::noop_spawner_mut;\n\n///\n\n/// let spawner = noop_spawner_mut();\n\n/// spawner.spawn(async { }).unwrap();\n\n/// ```\n\npub fn noop_spawner_mut() -> &'static mut NoopSpawner {\n\n Box::leak(Box::new(NoopSpawner::new()))\n\n}\n", "file_path": "futures-test/src/task/noop_spawner.rs", "rank": 55, "score": 270937.67283323477 }, { "content": "fn iter_pin_mut<T>(slice: Pin<&mut [T]>) -> impl Iterator<Item = Pin<&mut T>> {\n\n // Safety: `std` _could_ make this unsound if it were to decide Pin's\n\n // invariants aren't required to transmit through slices. 
Otherwise this has\n\n // the same safety as a normal field pin projection.\n\n unsafe { slice.get_unchecked_mut() }\n\n .iter_mut()\n\n .map(|t| unsafe { Pin::new_unchecked(t) })\n\n}\n\n\n", "file_path": "futures-util/src/try_future/try_join_all.rs", "rank": 56, "score": 268028.38359616045 }, { "content": "/// Convert a list of streams into a `Stream` of results from the streams.\n\n///\n\n/// This essentially takes a list of streams (e.g. a vector, an iterator, etc.)\n\n/// and bundles them together into a single stream.\n\n/// The stream will yield items as they become available on the underlying\n\n/// streams internally, in the order they become available.\n\n///\n\n/// Note that the returned set can also be used to dynamically push more\n\n/// futures into the set as they become available.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\npub fn select_all<I>(streams: I) -> SelectAll<I::Item>\n\n where I: IntoIterator,\n\n I::Item: Stream + Unpin\n\n{\n\n let mut set = SelectAll::new();\n\n\n\n for stream in streams {\n\n set.push(stream);\n\n }\n\n\n\n set\n\n}\n\n\n\nimpl<St: Stream + Unpin> FromIterator<St> for SelectAll<St> {\n\n fn from_iter<T: IntoIterator<Item = St>>(iter: T) -> Self {\n\n select_all(iter)\n\n }\n\n}\n", "file_path": "futures-util/src/stream/select_all.rs", "rank": 57, "score": 267033.96620313753 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn assert_fused_future<T: Future + FusedFuture>(_: &T) {}\n\n\n", "file_path": "futures-util/src/async_await/mod.rs", "rank": 58, "score": 266843.7465734306 }, { "content": "#[must_use = \"sinks do nothing unless polled\"]\n\npub trait Sink<Item> {\n\n /// The type of value produced by the sink when an error occurs.\n\n type Error;\n\n\n\n /// Attempts to prepare the `Sink` to receive a value.\n\n ///\n\n /// This method must be called and return `Poll::Ready(Ok(()))` prior to\n\n 
/// each call to `start_send`.\n\n ///\n\n /// This method returns `Poll::Ready` once the underlying sink is ready to\n\n /// receive data. If this method returns `Poll::Pending`, the current task\n\n /// is registered to be notified (via `cx.waker().wake_by_ref()`) when `poll_ready`\n\n /// should be called again.\n\n ///\n\n /// In most cases, if the sink encounters an error, the sink will\n\n /// permanently be unable to receive items.\n\n fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>>;\n\n\n\n /// Begin the process of sending a value to the sink.\n\n /// Each call to this function must be preceded by a successful call to\n", "file_path": "futures-sink/src/lib.rs", "rank": 59, "score": 263545.0647155764 }, { "content": "struct Inner<Fut: Future> {\n\n future_or_output: UnsafeCell<FutureOrOutput<Fut>>,\n\n notifier: Arc<Notifier>,\n\n}\n\n\n", "file_path": "futures-util/src/future/shared.rs", "rank": 60, "score": 254793.98755424583 }, { "content": "/// Create a stream which produces the same item repeatedly.\n\n///\n\n/// The stream never terminates. 
Note that you likely want to avoid\n\n/// usage of `collect` or such on the returned stream as it will exhaust\n\n/// available memory as it tries to just fill up all RAM.\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::stream::{self, StreamExt};\n\n///\n\n/// let stream = stream::repeat(9);\n\n/// assert_eq!(vec![9, 9, 9], stream.take(3).collect::<Vec<i32>>().await);\n\n/// # });\n\n/// ```\n\npub fn repeat<T>(item: T) -> Repeat<T>\n\n where T: Clone\n\n{\n\n Repeat { item }\n\n}\n\n\n\nimpl<T> Unpin for Repeat<T> {}\n\n\n\nimpl<T> Stream for Repeat<T>\n\n where T: Clone\n\n{\n\n type Item = T;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Ready(Some(self.item.clone()))\n\n }\n\n}\n\n\n\nimpl<T> FusedStream for Repeat<T>\n\n where T: Clone,\n\n{\n\n fn is_terminated(&self) -> bool {\n\n false\n\n }\n\n}\n", "file_path": "futures-util/src/stream/repeat.rs", "rank": 61, "score": 254132.99038911387 }, { "content": "#[doc(hidden)]\n\npub fn pending_once() -> PendingOnce {\n\n PendingOnce { is_ready: false }\n\n}\n\n\n\n#[allow(missing_debug_implementations)]\n\n#[doc(hidden)]\n\npub struct PendingOnce {\n\n is_ready: bool,\n\n}\n\n\n\nimpl Future for PendingOnce {\n\n type Output = ();\n\n fn poll(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.is_ready {\n\n Poll::Ready(())\n\n } else {\n\n self.is_ready = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n", "file_path": "futures-util/src/async_await/pending.rs", "rank": 62, "score": 253576.31335106376 }, { "content": "/// Run a future to completion on the current thread.\n\n///\n\n/// This function will block the caller until the given future has completed.\n\n///\n\n/// Use a [`LocalPool`](LocalPool) if you need finer-grained control over\n\n/// spawned tasks.\n\npub fn block_on<F: Future>(f: F) -> F::Output {\n\n pin_mut!(f);\n\n run_executor(|cx| f.as_mut().poll(cx))\n\n}\n\n\n", "file_path": 
"futures-executor/src/local_pool.rs", "rank": 63, "score": 246731.39854288456 }, { "content": "pub fn iter<J, T, E>(i: J) -> Iter<J::IntoIter>\n\n where J: IntoIterator<Item=Result<T, E>>,\n\n{\n\n Iter {\n\n iter: i.into_iter(),\n\n }\n\n}\n\n\n\nimpl<I, T, E> Stream for Iter<I>\n\n where I: Iterator<Item=Result<T, E>>,\n\n{\n\n type Item = T;\n\n type Error = E;\n\n\n\n fn poll_next(&mut self, _: &mut Context<'_>) -> Poll<Option<T>, E> {\n\n match self.iter.next() {\n\n Some(Ok(e)) => Ok(Poll::Ready(Some(e))),\n\n Some(Err(e)) => Err(e),\n\n None => Ok(Poll::Ready(None)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "futures/tests_disabled/stream.rs", "rank": 64, "score": 240724.41497441696 }, { "content": "/// Creates a new `Abortable` future and a `AbortHandle` which can be used to stop it.\n\n///\n\n/// This function is a convenient (but less flexible) alternative to calling\n\n/// `AbortHandle::new` and `Abortable::new` manually.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\npub fn abortable<Fut>(future: Fut) -> (Abortable<Fut>, AbortHandle)\n\n where Fut: Future\n\n{\n\n let (handle, reg) = AbortHandle::new_pair();\n\n (\n\n Abortable::new(future, reg),\n\n handle,\n\n )\n\n}\n\n\n\n/// Indicator that the `Abortable` future was aborted.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct Aborted;\n\n\n\nimpl<Fut> Future for Abortable<Fut> where Fut: Future {\n\n type Output = Result<Fut::Output, Aborted>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n // Check if the future has been aborted\n\n if self.inner.cancel.load(Ordering::Relaxed) {\n", "file_path": "futures-util/src/future/abortable.rs", "rank": 65, "score": 240270.91720507634 }, { "content": "fn read_to_string_internal<R: AsyncRead + ?Sized>(\n\n reader: Pin<&mut R>,\n\n cx: &mut Context<'_>,\n\n buf: &mut String,\n\n bytes: &mut Vec<u8>,\n\n 
start_len: usize,\n\n) -> Poll<io::Result<usize>> {\n\n let ret = ready!(read_to_end_internal(reader, cx, bytes, start_len));\n\n if str::from_utf8(&bytes).is_err() {\n\n Poll::Ready(ret.and_then(|_| {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"stream did not contain valid UTF-8\",\n\n ))\n\n }))\n\n } else {\n\n debug_assert!(buf.is_empty());\n\n // Safety: `bytes` is a valid UTF-8 because `str::from_utf8` returned `Ok`.\n\n mem::swap(unsafe { buf.as_mut_vec() }, bytes);\n\n Poll::Ready(ret)\n", "file_path": "futures-util/src/io/read_to_string.rs", "rank": 66, "score": 237410.6027810414 }, { "content": "/// Wraps a future into a `MaybeDone`\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n/// use futures::pin_mut;\n\n///\n\n/// let future = future::maybe_done(async { 5 });\n\n/// pin_mut!(future);\n\n/// assert_eq!(future.as_mut().take_output(), None);\n\n/// let () = future.as_mut().await;\n\n/// assert_eq!(future.as_mut().take_output(), Some(5));\n\n/// assert_eq!(future.as_mut().take_output(), None);\n\n/// # });\n\n/// ```\n\npub fn maybe_done<Fut: Future>(future: Fut) -> MaybeDone<Fut> {\n\n MaybeDone::Future(future)\n\n}\n\n\n\nimpl<Fut: Future> MaybeDone<Fut> {\n\n /// Returns an [`Option`] containing a mutable reference to the output of the future.\n\n /// The output of this method will be [`Some`] if and only if the inner\n\n /// future has been completed and [`take_output`](MaybeDone::take_output)\n\n /// has not yet been called.\n\n #[inline]\n\n pub fn output_mut(self: Pin<&mut Self>) -> Option<&mut Fut::Output> {\n\n unsafe {\n\n let this = self.get_unchecked_mut();\n\n match this {\n\n MaybeDone::Done(res) => Some(res),\n\n _ => None,\n\n }\n\n }\n\n }\n\n\n", "file_path": "futures-util/src/future/maybe_done.rs", "rank": 67, "score": 237229.62237797354 }, { "content": "#[test]\n\nfn buf_writer_inner_flushes() {\n\n let mut w = BufWriter::with_capacity(3, 
Vec::new());\n\n block_on(w.write(&[0, 1])).unwrap();\n\n assert_eq!(*w.get_ref(), []);\n\n block_on(w.flush()).unwrap();\n\n let w = w.into_inner();\n\n assert_eq!(w, [0, 1]);\n\n}\n\n\n", "file_path": "futures/tests/io_buf_writer.rs", "rank": 68, "score": 236231.6516820193 }, { "content": " /// Read bytes asynchronously.\n\n ///\n\n /// This trait is analogous to the `std::io::BufRead` trait, but integrates\n\n /// with the asynchronous task system. In particular, the `poll_fill_buf`\n\n /// method, unlike `BufRead::fill_buf`, will automatically queue the current task\n\n /// for wakeup and return if data is not yet available, rather than blocking\n\n /// the calling thread.\n\n pub trait AsyncBufRead: AsyncRead {\n\n /// Attempt to return the contents of the internal buffer, filling it with more data\n\n /// from the inner reader if it is empty.\n\n ///\n\n /// On success, returns `Poll::Ready(Ok(buf))`.\n\n ///\n\n /// If no data is available for reading, the method returns\n\n /// `Poll::Pending` and arranges for the current task (via\n\n /// `cx.waker().wake_by_ref()`) to receive a notification when the object becomes\n\n /// readable or is closed.\n\n ///\n\n /// This function is a lower-level call. It needs to be paired with the\n\n /// [`consume`] method to function properly. When calling this\n\n /// method, none of the contents will be \"read\" in the sense that later\n\n /// calling [`poll_read`] may return the same contents. 
As such, [`consume`] must\n\n /// be called with the number of bytes that are consumed from this buffer to\n\n /// ensure that the bytes are never returned twice.\n\n ///\n\n /// [`poll_read`]: AsyncRead::poll_read\n\n /// [`consume`]: AsyncBufRead::consume\n", "file_path": "futures-io/src/lib.rs", "rank": 69, "score": 234929.5535069261 }, { "content": "/// Extension trait for futures 0.1 [`Stream`](futures_01::stream::Stream)\n\npub trait Stream01CompatExt: Stream01 {\n\n /// Converts a futures 0.1\n\n /// [`Stream<Item = T, Error = E>`](futures_01::stream::Stream)\n\n /// into a futures 0.3\n\n /// [`Stream<Item = Result<T, E>>`](futures_core::stream::Stream).\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::stream::StreamExt;\n\n /// use futures_util::compat::Stream01CompatExt;\n\n ///\n\n /// let stream = futures_01::stream::once::<u32, ()>(Ok(1));\n\n /// let mut stream = stream.compat();\n\n /// assert_eq!(stream.next().await, Some(Ok(1)));\n\n /// assert_eq!(stream.next().await, None);\n\n /// # });\n\n /// ```\n\n fn compat(self) -> Compat01As03<Self>\n\n where\n\n Self: Sized,\n\n {\n\n Compat01As03::new(self)\n\n }\n\n}\n\nimpl<St: Stream01> Stream01CompatExt for St {}\n\n\n\n/// Extension trait for futures 0.1 [`Sink`](futures_01::sink::Sink)\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 70, "score": 234616.0206838714 }, { "content": "/// Extension trait for futures 0.1 [`Future`](futures_01::future::Future)\n\npub trait Future01CompatExt: Future01 {\n\n /// Converts a futures 0.1\n\n /// [`Future<Item = T, Error = E>`](futures_01::future::Future)\n\n /// into a futures 0.3\n\n /// [`Future<Output = Result<T, E>>`](futures_core::future::Future).\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// # // TODO: These should be all using `futures::compat`, but that runs up against Cargo\n\n /// # // feature issues\n\n /// use futures_util::compat::Future01CompatExt;\n\n ///\n\n 
/// let future = futures_01::future::ok::<u32, ()>(1);\n\n /// assert_eq!(future.compat().await, Ok(1));\n\n /// # });\n\n /// ```\n\n fn compat(self) -> Compat01As03<Self>\n\n where\n\n Self: Sized,\n\n {\n\n Compat01As03::new(self)\n\n }\n\n}\n\nimpl<Fut: Future01> Future01CompatExt for Fut {}\n\n\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 71, "score": 234610.27952567744 }, { "content": "#[cfg(feature = \"sink\")]\n\npub trait Sink01CompatExt: Sink01 {\n\n /// Converts a futures 0.1\n\n /// [`Sink<SinkItem = T, SinkError = E>`](futures_01::sink::Sink)\n\n /// into a futures 0.3\n\n /// [`Sink<T, Error = E>`](futures_sink::Sink).\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::{sink::SinkExt, stream::StreamExt};\n\n /// use futures_util::compat::{Stream01CompatExt, Sink01CompatExt};\n\n ///\n\n /// let (tx, rx) = futures_01::unsync::mpsc::channel(1);\n\n /// let (mut tx, mut rx) = (tx.sink_compat(), rx.compat());\n\n ///\n\n /// tx.send(1).await.unwrap();\n\n /// drop(tx);\n\n /// assert_eq!(rx.next().await, Some(Ok(1)));\n\n /// assert_eq!(rx.next().await, None);\n\n /// # });\n\n /// ```\n\n fn sink_compat(self) -> Compat01As03Sink<Self, Self::SinkItem>\n\n where\n\n Self: Sized,\n\n {\n\n Compat01As03Sink::new(self)\n\n }\n\n}\n\n#[cfg(feature = \"sink\")]\n\nimpl<Si: Sink01> Sink01CompatExt for Si {}\n\n\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 72, "score": 234609.497605109 }, { "content": "#[test]\n\nfn poll_and_pending() {\n\n let pending_once = async { pending!() };\n\n block_on(async {\n\n pin_mut!(pending_once);\n\n assert_eq!(Poll::Pending, poll!(&mut pending_once));\n\n assert_eq!(Poll::Ready(()), poll!(&mut pending_once));\n\n });\n\n}\n\n\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 73, "score": 233131.95186256157 }, { "content": "#[test]\n\nfn select_streams() {\n\n let (mut tx1, rx1) = mpsc::channel::<i32>(1);\n\n let (mut tx2, rx2) = 
mpsc::channel::<i32>(1);\n\n let mut rx1 = rx1.fuse();\n\n let mut rx2 = rx2.fuse();\n\n let mut ran = false;\n\n let mut total = 0;\n\n block_on(async {\n\n let mut tx1_opt;\n\n let mut tx2_opt;\n\n select! {\n\n _ = rx1.next() => panic!(),\n\n _ = rx2.next() => panic!(),\n\n default => {\n\n tx1.send(2).await.unwrap();\n\n tx2.send(3).await.unwrap();\n\n tx1_opt = Some(tx1);\n\n tx2_opt = Some(tx2);\n\n }\n\n complete => panic!(),\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 74, "score": 230941.49947220567 }, { "content": "/// An extension trait which adds utility methods to `AsyncWrite` types.\n\npub trait AsyncWriteExt: AsyncWrite {\n\n /// Creates a future which will entirely flush this `AsyncWrite`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::io::{AllowStdIo, AsyncWriteExt};\n\n /// use std::io::{BufWriter, Cursor};\n\n ///\n\n /// let mut output = [0u8; 5];\n\n ///\n\n /// {\n\n /// let writer = Cursor::new(&mut output[..]);\n\n /// let mut buffered = AllowStdIo::new(BufWriter::new(writer));\n\n /// buffered.write_all(&[1, 2]).await?;\n\n /// buffered.write_all(&[3, 4]).await?;\n\n /// buffered.flush().await?;\n\n /// }\n\n ///\n", "file_path": "futures-util/src/io/mod.rs", "rank": 75, "score": 230920.77185607897 }, { "content": "/// An extension trait which adds utility methods to `AsyncRead` types.\n\npub trait AsyncReadExt: AsyncRead {\n\n /// Creates an adaptor which will chain this stream with another.\n\n ///\n\n /// The returned `AsyncRead` instance will first read all bytes from this object\n\n /// until EOF is encountered. 
Afterwards the output is equivalent to the\n\n /// output of `next`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::io::AsyncReadExt;\n\n /// use std::io::Cursor;\n\n ///\n\n /// let reader1 = Cursor::new([1, 2, 3, 4]);\n\n /// let reader2 = Cursor::new([5, 6, 7, 8]);\n\n ///\n\n /// let mut reader = reader1.chain(reader2);\n\n /// let mut buffer = Vec::new();\n\n ///\n", "file_path": "futures-util/src/io/mod.rs", "rank": 76, "score": 230920.77185607897 }, { "content": "/// An extension trait which adds utility methods to `AsyncSeek` types.\n\npub trait AsyncSeekExt: AsyncSeek {\n\n /// Creates a future which will seek an IO object, and then yield the\n\n /// new position in the object and the object itself.\n\n ///\n\n /// In the case of an error the buffer and the object will be discarded, with\n\n /// the error yielded.\n\n fn seek(&mut self, pos: SeekFrom) -> Seek<'_, Self>\n\n where Self: Unpin,\n\n {\n\n Seek::new(self, pos)\n\n }\n\n}\n\n\n\nimpl<S: AsyncSeek + ?Sized> AsyncSeekExt for S {}\n\n\n", "file_path": "futures-util/src/io/mod.rs", "rank": 77, "score": 230920.77185607897 }, { "content": " /// Extension trait for tokio-io [`AsyncWrite`](tokio_io::AsyncWrite)\n\n pub trait AsyncWrite01CompatExt: AsyncWrite01 {\n\n /// Converts a tokio-io [`AsyncWrite`](tokio_io::AsyncWrite) into a futures-io 0.3\n\n /// [`AsyncWrite`](futures_io::AsyncWrite).\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::io::AsyncWriteExt;\n\n /// use futures_util::compat::AsyncWrite01CompatExt;\n\n ///\n\n /// let input = b\"Hello World!\";\n\n /// let mut cursor = std::io::Cursor::new(Vec::with_capacity(12));\n\n ///\n\n /// let mut writer = (&mut cursor).compat();\n\n /// writer.write_all(input).await.unwrap();\n\n ///\n\n /// assert_eq!(cursor.into_inner(), input);\n\n /// # });\n\n /// ```\n\n fn compat(self) -> Compat01As03<Self>\n\n where\n", "file_path": 
"futures-util/src/compat/compat01as03.rs", "rank": 78, "score": 230225.79666807386 }, { "content": " /// Extension trait for tokio-io [`AsyncRead`](tokio_io::AsyncRead)\n\n pub trait AsyncRead01CompatExt: AsyncRead01 {\n\n /// Converts a tokio-io [`AsyncRead`](tokio_io::AsyncRead) into a futures-io 0.3\n\n /// [`AsyncRead`](futures_io::AsyncRead).\n\n ///\n\n /// ```\n\n /// #![feature(impl_trait_in_bindings)]\n\n /// # #![allow(incomplete_features)]\n\n /// # futures::executor::block_on(async {\n\n /// use futures::io::AsyncReadExt;\n\n /// use futures_util::compat::AsyncRead01CompatExt;\n\n ///\n\n /// let input = b\"Hello World!\";\n\n /// let reader: impl tokio_io::AsyncRead = std::io::Cursor::new(input);\n\n /// let mut reader: impl futures::io::AsyncRead + Unpin = reader.compat();\n\n ///\n\n /// let mut output = Vec::with_capacity(12);\n\n /// reader.read_to_end(&mut output).await.unwrap();\n\n /// assert_eq!(output, input);\n\n /// # });\n\n /// ```\n\n fn compat(self) -> Compat01As03<Self>\n\n where\n\n Self: Sized,\n\n {\n\n Compat01As03::new(self)\n\n }\n\n }\n\n impl<R: AsyncRead01> AsyncRead01CompatExt for R {}\n\n\n", "file_path": "futures-util/src/compat/compat01as03.rs", "rank": 79, "score": 230225.79666807386 }, { "content": "#[test]\n\nfn maybe_pending_buf_writer_inner_flushes() {\n\n let mut w = BufWriter::with_capacity(3, MaybePending::new(Vec::new()));\n\n run(w.write(&[0, 1])).unwrap();\n\n assert_eq!(&w.get_ref().inner, &[]);\n\n run(w.flush()).unwrap();\n\n let w = w.into_inner().inner;\n\n assert_eq!(w, [0, 1]);\n\n}\n\n\n\n\n", "file_path": "futures/tests/io_buf_writer.rs", "rank": 80, "score": 230109.60883446847 }, { "content": "/// An extension trait for `Stream`s that provides a variety of convenient\n\n/// combinator functions.\n\npub trait StreamExt: Stream {\n\n /// Creates a future that resolves to the next item in the stream.\n\n ///\n\n /// Note that because `next` doesn't take ownership over the stream,\n\n /// the [`Stream`] 
type must be [`Unpin`]. If you want to use `next` with a\n\n /// [`!Unpin`](Unpin) stream, you'll first have to pin the stream. This can\n\n /// be done by boxing the stream using [`Box::pin`] or\n\n /// pinning it to the stack using the `pin_mut!` macro from the `pin_utils`\n\n /// crate.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::stream::{self, StreamExt};\n\n ///\n\n /// let mut stream = stream::iter(1..=3);\n\n ///\n\n /// assert_eq!(stream.next().await, Some(1));\n\n /// assert_eq!(stream.next().await, Some(2));\n", "file_path": "futures-util/src/stream/mod.rs", "rank": 81, "score": 226648.88481261744 }, { "content": "#[test]\n\nfn join_doesnt_require_unpin() {\n\n let _ = async {\n\n join!(async {}, async {})\n\n };\n\n}\n\n\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 82, "score": 226176.75135383164 }, { "content": "#[derive(Debug)]\n\nstruct Inner<T> {\n\n // Max buffer size of the channel. If `None` then the channel is unbounded.\n\n buffer: Option<usize>,\n\n\n\n // Internal channel state. 
Consists of the number of messages stored in the\n\n // channel as well as a flag signalling that the channel is closed.\n\n state: AtomicUsize,\n\n\n\n // Atomic, FIFO queue used to send messages to the receiver\n\n message_queue: Queue<T>,\n\n\n\n // Atomic, FIFO queue used to send parked task handles to the receiver.\n\n parked_queue: Queue<Arc<Mutex<SenderTask>>>,\n\n\n\n // Number of senders in existence\n\n num_senders: AtomicUsize,\n\n\n\n // Handle to the receiver's task.\n\n recv_task: AtomicWaker,\n\n}\n\n\n\n// Struct representation of `Inner::state`.\n", "file_path": "futures-channel/src/mpsc/mod.rs", "rank": 83, "score": 225415.21641131974 }, { "content": "/// Additional combinators for testing async readers.\n\npub trait AsyncReadTestExt: AsyncRead {\n\n /// Introduces an extra [`Poll::Pending`](futures_core::task::Poll::Pending)\n\n /// in between each read of the reader.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use futures::task::Poll;\n\n /// use futures::io::AsyncRead;\n\n /// use futures_test::task::noop_context;\n\n /// use futures_test::io::AsyncReadTestExt;\n\n /// use futures::pin_mut;\n\n ///\n\n /// let reader = std::io::Cursor::new(&[1, 2, 3]).interleave_pending();\n\n /// pin_mut!(reader);\n\n ///\n\n /// let mut cx = noop_context();\n\n ///\n\n /// let mut buf = [0, 0];\n\n ///\n", "file_path": "futures-test/src/io/read/mod.rs", "rank": 84, "score": 225164.86127077267 }, { "content": "/// Additional combinators for testing async writers.\n\npub trait AsyncWriteTestExt: AsyncWrite {\n\n /// Introduces an extra [`Poll::Pending`](futures_core::task::Poll::Pending)\n\n /// in between each operation on the writer.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use futures::task::Poll;\n\n /// use futures::io::AsyncWrite;\n\n /// use futures_test::task::noop_context;\n\n /// use futures_test::io::AsyncWriteTestExt;\n\n /// use futures::pin_mut;\n\n ///\n\n /// let writer = std::io::Cursor::new([0u8; 
4]).interleave_pending_write();\n\n /// pin_mut!(writer);\n\n ///\n\n /// let mut cx = noop_context();\n\n ///\n\n /// assert_eq!(writer.as_mut().poll_write(&mut cx, &[1, 2])?, Poll::Pending);\n\n /// assert_eq!(writer.as_mut().poll_write(&mut cx, &[1, 2])?, Poll::Ready(2));\n", "file_path": "futures-test/src/io/write/mod.rs", "rank": 85, "score": 225164.86127077267 }, { "content": "// Just a helper function to ensure the futures we're returning all have the\n\n// right implementations.\n\nfn assert_future<T, F>(future: F) -> F\n\n where F: Future<Output=T>,\n\n{\n\n future\n\n}\n", "file_path": "futures-util/src/future/mod.rs", "rank": 86, "score": 225079.15485393023 }, { "content": "/// Additional combinators for testing streams.\n\npub trait StreamTestExt: Stream {\n\n /// Introduces an extra [`Poll::Pending`](futures_core::task::Poll::Pending)\n\n /// in between each item of the stream.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use futures::task::Poll;\n\n /// use futures::stream::{self, Stream};\n\n /// use futures_test::task::noop_context;\n\n /// use futures_test::stream::StreamTestExt;\n\n /// use futures::pin_mut;\n\n ///\n\n /// let stream = stream::iter(vec![1, 2]).interleave_pending();\n\n /// pin_mut!(stream);\n\n ///\n\n /// let mut cx = noop_context();\n\n ///\n\n /// assert_eq!(stream.as_mut().poll_next(&mut cx), Poll::Pending);\n\n /// assert_eq!(stream.as_mut().poll_next(&mut cx), Poll::Ready(Some(1)));\n", "file_path": "futures-test/src/stream/mod.rs", "rank": 87, "score": 224474.9509707355 }, { "content": "struct PanickingStream<T, E> {\n\n _marker: PhantomData<(T, E)>\n\n}\n\n\n\nimpl<T, E> Stream for PanickingStream<T, E> {\n\n type Item = Result<T, E>;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "futures/tests/future_try_flatten_stream.rs", "rank": 88, "score": 223762.33452938954 }, { "content": "#[test]\n\nfn 
try_join_doesnt_require_unpin() {\n\n let _ = async {\n\n try_join!(\n\n async { Ok::<(), ()>(()) },\n\n async { Ok::<(), ()>(()) },\n\n )\n\n };\n\n}\n", "file_path": "futures/tests/async_await_macros.rs", "rank": 89, "score": 222866.64655442027 }, { "content": "/// Create a future that is immediately ready with a success value.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future;\n\n///\n\n/// let a = future::ok::<i32, i32>(1);\n\n/// assert_eq!(a.await, Ok(1));\n\n/// # });\n\n/// ```\n\npub fn ok<T, E>(t: T) -> Ready<Result<T, E>> {\n\n Ready(Some(Ok(t)))\n\n}\n\n\n", "file_path": "futures-util/src/future/ready.rs", "rank": 90, "score": 221050.0564010868 }, { "content": "struct ManualAllow<T: Unpin> {\n\n data: Vec<T>,\n\n allow: Rc<Allow>,\n\n}\n\n\n", "file_path": "futures/tests/sink.rs", "rank": 91, "score": 220599.70971203013 }, { "content": "// Immediately accepts all requests to start pushing, but completion is managed\n\n// by manually flushing\n\nstruct ManualFlush<T: Unpin> {\n\n data: Vec<T>,\n\n waiting_tasks: Vec<Waker>,\n\n}\n\n\n\nimpl<T: Unpin> Sink<Option<T>> for ManualFlush<T> {\n\n type Error = ();\n\n\n\n fn poll_ready(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn start_send(mut self: Pin<&mut Self>, item: Option<T>) -> Result<(), Self::Error> {\n\n if let Some(item) = item {\n\n self.data.push(item);\n\n } else {\n\n self.force_flush();\n\n }\n\n Ok(())\n\n }\n", "file_path": "futures/tests/sink.rs", "rank": 92, "score": 220599.70971203013 }, { "content": "/// Adapters specific to `Result`-returning streams\n\npub trait TryStreamExt: TryStream {\n\n /// Wraps the current stream in a new stream which converts the error type\n\n /// into the one provided.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # futures::executor::block_on(async {\n\n /// use futures::stream::{self, TryStreamExt};\n\n ///\n\n 
/// let mut stream =\n\n /// stream::iter(vec![Ok(()), Err(5i32)])\n\n /// .err_into::<i64>();\n\n ///\n\n /// assert_eq!(stream.try_next().await, Ok(Some(())));\n\n /// assert_eq!(stream.try_next().await, Err(5i64));\n\n /// # })\n\n /// ```\n\n fn err_into<E>(self) -> ErrInto<Self, E>\n\n where\n", "file_path": "futures-util/src/try_stream/mod.rs", "rank": 93, "score": 220424.56994019798 }, { "content": "struct FutureData<F, T> {\n\n _data: T,\n\n future: F,\n\n}\n\n\n\nimpl<F, T> FutureData<F, T> {\n\n unsafe_pinned!(future: F);\n\n}\n\n\n\nimpl<F: Future, T: Send + 'static> Future for FutureData<F, T> {\n\n type Output = F::Output;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<F::Output> {\n\n self.future().poll(cx)\n\n }\n\n}\n\n\n", "file_path": "futures/tests/eager_drop.rs", "rank": 94, "score": 220155.4414333388 }, { "content": "fn poll_inner<St1, St2>(\n\n flag: &mut bool,\n\n a: Pin<&mut St1>,\n\n b: Pin<&mut St2>,\n\n cx: &mut Context<'_>\n\n) -> Poll<Option<St1::Item>>\n\n where St1: Stream, St2: Stream<Item = St1::Item>\n\n{\n\n let a_done = match a.poll_next(cx) {\n\n Poll::Ready(Some(item)) => {\n\n // give the other stream a chance to go first next time\n\n *flag = !*flag;\n\n return Poll::Ready(Some(item))\n\n },\n\n Poll::Ready(None) => true,\n\n Poll::Pending => false,\n\n };\n\n\n\n match b.poll_next(cx) {\n\n Poll::Ready(Some(item)) => {\n\n Poll::Ready(Some(item))\n\n }\n\n Poll::Ready(None) if a_done => Poll::Ready(None),\n\n Poll::Ready(None) | Poll::Pending => Poll::Pending,\n\n }\n\n}\n", "file_path": "futures-util/src/stream/select.rs", "rank": 95, "score": 219516.47719828476 }, { "content": "/// Creates a future which represents a collection of the outputs of the futures\n\n/// given.\n\n///\n\n/// The returned future will drive execution for all of its underlying futures,\n\n/// collecting the results into a destination `Vec<T>` in the same order as they\n\n/// were provided.\n\n///\n\n/// This function is only 
available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\n///\n\n/// # See Also\n\n///\n\n/// This is purposefully a very simple API for basic use-cases. In a lot of\n\n/// cases you will want to use the more powerful\n\n/// [`FuturesUnordered`][crate::stream::FuturesUnordered] APIs, some\n\n/// examples of additional functionality that provides:\n\n///\n\n/// * Adding new futures to the set even after it has been started.\n\n///\n\n/// * Only polling the specific futures that have been woken. In cases where\n\n/// you have a lot of futures this will result in much more efficient polling.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # futures::executor::block_on(async {\n\n/// use futures::future::join_all;\n\n///\n\n/// async fn foo(i: u32) -> u32 { i }\n\n///\n\n/// let futures = vec![foo(1), foo(2), foo(3)];\n\n///\n\n/// assert_eq!(join_all(futures).await, [1, 2, 3]);\n\n/// # });\n\n/// ```\n\npub fn join_all<I>(i: I) -> JoinAll<I::Item>\n\nwhere\n\n I: IntoIterator,\n\n I::Item: Future,\n\n{\n\n let elems: Box<[_]> = i.into_iter().map(ElemState::Pending).collect();\n\n JoinAll { elems: elems.into() }\n\n}\n\n\n\nimpl<F> Future for JoinAll<F>\n\nwhere\n\n F: Future,\n\n{\n\n type Output = Vec<F::Output>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let mut all_done = true;\n\n\n\n for mut elem in iter_pin_mut(self.elems.as_mut()) {\n\n if let Some(pending) = elem.as_mut().pending_pin_mut() {\n", "file_path": "futures-util/src/future/join_all.rs", "rank": 96, "score": 216989.30059443522 }, { "content": "#[derive(Debug)]\n\nenum State<Fut, St> {\n\n // future is not yet called or called and not ready\n\n Future(Fut),\n\n // future resolved to Stream\n\n Stream(St),\n\n}\n\n\n\nimpl<Fut, St> State<Fut, St> {\n\n fn get_pin_mut(self: Pin<&mut Self>) -> State<Pin<&mut Fut>, Pin<&mut St>> {\n\n // safety: data is never moved via the resulting &mut 
reference\n\n match unsafe { self.get_unchecked_mut() } {\n\n // safety: the future we're re-pinning here will never be moved;\n\n // it will just be polled, then dropped in place\n\n State::Future(f) => State::Future(unsafe { Pin::new_unchecked(f) }),\n\n // safety: the stream we're repinning here will never be moved;\n\n // it will just be polled, then dropped in place\n\n State::Stream(s) => State::Stream(unsafe { Pin::new_unchecked(s) }),\n\n }\n\n }\n\n}\n", "file_path": "futures-util/src/future/flatten_stream.rs", "rank": 97, "score": 216286.59743795416 }, { "content": "/// Creates a new future which will select over a list of futures.\n\n///\n\n/// The returned future will wait for any future within `iter` to be ready. Upon\n\n/// completion the item resolved will be returned, along with the index of the\n\n/// future that was ready and the list of all the remaining futures.\n\n///\n\n/// There are no guarantees provided on the order of the list with the remaining\n\n/// futures. 
They might be swapped around, reversed, or completely random.\n\n///\n\n/// This function is only available when the `std` or `alloc` feature of this\n\n/// library is activated, and it is activated by default.\n\n///\n\n/// # Panics\n\n///\n\n/// This function will panic if the iterator specified contains no items.\n\npub fn select_all<I>(iter: I) -> SelectAll<I::Item>\n\n where I: IntoIterator,\n\n I::Item: Future + Unpin,\n\n{\n\n let ret = SelectAll {\n\n inner: iter.into_iter().collect()\n\n };\n\n assert!(!ret.inner.is_empty());\n\n ret\n\n}\n\n\n\nimpl<Fut: Future + Unpin> Future for SelectAll<Fut> {\n\n type Output = (Fut::Output, usize, Vec<Fut>);\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let item = self.inner.iter_mut().enumerate().find_map(|(i, f)| {\n\n match f.poll_unpin(cx) {\n\n Poll::Pending => None,\n\n Poll::Ready(e) => Some((i, e)),\n\n }\n", "file_path": "futures-util/src/future/select_all.rs", "rank": 98, "score": 214491.9228238907 }, { "content": "fn assert_done<T, F>(actual_fut: F, expected: T)\n\nwhere\n\n T: PartialEq + Debug,\n\n F: FnOnce() -> Box<dyn Future<Output = T> + Unpin>,\n\n{\n\n let output = block_on(actual_fut());\n\n assert_eq!(output, expected);\n\n}\n\n\n", "file_path": "futures/tests/join_all.rs", "rank": 99, "score": 208890.84101145307 } ]
Rust
src/usbphy/pll_sic_tog.rs
thorhs/mk66f18
ea5a3c933656be9f2f548b28dee91d0bb7821923
#[doc = "Reader of register PLL_SIC_TOG"] pub type R = crate::R<u32, super::PLL_SIC_TOG>; #[doc = "Writer for register PLL_SIC_TOG"] pub type W = crate::W<u32, super::PLL_SIC_TOG>; #[doc = "Register PLL_SIC_TOG `reset()`'s with value 0x0001_2000"] impl crate::ResetValue for super::PLL_SIC_TOG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x0001_2000 } } #[doc = "This field controls the USB PLL feedback loop divider\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PLL_DIV_SEL_A { #[doc = "0: PLL reference frequency = 24MHz"] _00, #[doc = "1: PLL reference frequency = 16MHz"] _01, } impl From<PLL_DIV_SEL_A> for u8 { #[inline(always)] fn from(variant: PLL_DIV_SEL_A) -> Self { match variant { PLL_DIV_SEL_A::_00 => 0, PLL_DIV_SEL_A::_01 => 1, } } } #[doc = "Reader of field `PLL_DIV_SEL`"] pub type PLL_DIV_SEL_R = crate::R<u8, PLL_DIV_SEL_A>; impl PLL_DIV_SEL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, PLL_DIV_SEL_A> { use crate::Variant::*; match self.bits { 0 => Val(PLL_DIV_SEL_A::_00), 1 => Val(PLL_DIV_SEL_A::_01), i => Res(i), } } #[doc = "Checks if the value of the field is `_00`"] #[inline(always)] pub fn is_00(&self) -> bool { *self == PLL_DIV_SEL_A::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline(always)] pub fn is_01(&self) -> bool { *self == PLL_DIV_SEL_A::_01 } } #[doc = "Write proxy for field `PLL_DIV_SEL`"] pub struct PLL_DIV_SEL_W<'a> { w: &'a mut W, } impl<'a> PLL_DIV_SEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PLL_DIV_SEL_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "PLL reference frequency = 24MHz"] #[inline(always)] pub fn _00(self) -> &'a mut W { self.variant(PLL_DIV_SEL_A::_00) } #[doc = "PLL reference frequency = 16MHz"] #[inline(always)] pub fn _01(self) -> &'a mut W { self.variant(PLL_DIV_SEL_A::_01) } #[doc = r"Writes raw bits to the field"] 
#[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } #[doc = "Reader of field `PLL_EN_USB_CLKS`"] pub type PLL_EN_USB_CLKS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_EN_USB_CLKS`"] pub struct PLL_EN_USB_CLKS_W<'a> { w: &'a mut W, } impl<'a> PLL_EN_USB_CLKS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Reader of field `PLL_HOLD_RING_OFF`"] pub type PLL_HOLD_RING_OFF_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_HOLD_RING_OFF`"] pub struct PLL_HOLD_RING_OFF_W<'a> { w: &'a mut W, } impl<'a> PLL_HOLD_RING_OFF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Reader of field `PLL_POWER`"] pub type PLL_POWER_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_POWER`"] pub struct PLL_POWER_W<'a> { w: &'a mut W, } impl<'a> PLL_POWER_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 
0x01) << 12); self.w } } #[doc = "Reader of field `PLL_ENABLE`"] pub type PLL_ENABLE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_ENABLE`"] pub struct PLL_ENABLE_W<'a> { w: &'a mut W, } impl<'a> PLL_ENABLE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13); self.w } } #[doc = "Reader of field `PLL_BYPASS`"] pub type PLL_BYPASS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_BYPASS`"] pub struct PLL_BYPASS_W<'a> { w: &'a mut W, } impl<'a> PLL_BYPASS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "USB PLL lock status indicator\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PLL_LOCK_A { #[doc = "0: PLL is not currently locked"] _0, #[doc = "1: PLL is currently locked"] _1, } impl From<PLL_LOCK_A> for bool { #[inline(always)] fn from(variant: PLL_LOCK_A) -> Self { match variant { PLL_LOCK_A::_0 => false, PLL_LOCK_A::_1 => true, } } } #[doc = "Reader of field `PLL_LOCK`"] pub type PLL_LOCK_R = crate::R<bool, PLL_LOCK_A>; impl PLL_LOCK_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PLL_LOCK_A { match self.bits { false => PLL_LOCK_A::_0, true => PLL_LOCK_A::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline(always)] pub fn is_0(&self) -> bool { *self 
== PLL_LOCK_A::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline(always)] pub fn is_1(&self) -> bool { *self == PLL_LOCK_A::_1 } } impl R { #[doc = "Bits 0:1 - This field controls the USB PLL feedback loop divider"] #[inline(always)] pub fn pll_div_sel(&self) -> PLL_DIV_SEL_R { PLL_DIV_SEL_R::new((self.bits & 0x03) as u8) } #[doc = "Bit 6 - Enable the USB clock output from the USB PHY PLL."] #[inline(always)] pub fn pll_en_usb_clks(&self) -> PLL_EN_USB_CLKS_R { PLL_EN_USB_CLKS_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 11 - Analog debug bit"] #[inline(always)] pub fn pll_hold_ring_off(&self) -> PLL_HOLD_RING_OFF_R { PLL_HOLD_RING_OFF_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 12 - Power up the USB PLL."] #[inline(always)] pub fn pll_power(&self) -> PLL_POWER_R { PLL_POWER_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 13 - Enable the clock output from the USB PLL."] #[inline(always)] pub fn pll_enable(&self) -> PLL_ENABLE_R { PLL_ENABLE_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 16 - Bypass the USB PLL."] #[inline(always)] pub fn pll_bypass(&self) -> PLL_BYPASS_R { PLL_BYPASS_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 31 - USB PLL lock status indicator"] #[inline(always)] pub fn pll_lock(&self) -> PLL_LOCK_R { PLL_LOCK_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:1 - This field controls the USB PLL feedback loop divider"] #[inline(always)] pub fn pll_div_sel(&mut self) -> PLL_DIV_SEL_W { PLL_DIV_SEL_W { w: self } } #[doc = "Bit 6 - Enable the USB clock output from the USB PHY PLL."] #[inline(always)] pub fn pll_en_usb_clks(&mut self) -> PLL_EN_USB_CLKS_W { PLL_EN_USB_CLKS_W { w: self } } #[doc = "Bit 11 - Analog debug bit"] #[inline(always)] pub fn pll_hold_ring_off(&mut self) -> PLL_HOLD_RING_OFF_W { PLL_HOLD_RING_OFF_W { w: self } } #[doc = "Bit 12 - Power up the USB PLL."] #[inline(always)] pub fn pll_power(&mut self) -> PLL_POWER_W { PLL_POWER_W { w: self } } #[doc = 
"Bit 13 - Enable the clock output from the USB PLL."] #[inline(always)] pub fn pll_enable(&mut self) -> PLL_ENABLE_W { PLL_ENABLE_W { w: self } } #[doc = "Bit 16 - Bypass the USB PLL."] #[inline(always)] pub fn pll_bypass(&mut self) -> PLL_BYPASS_W { PLL_BYPASS_W { w: self } } }
#[doc = "Reader of register PLL_SIC_TOG"] pub type R = crate::R<u32, super::PLL_SIC_TOG>; #[doc = "Writer for register PLL_SIC_TOG"] pub type W = crate::W<u32, super::PLL_SIC_TOG>; #[doc = "Register PLL_SIC_TOG `reset()`'s with value 0x0001_2000"] impl crate::ResetValue for super::PLL_SIC_TOG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x0001_2000 } } #[doc = "This field controls the USB PLL feedback loop divider\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PLL_DIV_SEL_A { #[doc = "0: PLL reference frequency = 24MHz"] _00, #[doc = "1: PLL reference frequency = 16MHz"] _01, } impl From<PLL_DIV_SEL_A> for u8 { #[inline(always)] fn from(variant: PLL_DIV_SEL_A) -> Self { match variant { PLL_DIV_SEL_A::_00 => 0, PLL_DIV_SEL_A::_01 => 1, } } } #[doc = "Reader of field `PLL_DIV_SEL`"] pub type PLL_DIV_SEL_R = crate::R<u8, PLL_DIV_SEL_A>; impl PLL_DIV_SEL_R { #[doc = r"Get enumerated values variant"] #[inline(always)]
#[doc = "Checks if the value of the field is `_00`"] #[inline(always)] pub fn is_00(&self) -> bool { *self == PLL_DIV_SEL_A::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline(always)] pub fn is_01(&self) -> bool { *self == PLL_DIV_SEL_A::_01 } } #[doc = "Write proxy for field `PLL_DIV_SEL`"] pub struct PLL_DIV_SEL_W<'a> { w: &'a mut W, } impl<'a> PLL_DIV_SEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PLL_DIV_SEL_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "PLL reference frequency = 24MHz"] #[inline(always)] pub fn _00(self) -> &'a mut W { self.variant(PLL_DIV_SEL_A::_00) } #[doc = "PLL reference frequency = 16MHz"] #[inline(always)] pub fn _01(self) -> &'a mut W { self.variant(PLL_DIV_SEL_A::_01) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } #[doc = "Reader of field `PLL_EN_USB_CLKS`"] pub type PLL_EN_USB_CLKS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_EN_USB_CLKS`"] pub struct PLL_EN_USB_CLKS_W<'a> { w: &'a mut W, } impl<'a> PLL_EN_USB_CLKS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Reader of field `PLL_HOLD_RING_OFF`"] pub type PLL_HOLD_RING_OFF_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_HOLD_RING_OFF`"] pub struct PLL_HOLD_RING_OFF_W<'a> { w: &'a mut W, } impl<'a> PLL_HOLD_RING_OFF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] 
#[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Reader of field `PLL_POWER`"] pub type PLL_POWER_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_POWER`"] pub struct PLL_POWER_W<'a> { w: &'a mut W, } impl<'a> PLL_POWER_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12); self.w } } #[doc = "Reader of field `PLL_ENABLE`"] pub type PLL_ENABLE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_ENABLE`"] pub struct PLL_ENABLE_W<'a> { w: &'a mut W, } impl<'a> PLL_ENABLE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13); self.w } } #[doc = "Reader of field `PLL_BYPASS`"] pub type PLL_BYPASS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLL_BYPASS`"] pub struct PLL_BYPASS_W<'a> { w: &'a mut W, } impl<'a> PLL_BYPASS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { 
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "USB PLL lock status indicator\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PLL_LOCK_A { #[doc = "0: PLL is not currently locked"] _0, #[doc = "1: PLL is currently locked"] _1, } impl From<PLL_LOCK_A> for bool { #[inline(always)] fn from(variant: PLL_LOCK_A) -> Self { match variant { PLL_LOCK_A::_0 => false, PLL_LOCK_A::_1 => true, } } } #[doc = "Reader of field `PLL_LOCK`"] pub type PLL_LOCK_R = crate::R<bool, PLL_LOCK_A>; impl PLL_LOCK_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PLL_LOCK_A { match self.bits { false => PLL_LOCK_A::_0, true => PLL_LOCK_A::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline(always)] pub fn is_0(&self) -> bool { *self == PLL_LOCK_A::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline(always)] pub fn is_1(&self) -> bool { *self == PLL_LOCK_A::_1 } } impl R { #[doc = "Bits 0:1 - This field controls the USB PLL feedback loop divider"] #[inline(always)] pub fn pll_div_sel(&self) -> PLL_DIV_SEL_R { PLL_DIV_SEL_R::new((self.bits & 0x03) as u8) } #[doc = "Bit 6 - Enable the USB clock output from the USB PHY PLL."] #[inline(always)] pub fn pll_en_usb_clks(&self) -> PLL_EN_USB_CLKS_R { PLL_EN_USB_CLKS_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 11 - Analog debug bit"] #[inline(always)] pub fn pll_hold_ring_off(&self) -> PLL_HOLD_RING_OFF_R { PLL_HOLD_RING_OFF_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 12 - Power up the USB PLL."] #[inline(always)] pub fn pll_power(&self) -> PLL_POWER_R { PLL_POWER_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 13 - Enable the clock output from the USB PLL."] #[inline(always)] pub fn pll_enable(&self) -> PLL_ENABLE_R { PLL_ENABLE_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 16 - Bypass the USB PLL."] #[inline(always)] pub fn pll_bypass(&self) -> PLL_BYPASS_R { 
PLL_BYPASS_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 31 - USB PLL lock status indicator"] #[inline(always)] pub fn pll_lock(&self) -> PLL_LOCK_R { PLL_LOCK_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:1 - This field controls the USB PLL feedback loop divider"] #[inline(always)] pub fn pll_div_sel(&mut self) -> PLL_DIV_SEL_W { PLL_DIV_SEL_W { w: self } } #[doc = "Bit 6 - Enable the USB clock output from the USB PHY PLL."] #[inline(always)] pub fn pll_en_usb_clks(&mut self) -> PLL_EN_USB_CLKS_W { PLL_EN_USB_CLKS_W { w: self } } #[doc = "Bit 11 - Analog debug bit"] #[inline(always)] pub fn pll_hold_ring_off(&mut self) -> PLL_HOLD_RING_OFF_W { PLL_HOLD_RING_OFF_W { w: self } } #[doc = "Bit 12 - Power up the USB PLL."] #[inline(always)] pub fn pll_power(&mut self) -> PLL_POWER_W { PLL_POWER_W { w: self } } #[doc = "Bit 13 - Enable the clock output from the USB PLL."] #[inline(always)] pub fn pll_enable(&mut self) -> PLL_ENABLE_W { PLL_ENABLE_W { w: self } } #[doc = "Bit 16 - Bypass the USB PLL."] #[inline(always)] pub fn pll_bypass(&mut self) -> PLL_BYPASS_W { PLL_BYPASS_W { w: self } } }
pub fn variant(&self) -> crate::Variant<u8, PLL_DIV_SEL_A> { use crate::Variant::*; match self.bits { 0 => Val(PLL_DIV_SEL_A::_00), 1 => Val(PLL_DIV_SEL_A::_01), i => Res(i), } }
function_block-full_function
[ { "content": " ///\n\n ///Registers marked with `Readable` can be also `modify`'ed\n\n pub trait Writable { } ///Reset value of the register\n", "file_path": "lib.rs", "rank": 0, "score": 220323.13240861555 }, { "content": " ///\n\n ///This value is initial value for `write` method.\n\n ///It can be also directly writed to register by `reset` method.\n\n pub trait ResetValue { ///Register size\n\n type Type ; ///Reset value of the register\n\n fn reset_value ( ) -> Self :: Type ; } ///This structure provides volatile access to register\n\n pub struct Reg < U , REG > { register : vcell :: VolatileCell < U > , _marker : marker :: PhantomData < REG > } unsafe impl < U : Send , REG > Send for Reg < U , REG > { } impl < U , REG > Reg < U , REG > where Self : Readable , U : Copy { ///Reads the contents of `Readable` register\n\n ///\n\n ///You can read the contents of a register in such way:\n\n ///```ignore\n\n ///let bits = periph.reg.read().bits();\n\n ///```\n\n ///or get the content of a particular field of a register.\n\n ///```ignore\n\n ///let reader = periph.reg.read();\n\n ///let bits = reader.field1().bits();\n\n ///let flag = reader.field2().bit_is_set();\n\n ///```\n\n # [ inline ( always ) ] pub fn read ( & self ) -> R < U , Self > { R { bits : self . register . get ( ) , _reg : marker :: PhantomData } } } impl < U , REG > Reg < U , REG > where Self : ResetValue < Type = U > + Writable , U : Copy { ///Writes the reset value to `Writable` register\n\n ///\n\n ///Resets the register to its initial state\n\n # [ inline ( always ) ] pub fn reset ( & self ) { self . register . 
set ( Self :: reset_value ( ) ) } } impl < U , REG > Reg < U , REG > where Self : ResetValue < Type = U > + Writable , U : Copy { ///Writes bits to `Writable` register\n\n ///\n", "file_path": "lib.rs", "rank": 1, "score": 220072.9232529727 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 2, "score": 173006.40805211107 }, { "content": " ///\n\n ///Registers marked with `Writable` can be also `modify`'ed\n\n pub trait Readable { } ///This trait shows that register has `write`, `write_with_zero` and `reset` method\n", "file_path": "lib.rs", "rank": 3, "score": 137054.11730697207 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 4, "score": 67972.79078666838 }, { "content": "#[doc = \"Reader of register MATCH\"]\n\npub type R = crate::R<u32, 
super::MATCH>;\n\n#[doc = \"Writer for register MATCH\"]\n\npub type W = crate::W<u32, super::MATCH>;\n\n#[doc = \"Register MATCH `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MATCH {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MA1`\"]\n\npub type MA1_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MA1`\"]\n\npub struct MA1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MA1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lpuart0/match_.rs", "rank": 5, "score": 62531.44355658657 }, { "content": "#[doc = \"Reader of register DEBUG\"]\n\npub type R = crate::R<u32, super::DEBUG>;\n\n#[doc = \"Writer for register DEBUG\"]\n\npub type W = crate::W<u32, super::DEBUG>;\n\n#[doc = \"Register DEBUG `reset()`'s with value 0x7f18_0000\"]\n\nimpl crate::ResetValue for super::DEBUG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f18_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `OTGIDPIOLOCK`\"]\n\npub type OTGIDPIOLOCK_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OTGIDPIOLOCK`\"]\n\npub struct OTGIDPIOLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OTGIDPIOLOCK_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/debug.rs", "rank": 6, "score": 62521.66612240734 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SQUELCHRESETLENGTH`\"]\n\npub type SQUELCHRESETLENGTH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETLENGTH`\"]\n\npub struct SQUELCHRESETLENGTH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETLENGTH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 25)) | (((value as u32) & 0x0f) << 25);\n\n self.w\n\n 
}\n\n}\n\n#[doc = \"Reader of field `HOST_RESUME_DEBUG`\"]\n\npub type HOST_RESUME_DEBUG_R = crate::R<bool, bool>;\n", "file_path": "src/usbphy/debug.rs", "rank": 7, "score": 62494.48365077977 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x03ff) | ((value as u32) & 0x03ff);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MA2`\"]\n\npub type MA2_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MA2`\"]\n\npub struct MA2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MA2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03ff << 16)) | (((value as u32) & 0x03ff) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/lpuart0/match_.rs", "rank": 8, "score": 62488.70304103383 }, { "content": "impl<'a> ENHSTPULLDOWN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u32) & 0x03) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX2RXCOUNT`\"]\n\npub type TX2RXCOUNT_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TX2RXCOUNT`\"]\n\npub struct TX2RXCOUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX2RXCOUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n", "file_path": "src/usbphy/debug.rs", "rank": 9, "score": 62487.76656823425 }, { "content": "#[doc = \"Reader of field `HSTPULLDOWN`\"]\n\npub type HSTPULLDOWN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `HSTPULLDOWN`\"]\n\npub struct HSTPULLDOWN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HSTPULLDOWN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n 
#[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENHSTPULLDOWN`\"]\n\npub type ENHSTPULLDOWN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ENHSTPULLDOWN`\"]\n\npub struct ENHSTPULLDOWN_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbphy/debug.rs", "rank": 10, "score": 62487.38663471544 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SQUELCHRESETCOUNT`\"]\n\npub type SQUELCHRESETCOUNT_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETCOUNT`\"]\n\npub struct SQUELCHRESETCOUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETCOUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x1f << 16)) | (((value as u32) & 0x1f) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/usbphy/debug.rs", "rank": 11, "score": 62486.90422784278 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 0 - Once OTG ID from USBPHY_STATUS_OTGID_STATUS is sampled, use this to hold the value\"]\n\n #[inline(always)]\n\n pub fn otgidpiolock(&self) -> OTGIDPIOLOCK_R {\n\n OTGIDPIOLOCK_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Use holding registers to assist in timing for external UTMI interface.\"]\n\n #[inline(always)]\n\n pub fn debug_interface_hold(&self) -> DEBUG_INTERFACE_HOLD_R {\n\n DEBUG_INTERFACE_HOLD_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 2:3 - This bit field selects whether to connect pulldown 
resistors on the USB_DP/USB_DM pins if the corresponding pulldown overdrive mode is enabled through USBPHY_DEBUG\\\\[5:4} Set bit 3 to value 1'b1 to connect the 15ohm pulldown on USB_DP line\"]\n\n #[inline(always)]\n\n pub fn hstpulldown(&self) -> HSTPULLDOWN_R {\n", "file_path": "src/usbphy/debug.rs", "rank": 12, "score": 62483.79730519117 }, { "content": "impl R {\n\n #[doc = \"Bits 0:9 - Match Address 1\"]\n\n #[inline(always)]\n\n pub fn ma1(&self) -> MA1_R {\n\n MA1_R::new((self.bits & 0x03ff) as u16)\n\n }\n\n #[doc = \"Bits 16:25 - Match Address 2\"]\n\n #[inline(always)]\n\n pub fn ma2(&self) -> MA2_R {\n\n MA2_R::new(((self.bits >> 16) & 0x03ff) as u16)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:9 - Match Address 1\"]\n\n #[inline(always)]\n\n pub fn ma1(&mut self) -> MA1_W {\n\n MA1_W { w: self }\n\n }\n\n #[doc = \"Bits 16:25 - Match Address 2\"]\n\n #[inline(always)]\n\n pub fn ma2(&mut self) -> MA2_W {\n\n MA2_W { w: self }\n\n }\n\n}\n", "file_path": "src/lpuart0/match_.rs", "rank": 13, "score": 62480.975061009616 }, { "content": " CLKGATE_R::new(((self.bits >> 30) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Once OTG ID from USBPHY_STATUS_OTGID_STATUS is sampled, use this to hold the value\"]\n\n #[inline(always)]\n\n pub fn otgidpiolock(&mut self) -> OTGIDPIOLOCK_W {\n\n OTGIDPIOLOCK_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - Use holding registers to assist in timing for external UTMI interface.\"]\n\n #[inline(always)]\n\n pub fn debug_interface_hold(&mut self) -> DEBUG_INTERFACE_HOLD_W {\n\n DEBUG_INTERFACE_HOLD_W { w: self }\n\n }\n\n #[doc = \"Bits 2:3 - This bit field selects whether to connect pulldown resistors on the USB_DP/USB_DM pins if the corresponding pulldown overdrive mode is enabled through USBPHY_DEBUG\\\\[5:4} Set bit 3 to value 1'b1 to connect the 15ohm pulldown on USB_DP line\"]\n\n #[inline(always)]\n\n pub fn hstpulldown(&mut self) -> HSTPULLDOWN_W {\n\n HSTPULLDOWN_W { w: self }\n\n }\n\n #[doc = \"Bits 4:5 - 
This bit field selects host pulldown overdrive mode\"]\n", "file_path": "src/usbphy/debug.rs", "rank": 14, "score": 62478.41861083577 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DEBUG_INTERFACE_HOLD`\"]\n\npub type DEBUG_INTERFACE_HOLD_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DEBUG_INTERFACE_HOLD`\"]\n\npub struct DEBUG_INTERFACE_HOLD_W<'a> {\n", "file_path": "src/usbphy/debug.rs", "rank": 15, "score": 62478.082192992704 }, { "content": "#[doc = \"Reader of register CONTROL\"]\n\npub type R = crate::R<u32, super::CONTROL>;\n\n#[doc = \"Writer for register CONTROL\"]\n\npub type W = crate::W<u32, super::CONTROL>;\n\n#[doc = \"Register CONTROL `reset()`'s with value 0x0001_0000\"]\n\nimpl crate::ResetValue for super::CONTROL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0001_0000\n\n }\n\n}\n\n#[doc = \"Interrupt Acknowledge\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum IACK_AW {\n\n #[doc = \"0: Do not clear the interrupt.\"]\n\n _0,\n\n #[doc = \"1: Clear the IF bit (interrupt flag).\"]\n\n _1,\n\n}\n", "file_path": "src/usbdcd/control.rs", "rank": 16, "score": 62475.85359728413 }, { "content": "#[doc = \"Reader of register CONTROL\"]\n\npub type R = crate::R<u32, super::CONTROL>;\n\n#[doc = \"Writer for register CONTROL\"]\n\npub type W = crate::W<u32, super::CONTROL>;\n\n#[doc = \"Register CONTROL `reset()`'s with value 0x0001_0000\"]\n\nimpl crate::ResetValue for super::CONTROL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> 
Self::Type {\n\n 0x0001_0000\n\n }\n\n}\n\n#[doc = \"Interrupt Acknowledge\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum IACK_AW {\n\n #[doc = \"0: Do not clear the interrupt.\"]\n\n _0,\n\n #[doc = \"1: Clear the IF bit (interrupt flag).\"]\n\n _1,\n\n}\n", "file_path": "src/usbhsdcd/control.rs", "rank": 17, "score": 62475.85359728413 }, { "content": "#[doc = \"Reader of register CONTROL\"]\n\npub type R = crate::R<u8, super::CONTROL>;\n\n#[doc = \"Writer for register CONTROL\"]\n\npub type W = crate::W<u8, super::CONTROL>;\n\n#[doc = \"Register CONTROL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CONTROL {\n\n type Type = u8;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Provides control of the DP Pullup in USBOTG, if USB is configured in non-OTG device mode.\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DPPULLUPNONOTG_A {\n\n #[doc = \"0: DP Pullup in non-OTG device mode is not enabled.\"]\n\n _0,\n\n #[doc = \"1: DP Pullup in non-OTG device mode is enabled.\"]\n\n _1,\n\n}\n", "file_path": "src/usb0/control.rs", "rank": 18, "score": 62475.33173199605 }, { "content": "#[doc = \"Write proxy for field `HOST_RESUME_DEBUG`\"]\n\npub struct HOST_RESUME_DEBUG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HOST_RESUME_DEBUG_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);\n\n self.w\n", "file_path": "src/usbphy/debug.rs", "rank": 19, "score": 62471.916496795755 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> DEBUG_INTERFACE_HOLD_W<'a> 
{\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n", "file_path": "src/usbphy/debug.rs", "rank": 20, "score": 62470.4668667981 }, { "content": "#[doc = \"Reader of field `ENSQUELCHRESET`\"]\n\npub type ENSQUELCHRESET_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENSQUELCHRESET`\"]\n\npub struct ENSQUELCHRESET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENSQUELCHRESET_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/usbphy/debug.rs", "rank": 21, "score": 62469.726654990794 }, { "content": " SQUELCHRESETCOUNT_R::new(((self.bits >> 16) & 0x1f) as u8)\n\n }\n\n #[doc = \"Bit 24 - Set bit to allow squelch to reset high-speed receive.\"]\n\n #[inline(always)]\n\n pub fn ensquelchreset(&self) -> ENSQUELCHRESET_R {\n\n ENSQUELCHRESET_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 25:28 - Duration of RESET in terms of the number of 480-MHz cycles.\"]\n\n #[inline(always)]\n\n pub fn squelchresetlength(&self) -> SQUELCHRESETLENGTH_R {\n\n SQUELCHRESETLENGTH_R::new(((self.bits >> 25) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bit 29 - Choose to trigger the host resume SE0 with HOST_FORCE_LS_SE0 = 0 or UTMI_SUSPEND = 1.\"]\n\n #[inline(always)]\n\n pub fn host_resume_debug(&self) -> 
HOST_RESUME_DEBUG_R {\n\n HOST_RESUME_DEBUG_R::new(((self.bits >> 29) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 30 - Gate Test Clocks\"]\n\n #[inline(always)]\n\n pub fn clkgate(&self) -> CLKGATE_R {\n", "file_path": "src/usbphy/debug.rs", "rank": 22, "score": 62468.097577090586 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `ENTX2RXCOUNT`\"]\n\npub type ENTX2RXCOUNT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENTX2RXCOUNT`\"]\n\npub struct ENTX2RXCOUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENTX2RXCOUNT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/debug.rs", "rank": 23, "score": 62465.868186917614 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `CLKGATE`\"]\n\npub type CLKGATE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CLKGATE`\"]\n\npub struct CLKGATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CLKGATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/debug.rs", "rank": 24, "score": 62465.868186917614 }, { "content": " #[inline(always)]\n\n pub fn ensquelchreset(&mut self) -> ENSQUELCHRESET_W {\n\n ENSQUELCHRESET_W { w: self }\n\n }\n\n #[doc = \"Bits 25:28 - Duration of RESET in terms of the number of 480-MHz cycles.\"]\n\n #[inline(always)]\n\n pub fn squelchresetlength(&mut self) -> SQUELCHRESETLENGTH_W {\n\n SQUELCHRESETLENGTH_W { w: self }\n\n }\n\n #[doc = \"Bit 29 - Choose to trigger the host resume SE0 with HOST_FORCE_LS_SE0 = 0 or 
UTMI_SUSPEND = 1.\"]\n\n #[inline(always)]\n\n pub fn host_resume_debug(&mut self) -> HOST_RESUME_DEBUG_W {\n\n HOST_RESUME_DEBUG_W { w: self }\n\n }\n\n #[doc = \"Bit 30 - Gate Test Clocks\"]\n\n #[inline(always)]\n\n pub fn clkgate(&mut self) -> CLKGATE_W {\n\n CLKGATE_W { w: self }\n\n }\n\n}\n", "file_path": "src/usbphy/debug.rs", "rank": 25, "score": 62461.37841552665 }, { "content": " HSTPULLDOWN_R::new(((self.bits >> 2) & 0x03) as u8)\n\n }\n\n #[doc = \"Bits 4:5 - This bit field selects host pulldown overdrive mode\"]\n\n #[inline(always)]\n\n pub fn enhstpulldown(&self) -> ENHSTPULLDOWN_R {\n\n ENHSTPULLDOWN_R::new(((self.bits >> 4) & 0x03) as u8)\n\n }\n\n #[doc = \"Bits 8:11 - Delay in between the end of transmit to the beginning of receive\"]\n\n #[inline(always)]\n\n pub fn tx2rxcount(&self) -> TX2RXCOUNT_R {\n\n TX2RXCOUNT_R::new(((self.bits >> 8) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bit 12 - Set this bit to allow a countdown to transition in between TX and RX.\"]\n\n #[inline(always)]\n\n pub fn entx2rxcount(&self) -> ENTX2RXCOUNT_R {\n\n ENTX2RXCOUNT_R::new(((self.bits >> 12) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 16:20 - Delay in between the detection of squelch to the reset of high-speed RX.\"]\n\n #[inline(always)]\n\n pub fn squelchresetcount(&self) -> SQUELCHRESETCOUNT_R {\n", "file_path": "src/usbphy/debug.rs", "rank": 26, "score": 62461.17155349867 }, { "content": " #[inline(always)]\n\n pub fn enhstpulldown(&mut self) -> ENHSTPULLDOWN_W {\n\n ENHSTPULLDOWN_W { w: self }\n\n }\n\n #[doc = \"Bits 8:11 - Delay in between the end of transmit to the beginning of receive\"]\n\n #[inline(always)]\n\n pub fn tx2rxcount(&mut self) -> TX2RXCOUNT_W {\n\n TX2RXCOUNT_W { w: self }\n\n }\n\n #[doc = \"Bit 12 - Set this bit to allow a countdown to transition in between TX and RX.\"]\n\n #[inline(always)]\n\n pub fn entx2rxcount(&mut self) -> ENTX2RXCOUNT_W {\n\n ENTX2RXCOUNT_W { w: self }\n\n }\n\n #[doc = \"Bits 16:20 - Delay in between the detection of 
squelch to the reset of high-speed RX.\"]\n\n #[inline(always)]\n\n pub fn squelchresetcount(&mut self) -> SQUELCHRESETCOUNT_W {\n\n SQUELCHRESETCOUNT_W { w: self }\n\n }\n\n #[doc = \"Bit 24 - Set bit to allow squelch to reset high-speed receive.\"]\n", "file_path": "src/usbphy/debug.rs", "rank": 27, "score": 62455.86472830945 }, { "content": "#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SHCSR;\n\n#[doc = \"`read()` method returns [shcsr::R](shcsr::R) reader structure\"]\n\nimpl crate::Readable for SHCSR {}\n\n#[doc = \"`write(|w| ..)` method takes [shcsr::W](shcsr::W) writer structure\"]\n\nimpl crate::Writable for SHCSR {}\n\n#[doc = \"System Handler Control and State Register\"]\n\npub mod shcsr;\n\n#[doc = \"Configurable Fault Status Registers\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [cfsr](cfsr) module\"]\n\npub type CFSR = crate::Reg<u32, _CFSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _CFSR;\n\n#[doc = \"`read()` method returns [cfsr::R](cfsr::R) reader structure\"]\n\nimpl crate::Readable for CFSR {}\n\n#[doc = \"`write(|w| ..)` method takes [cfsr::W](cfsr::W) writer structure\"]\n\nimpl crate::Writable for CFSR {}\n\n#[doc = \"Configurable Fault Status Registers\"]\n\npub mod cfsr;\n", "file_path": "src/system_control.rs", "rank": 28, "score": 62440.79806425414 }, { "content": "#[doc(hidden)]\n\npub struct _FPCAR;\n\n#[doc = \"`read()` method returns [fpcar::R](fpcar::R) reader structure\"]\n\nimpl crate::Readable for FPCAR {}\n\n#[doc = \"`write(|w| ..)` method takes [fpcar::W](fpcar::W) writer structure\"]\n\nimpl crate::Writable for FPCAR {}\n\n#[doc = \"Floating-point Context Address Register\"]\n\npub mod fpcar;\n\n#[doc = \"Floating-point Default Status Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [fpdscr](fpdscr) module\"]\n\npub type FPDSCR = crate::Reg<u32, _FPDSCR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _FPDSCR;\n\n#[doc = \"`read()` method returns [fpdscr::R](fpdscr::R) reader structure\"]\n\nimpl crate::Readable for FPDSCR {}\n\n#[doc = \"`write(|w| ..)` method takes [fpdscr::W](fpdscr::W) writer structure\"]\n\nimpl crate::Writable for FPDSCR {}\n\n#[doc = \"Floating-point Default Status Control Register\"]\n\npub mod fpdscr;\n", "file_path": "src/system_control.rs", "rank": 29, "score": 62440.62715464392 }, { "content": " #[doc = \"0xd88 - Coprocessor Access Control Register\"]\n\n pub cpacr: CPACR,\n\n _reserved18: [u8; 424usize],\n\n #[doc = \"0xf34 - Floating-point Context Control Register\"]\n\n pub fpccr: FPCCR,\n\n #[doc = \"0xf38 - Floating-point Context Address Register\"]\n\n pub fpcar: FPCAR,\n\n #[doc = \"0xf3c - Floating-point Default Status Control Register\"]\n\n pub fpdscr: FPDSCR,\n\n}\n\n#[doc = \"Auxiliary Control Register,\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [actlr](actlr) module\"]\n\npub type ACTLR = crate::Reg<u32, _ACTLR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _ACTLR;\n\n#[doc = \"`read()` method returns [actlr::R](actlr::R) reader structure\"]\n\nimpl crate::Readable for ACTLR {}\n\n#[doc = \"`write(|w| ..)` method takes [actlr::W](actlr::W) writer structure\"]\n\nimpl crate::Writable for ACTLR {}\n\n#[doc = \"Auxiliary Control Register,\"]\n", "file_path": "src/system_control.rs", "rank": 30, "score": 62440.31993949323 }, { "content": "#[doc = \"`read()` method returns [cpacr::R](cpacr::R) reader structure\"]\n\nimpl crate::Readable for CPACR {}\n\n#[doc = \"`write(|w| ..)` method takes [cpacr::W](cpacr::W) writer structure\"]\n\nimpl crate::Writable for CPACR {}\n\n#[doc = \"Coprocessor Access Control Register\"]\n\npub mod cpacr;\n\n#[doc = \"Floating-point Context Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [fpccr](fpccr) module\"]\n\npub type FPCCR = crate::Reg<u32, _FPCCR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _FPCCR;\n\n#[doc = \"`read()` method returns [fpccr::R](fpccr::R) reader structure\"]\n\nimpl crate::Readable for FPCCR {}\n\n#[doc = \"`write(|w| ..)` method takes [fpccr::W](fpccr::W) writer structure\"]\n\nimpl crate::Writable for FPCCR {}\n\n#[doc = \"Floating-point Context Control Register\"]\n\npub mod fpccr;\n\n#[doc = \"Floating-point Context Address Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [fpcar](fpcar) module\"]\n\npub type FPCAR = crate::Reg<u32, _FPCAR>;\n\n#[allow(missing_docs)]\n", "file_path": "src/system_control.rs", "rank": 31, "score": 62440.097107307374 }, { "content": "impl crate::Writable for AIRCR {}\n\n#[doc = \"Application Interrupt and Reset Control Register\"]\n\npub mod aircr;\n\n#[doc = \"System Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [scr](scr) module\"]\n\npub type SCR = crate::Reg<u32, _SCR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SCR;\n\n#[doc = \"`read()` method returns [scr::R](scr::R) reader structure\"]\n\nimpl crate::Readable for SCR {}\n\n#[doc = \"`write(|w| ..)` method takes [scr::W](scr::W) writer structure\"]\n\nimpl crate::Writable for SCR {}\n\n#[doc = \"System Control Register\"]\n\npub mod scr;\n\n#[doc = \"Configuration and Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [ccr](ccr) module\"]\n\npub type CCR = crate::Reg<u32, _CCR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _CCR;\n\n#[doc = \"`read()` method returns [ccr::R](ccr::R) reader structure\"]\n", "file_path": "src/system_control.rs", "rank": 32, "score": 62439.658931912345 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Interrupt Flag\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum IF_A {\n\n #[doc = \"0: No interrupt is pending.\"]\n\n _0,\n\n #[doc = \"1: An interrupt is pending.\"]\n\n _1,\n\n}\n\nimpl From<IF_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: IF_A) -> Self {\n\n match variant {\n", "file_path": "src/usbdcd/control.rs", "rank": 33, "score": 62439.05612144786 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = 
(self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Interrupt Flag\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum IF_A {\n\n #[doc = \"0: No interrupt is pending.\"]\n\n _0,\n\n #[doc = \"1: An interrupt is pending.\"]\n\n _1,\n\n}\n\nimpl From<IF_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: IF_A) -> Self {\n\n match variant {\n", "file_path": "src/usbhsdcd/control.rs", "rank": 34, "score": 62439.05612144786 }, { "content": "pub mod actlr;\n\n#[doc = \"CPUID Base Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [cpuid](cpuid) module\"]\n\npub type CPUID = crate::Reg<u32, _CPUID>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _CPUID;\n\n#[doc = \"`read()` method returns [cpuid::R](cpuid::R) reader structure\"]\n\nimpl crate::Readable for CPUID {}\n\n#[doc = \"CPUID Base Register\"]\n\npub mod cpuid;\n\n#[doc = \"Interrupt Control and State Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [icsr](icsr) module\"]\n\npub type ICSR = crate::Reg<u32, _ICSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _ICSR;\n\n#[doc = \"`read()` method returns [icsr::R](icsr::R) reader structure\"]\n\nimpl crate::Readable for ICSR {}\n\n#[doc = \"`write(|w| ..)` method takes [icsr::W](icsr::W) writer structure\"]\n\nimpl crate::Writable for ICSR {}\n\n#[doc = \"Interrupt Control and State Register\"]\n", "file_path": "src/system_control.rs", "rank": 35, "score": 62438.6602508088 }, { "content": "pub mod icsr;\n\n#[doc = \"Vector Table Offset Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [vtor](vtor) module\"]\n\npub type VTOR = crate::Reg<u32, _VTOR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _VTOR;\n\n#[doc = \"`read()` method returns [vtor::R](vtor::R) reader structure\"]\n\nimpl crate::Readable for VTOR {}\n\n#[doc = \"`write(|w| ..)` method takes [vtor::W](vtor::W) writer structure\"]\n\nimpl crate::Writable for VTOR {}\n\n#[doc = \"Vector Table Offset Register\"]\n\npub mod vtor;\n\n#[doc = \"Application Interrupt and Reset Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [aircr](aircr) module\"]\n\npub type AIRCR = crate::Reg<u32, _AIRCR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _AIRCR;\n\n#[doc = \"`read()` method returns [aircr::R](aircr::R) reader structure\"]\n\nimpl crate::Readable for AIRCR {}\n\n#[doc = \"`write(|w| ..)` method takes [aircr::W](aircr::W) writer structure\"]\n", "file_path": "src/system_control.rs", "rank": 36, "score": 62438.02900600323 }, { "content": "#[doc = \"HardFault Status register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [hfsr](hfsr) module\"]\n\npub type HFSR = crate::Reg<u32, _HFSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _HFSR;\n\n#[doc = \"`read()` method returns [hfsr::R](hfsr::R) reader structure\"]\n\nimpl crate::Readable for HFSR {}\n\n#[doc = \"`write(|w| ..)` method takes [hfsr::W](hfsr::W) writer structure\"]\n\nimpl crate::Writable for HFSR {}\n\n#[doc = \"HardFault Status register\"]\n\npub mod hfsr;\n\n#[doc = \"Debug Fault Status Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [dfsr](dfsr) module\"]\n\npub type DFSR = crate::Reg<u32, _DFSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _DFSR;\n\n#[doc = \"`read()` method returns [dfsr::R](dfsr::R) reader structure\"]\n\nimpl crate::Readable for DFSR {}\n\n#[doc = \"`write(|w| ..)` method takes [dfsr::W](dfsr::W) writer structure\"]\n\nimpl crate::Writable for DFSR {}\n", "file_path": "src/system_control.rs", "rank": 37, "score": 62437.86901870079 }, { "content": " pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"BC1.2 compatibility. This bit cannot be changed after start detection.\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum BC12_A {\n\n #[doc = \"0: Compatible with BC1.1 (default)\"]\n\n _0,\n\n #[doc = \"1: Compatible with BC1.2\"]\n\n _1,\n\n}\n\nimpl From<BC12_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: BC12_A) -> Self {\n\n match variant {\n\n BC12_A::_0 => false,\n\n BC12_A::_1 => true,\n\n }\n", "file_path": "src/usbhsdcd/control.rs", "rank": 38, "score": 62436.90408024914 }, { "content": " pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"BC1.2 compatibility. 
This bit cannot be changed after start detection.\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum BC12_A {\n\n #[doc = \"0: Compatible with BC1.1 (default)\"]\n\n _0,\n\n #[doc = \"1: Compatible with BC1.2\"]\n\n _1,\n\n}\n\nimpl From<BC12_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: BC12_A) -> Self {\n\n match variant {\n\n BC12_A::_0 => false,\n\n BC12_A::_1 => true,\n\n }\n", "file_path": "src/usbdcd/control.rs", "rank": 39, "score": 62436.90408024914 }, { "content": "pub struct _SHPR2;\n\n#[doc = \"`read()` method returns [shpr2::R](shpr2::R) reader structure\"]\n\nimpl crate::Readable for SHPR2 {}\n\n#[doc = \"`write(|w| ..)` method takes [shpr2::W](shpr2::W) writer structure\"]\n\nimpl crate::Writable for SHPR2 {}\n\n#[doc = \"System Handler Priority Register 2\"]\n\npub mod shpr2;\n\n#[doc = \"System Handler Priority Register 3\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [shpr3](shpr3) module\"]\n\npub type SHPR3 = crate::Reg<u32, _SHPR3>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SHPR3;\n\n#[doc = \"`read()` method returns [shpr3::R](shpr3::R) reader structure\"]\n\nimpl crate::Readable for SHPR3 {}\n\n#[doc = \"`write(|w| ..)` method takes [shpr3::W](shpr3::W) writer structure\"]\n\nimpl crate::Writable for SHPR3 {}\n\n#[doc = \"System Handler Priority Register 3\"]\n\npub mod shpr3;\n\n#[doc = \"System Handler Control and State Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [shcsr](shcsr) module\"]\n\npub type SHCSR = crate::Reg<u32, _SHCSR>;\n", "file_path": "src/system_control.rs", "rank": 40, "score": 62436.50576669278 }, { "content": "impl crate::Readable for CCR {}\n\n#[doc = \"`write(|w| ..)` method takes [ccr::W](ccr::W) writer structure\"]\n\nimpl crate::Writable for CCR {}\n\n#[doc = \"Configuration and Control Register\"]\n\npub mod ccr;\n\n#[doc = \"System Handler Priority Register 1\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [shpr1](shpr1) module\"]\n\npub type SHPR1 = crate::Reg<u32, _SHPR1>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SHPR1;\n\n#[doc = \"`read()` method returns [shpr1::R](shpr1::R) reader structure\"]\n\nimpl crate::Readable for SHPR1 {}\n\n#[doc = \"`write(|w| ..)` method takes [shpr1::W](shpr1::W) writer structure\"]\n\nimpl crate::Writable for SHPR1 {}\n\n#[doc = \"System Handler Priority Register 1\"]\n\npub mod shpr1;\n\n#[doc = \"System Handler Priority Register 2\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [shpr2](shpr2) module\"]\n\npub type SHPR2 = crate::Reg<u32, _SHPR2>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n", "file_path": "src/system_control.rs", "rank": 41, "score": 62435.569516854586 }, { "content": "#[doc = \"Debug Fault Status Register\"]\n\npub mod dfsr;\n\n#[doc = \"MemManage Address Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [mmfar](mmfar) module\"]\n\npub type MMFAR = crate::Reg<u32, _MMFAR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _MMFAR;\n\n#[doc = \"`read()` method returns [mmfar::R](mmfar::R) reader structure\"]\n\nimpl crate::Readable for MMFAR {}\n\n#[doc = \"`write(|w| ..)` method takes [mmfar::W](mmfar::W) writer structure\"]\n\nimpl crate::Writable for MMFAR {}\n\n#[doc = \"MemManage Address Register\"]\n\npub mod mmfar;\n\n#[doc = \"BusFault Address Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [bfar](bfar) module\"]\n\npub type BFAR = crate::Reg<u32, _BFAR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _BFAR;\n\n#[doc = \"`read()` method returns [bfar::R](bfar::R) reader structure\"]\n\nimpl crate::Readable for BFAR {}\n", "file_path": "src/system_control.rs", "rank": 42, "score": 62435.399122642724 }, { "content": "#[doc = \"`write(|w| ..)` method takes [bfar::W](bfar::W) writer structure\"]\n\nimpl crate::Writable for BFAR {}\n\n#[doc = \"BusFault Address Register\"]\n\npub mod bfar;\n\n#[doc = \"Auxiliary Fault Status Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [afsr](afsr) module\"]\n\npub type AFSR = crate::Reg<u32, _AFSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _AFSR;\n\n#[doc = \"`read()` method returns [afsr::R](afsr::R) reader structure\"]\n\nimpl crate::Readable for AFSR {}\n\n#[doc = \"`write(|w| ..)` method takes [afsr::W](afsr::W) writer structure\"]\n\nimpl crate::Writable for AFSR {}\n\n#[doc = \"Auxiliary Fault Status Register\"]\n\npub mod afsr;\n\n#[doc = \"Coprocessor Access Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about avaliable fields see [cpacr](cpacr) module\"]\n\npub type CPACR = crate::Reg<u32, _CPACR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _CPACR;\n", "file_path": "src/system_control.rs", "rank": 43, "score": 62435.26494555274 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u8) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 4 - Provides control of the DP Pullup in USBOTG, if USB is configured in non-OTG device mode.\"]\n\n #[inline(always)]\n\n pub fn dppullupnonotg(&self) -> DPPULLUPNONOTG_R {\n\n DPPULLUPNONOTG_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 4 - Provides control of the DP Pullup in USBOTG, if USB is configured in non-OTG device mode.\"]\n\n #[inline(always)]\n\n pub fn dppullupnonotg(&mut self) -> DPPULLUPNONOTG_W {\n\n DPPULLUPNONOTG_W { w: self }\n\n }\n\n}\n", "file_path": "src/usb0/control.rs", "rank": 44, "score": 62432.26301980437 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum SR_AW {\n\n #[doc = \"0: Do not perform a software reset.\"]\n\n _0,\n\n #[doc = \"1: Perform a software reset.\"]\n\n _1,\n\n}\n\nimpl From<SR_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: SR_AW) -> Self {\n\n match variant {\n\n SR_AW::_0 => false,\n\n SR_AW::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SR`\"]\n\npub struct SR_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbdcd/control.rs", "rank": 45, "score": 62429.25806588661 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum SR_AW {\n\n #[doc = \"0: Do not perform a software reset.\"]\n\n _0,\n\n #[doc = \"1: Perform a software reset.\"]\n\n _1,\n\n}\n\nimpl From<SR_AW> for 
bool {\n\n #[inline(always)]\n\n fn from(variant: SR_AW) -> Self {\n\n match variant {\n\n SR_AW::_0 => false,\n\n SR_AW::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SR`\"]\n\npub struct SR_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbhsdcd/control.rs", "rank": 46, "score": 62429.25806588661 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `_1`\"]\n\n #[inline(always)]\n\n pub fn is_1(&self) -> bool {\n\n *self == IF_A::_1\n\n }\n\n}\n\n#[doc = \"Interrupt Enable\\n\\nValue on reset: 1\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum IE_A {\n\n #[doc = \"0: Disable interrupts to the system.\"]\n\n _0,\n\n #[doc = \"1: Enable interrupts to the system.\"]\n\n _1,\n\n}\n\nimpl From<IE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: IE_A) -> Self {\n\n match variant {\n\n IE_A::_0 => false,\n", "file_path": "src/usbdcd/control.rs", "rank": 47, "score": 62429.1101697463 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `_1`\"]\n\n #[inline(always)]\n\n pub fn is_1(&self) -> bool {\n\n *self == IF_A::_1\n\n }\n\n}\n\n#[doc = \"Interrupt Enable\\n\\nValue on reset: 1\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum IE_A {\n\n #[doc = \"0: Disable interrupts to the system.\"]\n\n _0,\n\n #[doc = \"1: Enable interrupts to the system.\"]\n\n _1,\n\n}\n\nimpl From<IE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: IE_A) -> Self {\n\n match variant {\n\n IE_A::_0 => false,\n", "file_path": "src/usbhsdcd/control.rs", "rank": 48, "score": 62429.1101697463 }, { "content": "impl From<DPPULLUPNONOTG_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: DPPULLUPNONOTG_A) -> Self {\n\n match variant {\n\n DPPULLUPNONOTG_A::_0 => false,\n\n DPPULLUPNONOTG_A::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `DPPULLUPNONOTG`\"]\n\npub type DPPULLUPNONOTG_R = crate::R<bool, DPPULLUPNONOTG_A>;\n\nimpl DPPULLUPNONOTG_R {\n\n #[doc = r\"Get enumerated values 
variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> DPPULLUPNONOTG_A {\n\n match self.bits {\n\n false => DPPULLUPNONOTG_A::_0,\n\n true => DPPULLUPNONOTG_A::_1,\n\n }\n\n }\n", "file_path": "src/usb0/control.rs", "rank": 49, "score": 62426.1816078043 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"Start Change Detection Sequence\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum START_AW {\n\n #[doc = \"0: Do not start the sequence. Writes of this value have no effect.\"]\n\n _0,\n\n #[doc = \"1: Initiate the charger detection sequence. If the sequence is already running, writes of this value have no effect.\"]\n\n _1,\n\n}\n\nimpl From<START_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: START_AW) -> Self {\n\n match variant {\n\n START_AW::_0 => false,\n\n START_AW::_1 => true,\n\n }\n\n }\n\n}\n", "file_path": "src/usbdcd/control.rs", "rank": 50, "score": 62425.66997554377 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"Start Change Detection Sequence\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum START_AW {\n\n #[doc = \"0: Do not start the sequence. Writes of this value have no effect.\"]\n\n _0,\n\n #[doc = \"1: Initiate the charger detection sequence. 
If the sequence is already running, writes of this value have no effect.\"]\n\n _1,\n\n}\n\nimpl From<START_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: START_AW) -> Self {\n\n match variant {\n\n START_AW::_0 => false,\n\n START_AW::_1 => true,\n\n }\n\n }\n\n}\n", "file_path": "src/usbhsdcd/control.rs", "rank": 51, "score": 62425.66997554377 }, { "content": " IF_A::_0 => false,\n\n IF_A::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `IF`\"]\n\npub type IF_R = crate::R<bool, IF_A>;\n\nimpl IF_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> IF_A {\n\n match self.bits {\n\n false => IF_A::_0,\n\n true => IF_A::_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == IF_A::_0\n", "file_path": "src/usbdcd/control.rs", "rank": 52, "score": 62422.20115133451 }, { "content": " IF_A::_0 => false,\n\n IF_A::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `IF`\"]\n\npub type IF_R = crate::R<bool, IF_A>;\n\nimpl IF_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> IF_A {\n\n match self.bits {\n\n false => IF_A::_0,\n\n true => IF_A::_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == IF_A::_0\n", "file_path": "src/usbhsdcd/control.rs", "rank": 53, "score": 62422.20115133451 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `BC12`\"]\n\npub type BC12_R = crate::R<bool, BC12_A>;\n\nimpl BC12_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> BC12_A {\n\n match self.bits {\n\n false => BC12_A::_0,\n\n true => BC12_A::_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == BC12_A::_0\n\n }\n\n #[doc = \"Checks if the value of 
the field is `_1`\"]\n\n #[inline(always)]\n", "file_path": "src/usbdcd/control.rs", "rank": 54, "score": 62420.587172142965 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `BC12`\"]\n\npub type BC12_R = crate::R<bool, BC12_A>;\n\nimpl BC12_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> BC12_A {\n\n match self.bits {\n\n false => BC12_A::_0,\n\n true => BC12_A::_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == BC12_A::_0\n\n }\n\n #[doc = \"Checks if the value of the field is `_1`\"]\n\n #[inline(always)]\n", "file_path": "src/usbhsdcd/control.rs", "rank": 55, "score": 62420.587172142965 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 8 - Interrupt Flag\"]\n\n #[inline(always)]\n\n pub fn if_(&self) -> IF_R {\n\n IF_R::new(((self.bits >> 8) & 0x01) != 0)\n", "file_path": "src/usbdcd/control.rs", "rank": 56, "score": 62420.53049710476 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 8 - Interrupt Flag\"]\n\n #[inline(always)]\n\n pub fn if_(&self) -> IF_R {\n\n IF_R::new(((self.bits >> 8) & 0x01) != 0)\n", 
"file_path": "src/usbhsdcd/control.rs", "rank": 57, "score": 62420.53049710476 }, { "content": " IE_A::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `IE`\"]\n\npub type IE_R = crate::R<bool, IE_A>;\n\nimpl IE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> IE_A {\n\n match self.bits {\n\n false => IE_A::_0,\n\n true => IE_A::_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == IE_A::_0\n\n }\n", "file_path": "src/usbhsdcd/control.rs", "rank": 58, "score": 62420.44488842945 }, { "content": " IE_A::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `IE`\"]\n\npub type IE_R = crate::R<bool, IE_A>;\n\nimpl IE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> IE_A {\n\n match self.bits {\n\n false => IE_A::_0,\n\n true => IE_A::_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == IE_A::_0\n\n }\n", "file_path": "src/usbdcd/control.rs", "rank": 59, "score": 62420.44488842945 }, { "content": "#[doc = r\"Register block\"]\n\n#[repr(C)]\n\npub struct RegisterBlock {\n\n _reserved0: [u8; 8usize],\n\n #[doc = \"0x08 - Auxiliary Control Register,\"]\n\n pub actlr: ACTLR,\n\n _reserved1: [u8; 3316usize],\n\n #[doc = \"0xd00 - CPUID Base Register\"]\n\n pub cpuid: CPUID,\n\n #[doc = \"0xd04 - Interrupt Control and State Register\"]\n\n pub icsr: ICSR,\n\n #[doc = \"0xd08 - Vector Table Offset Register\"]\n\n pub vtor: VTOR,\n\n #[doc = \"0xd0c - Application Interrupt and Reset Control Register\"]\n\n pub aircr: AIRCR,\n\n #[doc = \"0xd10 - System Control Register\"]\n\n pub scr: SCR,\n\n #[doc = \"0xd14 - Configuration and Control Register\"]\n\n pub ccr: CCR,\n\n #[doc = \"0xd18 - System Handler Priority Register 1\"]\n", "file_path": "src/system_control.rs", "rank": 60, 
"score": 62418.39378572118 }, { "content": " self.variant(START_AW::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Software Reset\\n\\nValue on reset: 0\"]\n", "file_path": "src/usbdcd/control.rs", "rank": 61, "score": 62415.714268185395 }, { "content": " self.variant(START_AW::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Software Reset\\n\\nValue on reset: 0\"]\n", "file_path": "src/usbhsdcd/control.rs", "rank": 62, "score": 62415.714268185395 }, { "content": "impl From<IACK_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: IACK_AW) -> Self {\n\n match variant {\n\n IACK_AW::_0 => false,\n\n IACK_AW::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Write proxy for field `IACK`\"]\n\npub struct IACK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IACK_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: IACK_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n", "file_path": "src/usbhsdcd/control.rs", "rank": 63, "score": 62413.904254555484 }, { "content": "impl From<IACK_AW> for bool {\n\n #[inline(always)]\n\n 
fn from(variant: IACK_AW) -> Self {\n\n match variant {\n\n IACK_AW::_0 => false,\n\n IACK_AW::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Write proxy for field `IACK`\"]\n\npub struct IACK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IACK_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: IACK_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n", "file_path": "src/usbdcd/control.rs", "rank": 64, "score": 62413.904254555484 }, { "content": " pub shpr1: SHPR1,\n\n #[doc = \"0xd1c - System Handler Priority Register 2\"]\n\n pub shpr2: SHPR2,\n\n #[doc = \"0xd20 - System Handler Priority Register 3\"]\n\n pub shpr3: SHPR3,\n\n #[doc = \"0xd24 - System Handler Control and State Register\"]\n\n pub shcsr: SHCSR,\n\n #[doc = \"0xd28 - Configurable Fault Status Registers\"]\n\n pub cfsr: CFSR,\n\n #[doc = \"0xd2c - HardFault Status register\"]\n\n pub hfsr: HFSR,\n\n #[doc = \"0xd30 - Debug Fault Status Register\"]\n\n pub dfsr: DFSR,\n\n #[doc = \"0xd34 - MemManage Address Register\"]\n\n pub mmfar: MMFAR,\n\n #[doc = \"0xd38 - BusFault Address Register\"]\n\n pub bfar: BFAR,\n\n #[doc = \"0xd3c - Auxiliary Fault Status Register\"]\n\n pub afsr: AFSR,\n\n _reserved17: [u8; 72usize],\n", "file_path": "src/system_control.rs", "rank": 65, "score": 62412.098241203166 }, { "content": " #[doc = \"Checks if the value of the field is `_0`\"]\n\n #[inline(always)]\n\n pub fn is_0(&self) -> bool {\n\n *self == DPPULLUPNONOTG_A::_0\n\n }\n\n #[doc = \"Checks if the value of the field is `_1`\"]\n\n #[inline(always)]\n\n pub fn is_1(&self) -> bool {\n\n *self == DPPULLUPNONOTG_A::_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DPPULLUPNONOTG`\"]\n\npub struct DPPULLUPNONOTG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DPPULLUPNONOTG_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: DPPULLUPNONOTG_A) -> &'a mut W {\n\n {\n", "file_path": 
"src/usb0/control.rs", "rank": 66, "score": 62409.598234812445 }, { "content": " }\n\n #[doc = \"Bit 16 - Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn ie(&self) -> IE_R {\n\n IE_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 17 - BC1.2 compatibility. This bit cannot be changed after start detection.\"]\n\n #[inline(always)]\n\n pub fn bc12(&self) -> BC12_R {\n\n BC12_R::new(((self.bits >> 17) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Interrupt Acknowledge\"]\n\n #[inline(always)]\n\n pub fn iack(&mut self) -> IACK_W {\n\n IACK_W { w: self }\n\n }\n\n #[doc = \"Bit 16 - Interrupt Enable\"]\n\n #[inline(always)]\n", "file_path": "src/usbhsdcd/control.rs", "rank": 67, "score": 62409.30191265536 }, { "content": " }\n\n #[doc = \"Bit 16 - Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn ie(&self) -> IE_R {\n\n IE_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 17 - BC1.2 compatibility. This bit cannot be changed after start detection.\"]\n\n #[inline(always)]\n\n pub fn bc12(&self) -> BC12_R {\n\n BC12_R::new(((self.bits >> 17) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Interrupt Acknowledge\"]\n\n #[inline(always)]\n\n pub fn iack(&mut self) -> IACK_W {\n\n IACK_W { w: self }\n\n }\n\n #[doc = \"Bit 16 - Interrupt Enable\"]\n\n #[inline(always)]\n", "file_path": "src/usbdcd/control.rs", "rank": 68, "score": 62409.30191265536 }, { "content": "impl<'a> SR_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: SR_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Do not perform a software reset.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(SR_AW::_0)\n\n }\n\n #[doc = \"Perform a software reset.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(SR_AW::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n", "file_path": 
"src/usbdcd/control.rs", "rank": 69, "score": 62409.26132486702 }, { "content": "impl<'a> SR_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: SR_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Do not perform a software reset.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(SR_AW::_0)\n\n }\n\n #[doc = \"Perform a software reset.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(SR_AW::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usbhsdcd/control.rs", "rank": 70, "score": 62409.26132486702 }, { "content": " }\n\n #[doc = \"Compatible with BC1.2\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(BC12_A::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n", "file_path": "src/usbdcd/control.rs", "rank": 71, "score": 62408.709393590856 }, { "content": " }\n\n #[doc = \"Compatible with BC1.2\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(BC12_A::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n", "file_path": 
"src/usbhsdcd/control.rs", "rank": 72, "score": 62408.709393590856 }, { "content": "#[doc = \"Write proxy for field `START`\"]\n\npub struct START_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> START_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: START_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Do not start the sequence. Writes of this value have no effect.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(START_AW::_0)\n\n }\n\n #[doc = \"Initiate the charger detection sequence. If the sequence is already running, writes of this value have no effect.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n", "file_path": "src/usbhsdcd/control.rs", "rank": 73, "score": 62408.62142579136 }, { "content": "#[doc = \"Write proxy for field `START`\"]\n\npub struct START_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> START_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: START_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Do not start the sequence. Writes of this value have no effect.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(START_AW::_0)\n\n }\n\n #[doc = \"Initiate the charger detection sequence. 
If the sequence is already running, writes of this value have no effect.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n", "file_path": "src/usbdcd/control.rs", "rank": 74, "score": 62408.62142579136 }, { "content": " #[doc = \"Checks if the value of the field is `_1`\"]\n\n #[inline(always)]\n\n pub fn is_1(&self) -> bool {\n\n *self == IE_A::_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `IE`\"]\n\npub struct IE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IE_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: IE_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Disable interrupts to the system.\"]\n\n #[inline(always)]\n", "file_path": "src/usbhsdcd/control.rs", "rank": 75, "score": 62407.90433288711 }, { "content": " #[doc = \"Checks if the value of the field is `_1`\"]\n\n #[inline(always)]\n\n pub fn is_1(&self) -> bool {\n\n *self == IE_A::_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `IE`\"]\n\npub struct IE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IE_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: IE_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Disable interrupts to the system.\"]\n\n #[inline(always)]\n", "file_path": "src/usbdcd/control.rs", "rank": 76, "score": 62407.90433288711 }, { "content": " pub fn ie(&mut self) -> IE_W {\n\n IE_W { w: self }\n\n }\n\n #[doc = \"Bit 17 - BC1.2 compatibility. 
This bit cannot be changed after start detection.\"]\n\n #[inline(always)]\n\n pub fn bc12(&mut self) -> BC12_W {\n\n BC12_W { w: self }\n\n }\n\n #[doc = \"Bit 24 - Start Change Detection Sequence\"]\n\n #[inline(always)]\n\n pub fn start(&mut self) -> START_W {\n\n START_W { w: self }\n\n }\n\n #[doc = \"Bit 25 - Software Reset\"]\n\n #[inline(always)]\n\n pub fn sr(&mut self) -> SR_W {\n\n SR_W { w: self }\n\n }\n\n}\n", "file_path": "src/usbdcd/control.rs", "rank": 77, "score": 62403.438954022255 }, { "content": " pub fn ie(&mut self) -> IE_W {\n\n IE_W { w: self }\n\n }\n\n #[doc = \"Bit 17 - BC1.2 compatibility. This bit cannot be changed after start detection.\"]\n\n #[inline(always)]\n\n pub fn bc12(&mut self) -> BC12_W {\n\n BC12_W { w: self }\n\n }\n\n #[doc = \"Bit 24 - Start Change Detection Sequence\"]\n\n #[inline(always)]\n\n pub fn start(&mut self) -> START_W {\n\n START_W { w: self }\n\n }\n\n #[doc = \"Bit 25 - Software Reset\"]\n\n #[inline(always)]\n\n pub fn sr(&mut self) -> SR_W {\n\n SR_W { w: self }\n\n }\n\n}\n", "file_path": "src/usbhsdcd/control.rs", "rank": 78, "score": 62403.438954022255 }, { "content": " pub fn is_1(&self) -> bool {\n\n *self == BC12_A::_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `BC12`\"]\n\npub struct BC12_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BC12_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: BC12_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Compatible with BC1.1 (default)\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(BC12_A::_0)\n", "file_path": "src/usbdcd/control.rs", "rank": 79, "score": 62403.34069480761 }, { "content": " pub fn is_1(&self) -> bool {\n\n *self == BC12_A::_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `BC12`\"]\n\npub struct BC12_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BC12_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n 
#[inline(always)]\n\n pub fn variant(self, variant: BC12_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Compatible with BC1.1 (default)\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(BC12_A::_0)\n", "file_path": "src/usbhsdcd/control.rs", "rank": 80, "score": 62403.34069480761 }, { "content": " pub fn _0(self) -> &'a mut W {\n\n self.variant(IE_A::_0)\n\n }\n\n #[doc = \"Enable interrupts to the system.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(IE_A::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n", "file_path": "src/usbdcd/control.rs", "rank": 81, "score": 62397.6544206797 }, { "content": " pub fn _0(self) -> &'a mut W {\n\n self.variant(IE_A::_0)\n\n }\n\n #[doc = \"Enable interrupts to the system.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(IE_A::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n", "file_path": "src/usbhsdcd/control.rs", "rank": 82, "score": 62397.6544206797 }, { "content": " }\n\n #[doc = \"Do not clear the interrupt.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(IACK_AW::_0)\n\n }\n\n #[doc = \"Clear the IF bit (interrupt flag).\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(IACK_AW::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n 
self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/usbdcd/control.rs", "rank": 83, "score": 62397.61775105001 }, { "content": " }\n\n #[doc = \"Do not clear the interrupt.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(IACK_AW::_0)\n\n }\n\n #[doc = \"Clear the IF bit (interrupt flag).\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(IACK_AW::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/usbhsdcd/control.rs", "rank": 84, "score": 62397.61775105001 }, { "content": " self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"DP Pullup in non-OTG device mode is not enabled.\"]\n\n #[inline(always)]\n\n pub fn _0(self) -> &'a mut W {\n\n self.variant(DPPULLUPNONOTG_A::_0)\n\n }\n\n #[doc = \"DP Pullup in non-OTG device mode is enabled.\"]\n\n #[inline(always)]\n\n pub fn _1(self) -> &'a mut W {\n\n self.variant(DPPULLUPNONOTG_A::_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usb0/control.rs", "rank": 85, "score": 62396.812998158355 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 86, "score": 61344.336836084534 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait 
Readable {}\n", "file_path": "src/generic.rs", "rank": 87, "score": 61338.043374341425 }, { "content": "#[doc = \"Reader of register PLL_SIC\"]\n\npub type R = crate::R<u32, super::PLL_SIC>;\n\n#[doc = \"Writer for register PLL_SIC\"]\n\npub type W = crate::W<u32, super::PLL_SIC>;\n\n#[doc = \"Register PLL_SIC `reset()`'s with value 0x0001_2000\"]\n\nimpl crate::ResetValue for super::PLL_SIC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0001_2000\n\n }\n\n}\n\n#[doc = \"This field controls the USB PLL feedback loop divider\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum PLL_DIV_SEL_A {\n\n #[doc = \"0: PLL reference frequency = 24MHz\"]\n\n _00,\n\n #[doc = \"1: PLL reference frequency = 16MHz\"]\n\n _01,\n\n}\n", "file_path": "src/usbphy/pll_sic.rs", "rank": 88, "score": 59762.60221297199 }, { "content": "#[doc = \"Reader of register USB_SBUSCFG\"]\n\npub type R = crate::R<u32, super::USB_SBUSCFG>;\n\n#[doc = \"Writer for register USB_SBUSCFG\"]\n\npub type W = crate::W<u32, super::USB_SBUSCFG>;\n\n#[doc = \"Register USB_SBUSCFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::USB_SBUSCFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Burst mode\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum BURSTMODE_A {\n\n #[doc = \"0: INCR burst of unspecified length\"]\n\n _000,\n\n #[doc = \"1: INCR4, non-multiple transfers of INCR4 is decomposed into singles.\"]\n\n _001,\n\n #[doc = \"2: INCR8, non-multiple transfers of INCR8, is decomposed into INCR4 or singles.\"]\n", "file_path": "src/usbhs/usb_sbuscfg.rs", "rank": 89, "score": 59738.92821537358 }, { "content": "#[doc = \"Reader of register DEBUG_SET\"]\n\npub type R = crate::R<u32, super::DEBUG_SET>;\n\n#[doc = \"Writer for register DEBUG_SET\"]\n\npub type W = crate::W<u32, super::DEBUG_SET>;\n\n#[doc = \"Register DEBUG_SET 
`reset()`'s with value 0x7f18_0000\"]\n\nimpl crate::ResetValue for super::DEBUG_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f18_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `OTGIDPIOLOCK`\"]\n\npub type OTGIDPIOLOCK_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OTGIDPIOLOCK`\"]\n\npub struct OTGIDPIOLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OTGIDPIOLOCK_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/debug_set.rs", "rank": 90, "score": 59728.89041597315 }, { "content": "#[doc = \"Reader of register DEBUG_TOG\"]\n\npub type R = crate::R<u32, super::DEBUG_TOG>;\n\n#[doc = \"Writer for register DEBUG_TOG\"]\n\npub type W = crate::W<u32, super::DEBUG_TOG>;\n\n#[doc = \"Register DEBUG_TOG `reset()`'s with value 0x7f18_0000\"]\n\nimpl crate::ResetValue for super::DEBUG_TOG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f18_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `OTGIDPIOLOCK`\"]\n\npub type OTGIDPIOLOCK_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OTGIDPIOLOCK`\"]\n\npub struct OTGIDPIOLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OTGIDPIOLOCK_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/debug_tog.rs", "rank": 91, "score": 59728.89041597315 }, { "content": "#[doc = \"Reader of register DEBUG_CLR\"]\n\npub type R = crate::R<u32, super::DEBUG_CLR>;\n\n#[doc = \"Writer for register DEBUG_CLR\"]\n\npub type W = crate::W<u32, super::DEBUG_CLR>;\n\n#[doc = \"Register DEBUG_CLR `reset()`'s with value 0x7f18_0000\"]\n\nimpl crate::ResetValue for super::DEBUG_CLR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f18_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `OTGIDPIOLOCK`\"]\n\npub type OTGIDPIOLOCK_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OTGIDPIOLOCK`\"]\n\npub struct OTGIDPIOLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
OTGIDPIOLOCK_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/debug_clr.rs", "rank": 92, "score": 59728.89041597315 }, { "content": "#[doc = \"USB PLL lock status indicator\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum PLL_LOCK_A {\n\n #[doc = \"0: PLL is not currently locked\"]\n\n _0,\n\n #[doc = \"1: PLL is currently locked\"]\n\n _1,\n\n}\n\nimpl From<PLL_LOCK_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: PLL_LOCK_A) -> Self {\n\n match variant {\n\n PLL_LOCK_A::_0 => false,\n\n PLL_LOCK_A::_1 => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `PLL_LOCK`\"]\n\npub type PLL_LOCK_R = crate::R<bool, PLL_LOCK_A>;\n\nimpl PLL_LOCK_R {\n", "file_path": "src/usbphy/pll_sic.rs", "rank": 93, "score": 59712.19596920461 }, { "content": "impl From<PLL_DIV_SEL_A> for u8 {\n\n #[inline(always)]\n\n fn from(variant: PLL_DIV_SEL_A) -> Self {\n\n match variant {\n\n PLL_DIV_SEL_A::_00 => 0,\n\n PLL_DIV_SEL_A::_01 => 1,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `PLL_DIV_SEL`\"]\n\npub type PLL_DIV_SEL_R = crate::R<u8, PLL_DIV_SEL_A>;\n\nimpl PLL_DIV_SEL_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> crate::Variant<u8, PLL_DIV_SEL_A> {\n\n use crate::Variant::*;\n\n match self.bits {\n\n 0 => Val(PLL_DIV_SEL_A::_00),\n\n 1 => Val(PLL_DIV_SEL_A::_01),\n\n i => Res(i),\n", "file_path": "src/usbphy/pll_sic.rs", "rank": 94, "score": 59705.683344037396 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SQUELCHRESETLENGTH`\"]\n\npub type SQUELCHRESETLENGTH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETLENGTH`\"]\n\npub struct SQUELCHRESETLENGTH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETLENGTH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W 
{\n\n self.w.bits = (self.w.bits & !(0x0f << 25)) | (((value as u32) & 0x0f) << 25);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HOST_RESUME_DEBUG`\"]\n\npub type HOST_RESUME_DEBUG_R = crate::R<bool, bool>;\n", "file_path": "src/usbphy/debug_set.rs", "rank": 95, "score": 59703.52629635872 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SQUELCHRESETLENGTH`\"]\n\npub type SQUELCHRESETLENGTH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETLENGTH`\"]\n\npub struct SQUELCHRESETLENGTH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETLENGTH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 25)) | (((value as u32) & 0x0f) << 25);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HOST_RESUME_DEBUG`\"]\n\npub type HOST_RESUME_DEBUG_R = crate::R<bool, bool>;\n", "file_path": "src/usbphy/debug_clr.rs", "rank": 96, "score": 59703.52629635872 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SQUELCHRESETLENGTH`\"]\n\npub type SQUELCHRESETLENGTH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETLENGTH`\"]\n\npub struct SQUELCHRESETLENGTH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETLENGTH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 25)) | (((value as u32) & 0x0f) << 25);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HOST_RESUME_DEBUG`\"]\n\npub type HOST_RESUME_DEBUG_R = crate::R<bool, bool>;\n", "file_path": "src/usbphy/debug_tog.rs", "rank": 97, "score": 59703.52629635872 }, { "content": " pub fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = 
(self.w.bits & !0x07) | ((value as u32) & 0x07);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:2 - Burst mode\"]\n\n #[inline(always)]\n\n pub fn burstmode(&self) -> BURSTMODE_R {\n\n BURSTMODE_R::new((self.bits & 0x07) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:2 - Burst mode\"]\n\n #[inline(always)]\n\n pub fn burstmode(&mut self) -> BURSTMODE_W {\n\n BURSTMODE_W { w: self }\n\n }\n\n}\n", "file_path": "src/usbhs/usb_sbuscfg.rs", "rank": 98, "score": 59699.630868580854 }, { "content": " #[doc = \"Bit 13 - Enable the clock output from the USB PLL.\"]\n\n #[inline(always)]\n\n pub fn pll_enable(&self) -> PLL_ENABLE_R {\n\n PLL_ENABLE_R::new(((self.bits >> 13) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Bypass the USB PLL.\"]\n\n #[inline(always)]\n\n pub fn pll_bypass(&self) -> PLL_BYPASS_R {\n\n PLL_BYPASS_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 31 - USB PLL lock status indicator\"]\n\n #[inline(always)]\n\n pub fn pll_lock(&self) -> PLL_LOCK_R {\n\n PLL_LOCK_R::new(((self.bits >> 31) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:1 - This field controls the USB PLL feedback loop divider\"]\n\n #[inline(always)]\n\n pub fn pll_div_sel(&mut self) -> PLL_DIV_SEL_W {\n", "file_path": "src/usbphy/pll_sic.rs", "rank": 99, "score": 59699.25797703995 } ]
Rust
termwiz/src/hyperlink.rs
bcully/wezterm
ea401e1f58ca5a088ac5d5e1d7963f36269afb76
use anyhow::{anyhow, ensure, Error}; use regex::{Captures, Regex}; use serde::{self, Deserialize, Deserializer, Serialize}; use std::collections::HashMap; use std::fmt::{Display, Error as FmtError, Formatter}; use std::ops::Range; use std::sync::Arc; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct Hyperlink { params: HashMap<String, String>, uri: String, implicit: bool, } impl Hyperlink { pub fn uri(&self) -> &str { &self.uri } pub fn params(&self) -> &HashMap<String, String> { &self.params } pub fn new<S: Into<String>>(uri: S) -> Self { Self { uri: uri.into(), params: HashMap::new(), implicit: false, } } #[inline] pub fn is_implicit(&self) -> bool { self.implicit } pub fn new_implicit<S: Into<String>>(uri: S) -> Self { Self { uri: uri.into(), params: HashMap::new(), implicit: true, } } pub fn new_with_id<S: Into<String>, S2: Into<String>>(uri: S, id: S2) -> Self { let mut params = HashMap::new(); params.insert("id".into(), id.into()); Self { uri: uri.into(), params, implicit: false, } } pub fn new_with_params<S: Into<String>>(uri: S, params: HashMap<String, String>) -> Self { Self { uri: uri.into(), params, implicit: false, } } pub fn parse(osc: &[&[u8]]) -> Result<Option<Hyperlink>, Error> { ensure!(osc.len() == 3, "wrong param count"); if osc[1].is_empty() && osc[2].is_empty() { Ok(None) } else { let param_str = String::from_utf8(osc[1].to_vec())?; let uri = String::from_utf8(osc[2].to_vec())?; let mut params = HashMap::new(); if !param_str.is_empty() { for pair in param_str.split(':') { let mut iter = pair.splitn(2, '='); let key = iter.next().ok_or_else(|| anyhow!("bad params"))?; let value = iter.next().ok_or_else(|| anyhow!("bad params"))?; params.insert(key.to_owned(), value.to_owned()); } } Ok(Some(Hyperlink::new_with_params(uri, params))) } } } impl Display for Hyperlink { fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> { write!(f, "8;")?; for (idx, (k, v)) in self.params.iter().enumerate() { if idx > 0 { write!(f, 
":")?; } write!(f, "{}={}", k, v)?; } write!(f, ";{}", self.uri)?; Ok(()) } } #[derive(Debug, Clone, Deserialize)] pub struct Rule { #[serde(deserialize_with = "deserialize_regex")] regex: Regex, format: String, } fn deserialize_regex<'de, D>(deserializer: D) -> Result<Regex, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; Regex::new(&s).map_err(|e| serde::de::Error::custom(format!("{:?}", e))) } #[derive(Debug, PartialEq)] pub struct RuleMatch { pub range: Range<usize>, pub link: Arc<Hyperlink>, } struct Match<'t> { rule: &'t Rule, captures: Captures<'t>, } impl<'t> Match<'t> { fn len(&self) -> usize { let c0 = self.captures.get(0).unwrap(); c0.end() - c0.start() } fn range(&self) -> Range<usize> { let c0 = self.captures.get(0).unwrap(); c0.start()..c0.end() } fn expand(&self) -> String { let mut result = self.rule.format.clone(); for n in (0..self.captures.len()).rev() { let search = format!("${}", n); result = result.replace(&search, self.captures.get(n).unwrap().as_str()); } result } } impl Rule { pub fn new(regex: &str, format: &str) -> Result<Self, Error> { Ok(Self { regex: Regex::new(regex)?, format: format.to_owned(), }) } pub fn match_hyperlinks(line: &str, rules: &[Rule]) -> Vec<RuleMatch> { let mut matches = Vec::new(); for rule in rules.iter() { for captures in rule.regex.captures_iter(line) { matches.push(Match { rule, captures }); } } matches.sort_by(|a, b| b.len().cmp(&a.len())); matches .into_iter() .map(|m| { let url = m.expand(); let link = Arc::new(Hyperlink::new_implicit(url)); RuleMatch { link, range: m.range(), } }) .collect() } } #[cfg(test)] mod test { use super::*; #[test] fn parse_implicit() { let rules = vec![ Rule::new(r"\b\w+://(?:[\w.-]+)\.[a-z]{2,15}\S*\b", "$0").unwrap(), Rule::new(r"\b\w+@[\w-]+(\.[\w-]+)+\b", "mailto:$0").unwrap(), ]; assert_eq!( Rule::match_hyperlinks(" http://example.com", &rules), vec![RuleMatch { range: 2..20, link: Arc::new(Hyperlink::new_implicit("http://example.com")), }] 
); assert_eq!( Rule::match_hyperlinks(" [email protected] [email protected]", &rules), vec![ RuleMatch { range: 18..34, link: Arc::new(Hyperlink::new_implicit("mailto:[email protected]")), }, RuleMatch { range: 2..17, link: Arc::new(Hyperlink::new_implicit("mailto:[email protected]")), }, ] ); } }
use anyhow::{anyhow, ensure, Error}; use regex::{Captures, Regex}; use serde::{self, Deserialize, Deserializer, Serialize}; use std::collections::HashMap; use std::fmt::{Display, Error as FmtError, Formatter}; use std::ops::Range; use std::sync::Arc; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct Hyperlink { params: HashMap<String, String>, uri: String, implicit: bool, } impl Hyperlink { pub fn uri(&self) -> &str { &self.uri } pub fn params(&self) -> &HashMap<String, String> { &self.params } pub fn new<S: Into<String>>(uri: S) -> Self { Self { uri: uri.into(), params: HashMap::new(), implicit: false, } } #[inline] pub fn is_implicit(&self) -> bool { self.implicit } pub fn new_implicit<S: Into<String>>(uri: S) -> Self { Self { uri: uri.into(), params: HashMap::new(), implicit: true, } } pub fn new_with_id<S: Into<String>, S2: Into<String>>(uri: S, id: S2) -> Self { let mut params = HashMap::new(); params.insert("id".into(), id.into()); Self { uri: uri.into(), params, implicit: false, } } pub fn new_with_params<S: Into<String>>(uri: S, params: HashMap<String, String>) -> Self { Self { uri: uri.into(), params, implicit: false, } } pub fn parse(osc: &[&[u8]]) -> Result<Option<Hyperlink>, Error> { ensure!(osc.len() == 3, "wrong param count"); if osc[1].is_empty() && osc[2].is_empty() { Ok(None) } else { let param_str = String::from_utf8(osc[1].to_vec())?; let uri = String::from_utf8(osc[2].to_vec())?; let mut params = HashMap::new(); if !param_str.is_empty() { for pair in param_str.split(':') { let mut iter = pair.splitn(2, '='); let key = iter.next().ok_or_else(|| anyhow!("bad params"))?; let value = iter.next().ok_or_else(|| anyhow!("bad params"))?; params.insert(key.to_owned(), value.to_owned()); } } Ok(Some(Hyperlink::new_with_params(uri, params))) } } } impl Display for Hyperlink { fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> { write!(f, "8;")?; for (idx, (k, v)) in self.params.iter().enumerate() { if idx > 0 { write!(f, 
":")?; } write!(f, "{}={}", k, v)?; } write!(f, ";{}", self.uri)?; Ok(()) } } #[derive(Debug, Clone, Deserialize)] pub struct Rule { #[serde(deserialize_with = "deserialize_regex")] regex: Regex, format: String, } fn deserialize_regex<'de, D>(deserializer: D) -> Result<Regex, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; Regex::new(&s).map_err(|e| serde::de::Error::custom(format!("{:?}", e))) } #[derive(Debug, PartialEq)] pub struct RuleMatch { pub range: Range<usize>, pub link: Arc<Hyperlink>, } struct Match<'t> { rule: &'t Rule, captures: Captures<'t>, } impl<'t> Match<'t> { fn len(&self) -> usize { let c0 = self.captures.get(0).unwrap(); c0.end() - c0.start() } fn range(&self) -> Range<usize> { let c0 = self.captures.get(0).unwrap(); c0.start()..c0.end() } fn expand(&self) -> String { let mut result = self.rule.format.clone(); for n in (0..self.captures.len()).rev() { let search = fo
} impl Rule { pub fn new(regex: &str, format: &str) -> Result<Self, Error> { Ok(Self { regex: Regex::new(regex)?, format: format.to_owned(), }) } pub fn match_hyperlinks(line: &str, rules: &[Rule]) -> Vec<RuleMatch> { let mut matches = Vec::new(); for rule in rules.iter() { for captures in rule.regex.captures_iter(line) { matches.push(Match { rule, captures }); } } matches.sort_by(|a, b| b.len().cmp(&a.len())); matches .into_iter() .map(|m| { let url = m.expand(); let link = Arc::new(Hyperlink::new_implicit(url)); RuleMatch { link, range: m.range(), } }) .collect() } } #[cfg(test)] mod test { use super::*; #[test] fn parse_implicit() { let rules = vec![ Rule::new(r"\b\w+://(?:[\w.-]+)\.[a-z]{2,15}\S*\b", "$0").unwrap(), Rule::new(r"\b\w+@[\w-]+(\.[\w-]+)+\b", "mailto:$0").unwrap(), ]; assert_eq!( Rule::match_hyperlinks(" http://example.com", &rules), vec![RuleMatch { range: 2..20, link: Arc::new(Hyperlink::new_implicit("http://example.com")), }] ); assert_eq!( Rule::match_hyperlinks(" [email protected] [email protected]", &rules), vec![ RuleMatch { range: 18..34, link: Arc::new(Hyperlink::new_implicit("mailto:[email protected]")), }, RuleMatch { range: 2..17, link: Arc::new(Hyperlink::new_implicit("mailto:[email protected]")), }, ] ); } }
rmat!("${}", n); result = result.replace(&search, self.captures.get(n).unwrap().as_str()); } result }
function_block-function_prefixed
[ { "content": "pub fn language_from_string(s: &str) -> Result<hb_language_t, Error> {\n\n unsafe {\n\n let lang = hb_language_from_string(s.as_ptr() as *const i8, s.len() as i32);\n\n ensure!(!lang.is_null(), \"failed to convert {} to language\");\n\n Ok(lang)\n\n }\n\n}\n\n\n", "file_path": "src/font/hbwrap.rs", "rank": 1, "score": 329155.11562448967 }, { "content": "pub fn feature_from_string(s: &str) -> Result<hb_feature_t, Error> {\n\n unsafe {\n\n let mut feature = mem::zeroed();\n\n ensure!(\n\n hb_feature_from_string(\n\n s.as_ptr() as *const i8,\n\n s.len() as i32,\n\n &mut feature as *mut _,\n\n ) != 0,\n\n \"failed to create feature from {}\",\n\n s\n\n );\n\n Ok(feature)\n\n }\n\n}\n\n\n\npub struct Font {\n\n font: *mut hb_font_t,\n\n}\n\n\n\nimpl Drop for Font {\n\n fn drop(&mut self) {\n\n unsafe {\n\n hb_font_destroy(self.font);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/font/hbwrap.rs", "rank": 2, "score": 329155.11562448967 }, { "content": "/// Construct a new instance of Terminal.\n\n/// The terminal will have a renderer that is influenced by the configuration\n\n/// in the provided `Capabilities` instance.\n\n/// The terminal will explicitly open `/dev/tty` on Unix systems and\n\n/// `CONIN$` and `CONOUT$` on Windows systems, so that it should yield a\n\n/// functioning console with minimal headaches.\n\n/// If you have a more advanced use case you will want to look to the\n\n/// constructors for `UnixTerminal` and `WindowsTerminal` and call whichever\n\n/// one is most suitable for your needs.\n\npub fn new_terminal(caps: Capabilities) -> Result<impl Terminal, Error> {\n\n SystemTerminal::new(caps)\n\n}\n\n\n\npub(crate) fn cast<T: NumCast + Display + Copy, U: NumCast>(n: T) -> Result<U, Error> {\n\n num::cast(n).ok_or_else(|| anyhow!(\"{} is out of bounds for this system\", n))\n\n}\n", "file_path": "termwiz/src/terminal/mod.rs", "rank": 3, "score": 309205.7581822735 }, { "content": "fn serialize_smallvec<S>(value: &SmallVec<[u8; 4]>, 
serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n // unsafety: this is safe because the Cell constructor guarantees\n\n // that the storage is valid utf8\n\n let s = unsafe { std::str::from_utf8_unchecked(value) };\n\n s.serialize(serializer)\n\n}\n\n\n\n/// Models the contents of a cell on the terminal display\n\n#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]\n\npub struct Cell {\n\n #[serde(\n\n deserialize_with = \"deserialize_smallvec\",\n\n serialize_with = \"serialize_smallvec\"\n\n )]\n\n text: SmallVec<[u8; 4]>,\n\n attrs: CellAttributes,\n\n}\n", "file_path": "termwiz/src/cell.rs", "rank": 4, "score": 307067.9082659294 }, { "content": "fn serialize<T: serde::Serialize>(t: &T) -> Result<(Vec<u8>, bool), Error> {\n\n let mut uncompressed = Vec::new();\n\n let mut encode = varbincode::Serializer::new(&mut uncompressed);\n\n t.serialize(&mut encode)?;\n\n\n\n if uncompressed.len() <= COMPRESS_THRESH {\n\n return Ok((uncompressed, false));\n\n }\n\n // It's a little heavy; let's try compressing it\n\n let mut compressed = Vec::new();\n\n let mut compress = zstd::Encoder::new(&mut compressed, zstd::DEFAULT_COMPRESSION_LEVEL)?;\n\n let mut encode = varbincode::Serializer::new(&mut compress);\n\n t.serialize(&mut encode)?;\n\n drop(encode);\n\n compress.finish()?;\n\n\n\n debug!(\n\n \"serialized+compress len {} vs {}\",\n\n compressed.len(),\n\n uncompressed.len()\n\n );\n\n\n\n if compressed.len() < uncompressed.len() {\n\n Ok((compressed, true))\n\n } else {\n\n Ok((uncompressed, false))\n\n }\n\n}\n\n\n", "file_path": "src/server/codec.rs", "rank": 5, "score": 307051.2124976852 }, { "content": "fn csi_u_encode(buf: &mut String, c: char, mods: KeyModifiers) -> Result<(), Error> {\n\n if ENABLE_CSI_U {\n\n write!(buf, \"\\x1b[{};{}u\", c as u32, 1 + encode_modifiers(mods))?;\n\n } else {\n\n // FIXME: this ignores the modifiers completely. 
That's sort of\n\n // OK, but eg: CTRL-SPACE should really send a NUL byte in that\n\n // case, so this isn't great\n\n write!(buf, \"{}\", c)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "term/src/terminalstate.rs", "rank": 6, "score": 292139.9850466133 }, { "content": "/// Spawn a new thread to execute the provided function.\n\n/// Returns a JoinHandle that implements the Future trait\n\n/// and that can be used to await and yield the return value\n\n/// from the thread.\n\n/// Can be called from any thread.\n\npub fn spawn_into_new_thread<F, T>(f: F) -> JoinHandle<Result<T>, ()>\n\nwhere\n\n F: FnOnce() -> Result<T>,\n\n F: Send + 'static,\n\n T: Send + 'static,\n\n{\n\n let (tx, rx) = sync_channel(1);\n\n\n\n // Holds the waker that may later observe\n\n // during the Future::poll call.\n\n struct WakerHolder {\n\n waker: Mutex<Option<Waker>>,\n\n }\n\n\n\n let holder = Arc::new(WakerHolder {\n\n waker: Mutex::new(None),\n\n });\n\n\n\n let thread_waker = Arc::clone(&holder);\n\n std::thread::spawn(move || {\n", "file_path": "promise/src/spawn.rs", "rank": 7, "score": 274544.92632270727 }, { "content": "#[inline]\n\npub fn succeeded(error: FT_Error) -> bool {\n\n error == freetype::FT_Err_Ok as FT_Error\n\n}\n\n\n", "file_path": "src/font/ftwrap.rs", "rank": 8, "score": 271273.320841569 }, { "content": "/// This is the key function from this module; it uses serde to\n\n/// \"parse\" a lua value into a Rust type that implements Deserialize.\n\npub fn from_lua_value<T>(value: Value) -> Result<T, Error>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n T::deserialize(ValueWrapper(value))\n\n}\n\n\n", "file_path": "src/scripting/serde_lua/mod.rs", "rank": 9, "score": 269958.3081813865 }, { "content": "#[cfg_attr(feature = \"cargo-clippy\", allow(clippy::trivially_copy_pass_by_ref))]\n\nfn serialize_notnan<S>(value: &NotNan<f32>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n 
value.into_inner().serialize(serializer)\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct TextureCoordinate {\n\n #[serde(\n\n deserialize_with = \"deserialize_notnan\",\n\n serialize_with = \"serialize_notnan\"\n\n )]\n\n pub x: NotNan<f32>,\n\n #[serde(\n\n deserialize_with = \"deserialize_notnan\",\n\n serialize_with = \"serialize_notnan\"\n\n )]\n\n pub y: NotNan<f32>,\n\n}\n", "file_path": "termwiz/src/image.rs", "rank": 10, "score": 268429.7042742316 }, { "content": "/// If there was an error loading the preferred configuration,\n\n/// return it, otherwise return the current configuration\n\npub fn configuration_result() -> Result<ConfigHandle, Error> {\n\n if let Some(error) = CONFIG.get_error() {\n\n bail!(\"{}\", error);\n\n }\n\n Ok(CONFIG.get())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 11, "score": 265732.44390708307 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\n#[doc(hidden)]\n\npub fn poll_impl(pfd: &mut [pollfd], duration: Option<Duration>) -> anyhow::Result<usize> {\n\n let poll_result = unsafe {\n\n libc::poll(\n\n pfd.as_mut_ptr(),\n\n pfd.len() as _,\n\n duration\n\n .map(|wait| wait.as_millis() as libc::c_int)\n\n .unwrap_or(-1),\n\n )\n\n };\n\n if poll_result < 0 {\n\n Err(std::io::Error::last_os_error().into())\n\n } else {\n\n Ok(poll_result as usize)\n\n }\n\n}\n\n\n\n// macOS has a broken poll(2) implementation, so we introduce a layer to deal with that here\n\n#[cfg(target_os = \"macos\")]\n\nmod macos {\n", "file_path": "filedescriptor/src/unix.rs", "rank": 12, "score": 262836.4839789801 }, { "content": "#[doc(hidden)]\n\npub fn poll_impl(pfd: &mut [pollfd], duration: Option<Duration>) -> anyhow::Result<usize> {\n\n let poll_result = unsafe {\n\n WSAPoll(\n\n pfd.as_mut_ptr(),\n\n pfd.len() as _,\n\n duration\n\n .map(|wait| wait.as_millis() as libc::c_int)\n\n .unwrap_or(-1),\n\n )\n\n };\n\n if poll_result < 0 {\n\n Err(std::io::Error::last_os_error().into())\n\n } 
else {\n\n Ok(poll_result as usize)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::io::{Read, Write};\n", "file_path": "filedescriptor/src/windows.rs", "rank": 13, "score": 262836.4839789801 }, { "content": "fn main() -> Result<(), Error> {\n\n let caps = Capabilities::new_from_env()?;\n\n let mut terminal = new_terminal(caps)?;\n\n terminal.set_raw_mode()?;\n\n\n\n while let Some(event) = terminal.poll_input(None)? {\n\n print!(\"{:?}\\r\\n\", event);\n\n if event == InputEvent::Key(CTRL_C) {\n\n break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "termwiz/examples/key_tester.rs", "rank": 14, "score": 256768.44561220944 }, { "content": "fn deserialize_smallvec<'de, D>(deserializer: D) -> Result<SmallVec<[u8; 4]>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let text = String::deserialize(deserializer)?;\n\n Ok(SmallVec::from_slice(text.as_bytes()))\n\n}\n\n\n", "file_path": "termwiz/src/cell.rs", "rank": 15, "score": 245095.1220175324 }, { "content": "/// Extract a name from the name table\n\nfn get_name(name_table_data: &[u8], name_id: u16) -> anyhow::Result<String> {\n\n let cstr = allsorts::get_name::fontcode_get_name(name_table_data, name_id)?\n\n .ok_or_else(|| anyhow!(\"name_id {} not found\", name_id))?;\n\n cstr.into_string()\n\n .map_err(|e| anyhow!(\"name_id {} is not representable as String: {}\", name_id, e))\n\n}\n", "file_path": "src/font/parser.rs", "rank": 16, "score": 244579.79196185857 }, { "content": "#[allow(unused)]\n\npub fn message_box_ok(message: &str) {\n\n let title = \"wezterm\";\n\n let message = message.to_string();\n\n\n\n promise::spawn::block_on(run(60, 10, move |mut term| {\n\n term.render(&[\n\n Change::Title(title.to_string()),\n\n Change::Text(message.to_string()),\n\n ])\n\n .map_err(Error::msg)?;\n\n\n\n let mut editor = LineEditor::new(&mut term);\n\n editor.set_prompt(\"press enter to continue.\");\n\n\n\n let mut host = NopLineEditorHost::default();\n\n editor.read_line(&mut host).ok();\n\n 
Ok(())\n\n }))\n\n .ok();\n\n}\n", "file_path": "src/termwiztermtab.rs", "rank": 17, "score": 243019.54232635998 }, { "content": "fn de_keycode<'de, D>(deserializer: D) -> Result<KeyCode, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n macro_rules! m {\n\n ($($val:ident),* $(,)?) => {\n\n $(\n\n if s == stringify!($val) {\n\n return Ok(KeyCode::$val);\n\n }\n\n )*\n\n }\n\n }\n\n\n\n m!(\n\n Hyper,\n\n Super,\n\n Meta,\n", "file_path": "src/config/keys.rs", "rank": 18, "score": 237761.3011491808 }, { "content": "/// If the GUI has been started, pops up a window with the supplied error\n\n/// message framed as a configuration error.\n\n/// If there is no GUI front end, generates a toast notification instead.\n\npub fn show_configuration_error_message(err: &str) {\n\n log::error!(\"While (re)loading configuration: {}\", err);\n\n if crate::frontend::has_gui_front_end() {\n\n let ui = get_error_window();\n\n\n\n let mut wrapped = textwrap::fill(&err, 78);\n\n wrapped.push_str(\"\\n\");\n\n ui.output_str(&wrapped);\n\n } else {\n\n crate::toast_notification(\"Wezterm Configuration\", &err);\n\n }\n\n}\n", "file_path": "src/connui.rs", "rank": 19, "score": 237495.8360379956 }, { "content": "/// A little helper to convert i64 -> u8 if safe\n\nfn to_u8(v: i64) -> Result<u8, ()> {\n\n if v <= i64::from(u8::max_value()) {\n\n Ok(v as u8)\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "termwiz/src/escape/csi.rs", "rank": 20, "score": 237076.27943512198 }, { "content": "fn consume_stream<F: Read, T: Write>(mut from_stream: F, mut to_stream: T) -> anyhow::Result<()> {\n\n let mut buf = [0u8; 8192];\n\n\n\n loop {\n\n let size = from_stream.read(&mut buf)?;\n\n if size == 0 {\n\n break;\n\n }\n\n to_stream.write_all(&buf[0..size])?;\n\n to_stream.flush()?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 21, "score": 236012.7389043315 }, { "content": "/// Create a `Terminal` with the 
recommended settings for use with\n\n/// a `LineEditor`.\n\npub fn line_editor_terminal() -> anyhow::Result<impl Terminal> {\n\n let hints = ProbeHints::new_from_env().mouse_reporting(Some(false));\n\n let caps = Capabilities::new_with_hints(hints)?;\n\n new_terminal(caps)\n\n}\n", "file_path": "termwiz/src/lineedit/mod.rs", "rank": 22, "score": 235601.64639825578 }, { "content": "/// Examines a set of FileDescriptors to see if some of them are ready for I/O,\n\n/// or if certain events have occurred on them.\n\n///\n\n/// This uses the system native readiness checking mechanism, which on Windows\n\n/// means that it does NOT use IOCP and that this only works with sockets on\n\n/// Windows. If you need IOCP then the `mio` crate is recommended for a much\n\n/// more scalable solution.\n\n///\n\n/// On macOS, the `poll(2)` implementation has problems when used with eg: pty\n\n/// descriptors, so this implementation of poll uses the `select(2)` interface\n\n/// under the covers. That places a limit on the maximum file descriptor value\n\n/// that can be passed to poll. If a file descriptor is out of range then an\n\n/// error will returned. This limitation could potentially be lifted in the\n\n/// future.\n\n///\n\n/// On Windows, `WSAPoll` is used to implement readiness checking, which has\n\n/// the consequence that it can only be used with sockets.\n\n///\n\n/// If `duration` is `None`, then `poll` will block until any of the requested\n\n/// events are ready. 
Otherwise, `duration` specifies how long to wait for\n\n/// readiness before giving up.\n\n///\n\n/// The return value is the number of entries that were satisfied; `0` means\n\n/// that none were ready after waiting for the specified duration.\n\n///\n\n/// The `pfd` array is mutated and the `revents` field is updated to indicate\n\n/// which of the events were received.\n\npub fn poll(pfd: &mut [pollfd], duration: Option<Duration>) -> anyhow::Result<usize> {\n\n poll_impl(pfd, duration)\n\n}\n\n\n", "file_path": "filedescriptor/src/lib.rs", "rank": 23, "score": 235354.31676131592 }, { "content": "pub fn use_ime(enable: bool) {\n\n USE_IME.store(enable, Ordering::Relaxed);\n\n}\n\n\n", "file_path": "window/src/os/macos/window.rs", "rank": 24, "score": 232432.35993721074 }, { "content": "/// Returns the number of cells visually occupied by a grapheme.\n\n/// The input string must be a single grapheme.\n\npub fn grapheme_column_width(s: &str) -> usize {\n\n // Due to this issue:\n\n // https://github.com/unicode-rs/unicode-width/issues/4\n\n // we cannot simply use the unicode-width crate to compute\n\n // the desired value.\n\n // Let's check for emoji-ness for ourselves first\n\n use xi_unicode::EmojiExt;\n\n for c in s.chars() {\n\n if c.is_emoji_modifier_base() || c.is_emoji_modifier() {\n\n // treat modifier sequences as double wide\n\n return 2;\n\n }\n\n }\n\n UnicodeWidthStr::width(s)\n\n}\n\n\n\n/// Models a change in the attributes of a cell in a stream of changes.\n\n/// Each variant specifies one of the possible attributes; the corresponding\n\n/// value holds the new value to be used for that attribute.\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n", "file_path": "termwiz/src/cell.rs", "rank": 25, "score": 231142.95079193334 }, { "content": "/// Returns the number of cells visually occupied by a sequence\n\n/// of graphemes\n\npub fn unicode_column_width(s: &str) -> usize {\n\n use unicode_segmentation::UnicodeSegmentation;\n\n 
s.graphemes(true).map(grapheme_column_width).sum()\n\n}\n\n\n", "file_path": "termwiz/src/cell.rs", "rank": 26, "score": 231137.71006414556 }, { "content": "pub fn ssh_connect(remote_address: &str, username: &str) -> anyhow::Result<ssh2::Session> {\n\n let mut ui = ConnectionUI::new();\n\n ui.title(\"🔐 wezterm: SSH authentication\");\n\n let res = ssh_connect_with_ui(remote_address, username, &mut ui);\n\n match res {\n\n Ok(sess) => {\n\n ui.close();\n\n Ok(sess)\n\n }\n\n Err(err) => {\n\n ui.output_str(&format!(\"\\nFailed: {}\", err));\n\n Err(err)\n\n }\n\n }\n\n}\n\n\n\npub struct RemoteSshDomain {\n\n pty_system: Box<dyn PtySystem>,\n\n id: DomainId,\n\n name: String,\n", "file_path": "src/ssh.rs", "rank": 27, "score": 230317.40901585092 }, { "content": "fn de_modifiers<'de, D>(deserializer: D) -> Result<Modifiers, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n let mut mods = Modifiers::NONE;\n\n for ele in s.split('|') {\n\n if ele == \"SHIFT\" {\n\n mods |= Modifiers::SHIFT;\n\n } else if ele == \"ALT\" || ele == \"OPT\" || ele == \"META\" {\n\n mods |= Modifiers::ALT;\n\n } else if ele == \"CTRL\" {\n\n mods |= Modifiers::CTRL;\n\n } else if ele == \"SUPER\" || ele == \"CMD\" || ele == \"WIN\" {\n\n mods |= Modifiers::SUPER;\n\n } else if ele == \"NONE\" || ele == \"\" {\n\n mods |= Modifiers::NONE;\n\n } else {\n\n return Err(serde::de::Error::custom(format!(\n\n \"invalid modifier name {} in {}\",\n\n ele, s\n\n )));\n\n }\n\n }\n\n Ok(mods)\n\n}\n", "file_path": "src/config/keys.rs", "rank": 28, "score": 228859.00377271965 }, { "content": "fn default_hyperlink_rules() -> Vec<hyperlink::Rule> {\n\n vec![\n\n // URL with a protocol\n\n hyperlink::Rule::new(r\"\\b\\w+://(?:[\\w.-]+)\\.[a-z]{2,15}\\S*\\b\", \"$0\").unwrap(),\n\n // implicit mailto link\n\n hyperlink::Rule::new(r\"\\b\\w+@[\\w-]+(\\.[\\w-]+)+\\b\", \"mailto:$0\").unwrap(),\n\n ]\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 29, 
"score": 227269.01957022335 }, { "content": "pub fn range_is_empty<T: Integer>(range: &Range<T>) -> bool {\n\n range.start == range.end\n\n}\n\n\n", "file_path": "rangeset/src/lib.rs", "rank": 30, "score": 223687.4556723847 }, { "content": "pub fn spawn_tls_listener(tls_server: &TlsDomainServer) -> Result<(), Error> {\n\n openssl::init();\n\n\n\n let mut acceptor = SslAcceptor::mozilla_modern(SslMethod::tls())?;\n\n\n\n let cert_file = tls_server\n\n .pem_cert\n\n .clone()\n\n .unwrap_or_else(|| PKI.server_pem());\n\n acceptor\n\n .set_certificate_file(&cert_file, SslFiletype::PEM)\n\n .context(format!(\n\n \"set_certificate_file to {} for TLS listener\",\n\n cert_file.display()\n\n ))?;\n\n\n\n if let Some(chain_file) = tls_server.pem_ca.as_ref() {\n\n acceptor\n\n .set_certificate_chain_file(&chain_file)\n\n .context(format!(\n", "file_path": "src/server/listener/ossl.rs", "rank": 31, "score": 223457.8742382148 }, { "content": "pub fn to_lua_value<'lua, T>(lua: &'lua Lua, input: T) -> Result<Value<'lua>, Error>\n\nwhere\n\n T: Serialize,\n\n{\n\n let serializer = LuaSerializer { lua };\n\n input.serialize(serializer)\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n #[error(\"{}\", msg)]\n\n Custom { msg: String },\n\n}\n\n\n\nimpl Error {\n\n fn lua(e: mlua::Error) -> Error {\n\n Error::custom(e)\n\n }\n\n}\n\n\n", "file_path": "src/scripting/serde_lua/ser.rs", "rank": 32, "score": 221203.6533898413 }, { "content": "fn main() -> Result<(), Error> {\n\n let caps = Capabilities::new_from_env()?;\n\n\n\n let terminal = new_terminal(caps)?;\n\n\n\n let mut buf = BufferedTerminal::new(terminal)?;\n\n\n\n let mut block = Surface::new(5, 5);\n\n block.add_change(Change::ClearScreen(AnsiColor::Blue.into()));\n\n buf.draw_from_screen(&block, 10, 10);\n\n\n\n buf.add_change(Change::Attribute(AttributeChange::Foreground(\n\n AnsiColor::Maroon.into(),\n\n )));\n\n buf.add_change(\"Hello world\\r\\n\");\n\n 
buf.add_change(Change::Attribute(AttributeChange::Foreground(\n\n AnsiColor::Red.into(),\n\n )));\n\n buf.add_change(\"and in red here\\r\\n\");\n\n buf.add_change(Change::CursorPosition {\n", "file_path": "termwiz/examples/hello.rs", "rank": 33, "score": 219885.80986519554 }, { "content": "pub fn running_under_wsl() -> bool {\n\n #[cfg(unix)]\n\n unsafe {\n\n let mut name: libc::utsname = std::mem::zeroed();\n\n if libc::uname(&mut name) == 0 {\n\n let version = std::ffi::CStr::from_ptr(name.version.as_ptr())\n\n .to_string_lossy()\n\n .into_owned();\n\n return version.contains(\"Microsoft\");\n\n }\n\n };\n\n\n\n false\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct SshParameters {\n\n pub username: String,\n\n pub host_and_port: String,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 219545.0265764061 }, { "content": "fn read_pipe_with_timeout(mut file: FileDescriptor) -> anyhow::Result<String> {\n\n let mut result = Vec::new();\n\n\n\n file.set_non_blocking(true)?;\n\n let mut pfd = libc::pollfd {\n\n fd: file.as_raw_fd(),\n\n events: libc::POLLIN,\n\n revents: 0,\n\n };\n\n\n\n let mut buf = [0u8; 8192];\n\n\n\n loop {\n\n if unsafe { libc::poll(&mut pfd, 1, 3000) == 1 } {\n\n match file.read(&mut buf) {\n\n Ok(size) if size == 0 => {\n\n break;\n\n }\n\n Ok(size) => {\n\n result.extend_from_slice(&buf[..size]);\n", "file_path": "window/src/os/wayland/window.rs", "rank": 35, "score": 217994.52755477856 }, { "content": "fn main() -> Result<(), Error> {\n\n let caps = Capabilities::new_from_env()?;\n\n\n\n let mut terminal = new_terminal(caps)?;\n\n terminal.set_raw_mode()?;\n\n\n\n let mut buf = BufferedTerminal::new(terminal)?;\n\n\n\n buf.add_change(Change::Attribute(AttributeChange::Foreground(\n\n AnsiColor::Maroon.into(),\n\n )));\n\n buf.add_change(\"Hello world\\r\\n\");\n\n buf.add_change(Change::Attribute(AttributeChange::Foreground(\n\n AnsiColor::Red.into(),\n\n )));\n\n buf.add_change(\"and in red here\\r\\n\");\n\n\n\n 
buf.flush()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "termwiz/examples/buffered_terminal.rs", "rank": 36, "score": 215137.88175869628 }, { "content": "fn main() -> Result<(), Error> {\n\n let caps = Capabilities::new_from_env()?;\n\n let mut terminal = new_terminal(caps)?;\n\n\n\n terminal.render(&[\n\n Change::Attribute(AttributeChange::Foreground(AnsiColor::Maroon.into())),\n\n Change::Text(\"Hello world\\r\\n\".into()),\n\n Change::Attribute(AttributeChange::Foreground(AnsiColor::Red.into())),\n\n Change::Text(\"and in red here\\r\\n\".into()),\n\n ])?;\n\n\n\n Ok(())\n\n}\n", "file_path": "termwiz/examples/terminal_direct.rs", "rank": 37, "score": 215137.88175869628 }, { "content": "#[cfg(feature = \"widgets\")]\n\nfn main() -> Result<(), Error> {\n\n // Start with an empty string; typing into the app will\n\n // update this string.\n\n let mut typed_text = String::new();\n\n\n\n {\n\n // Create a terminal and put it into full screen raw mode\n\n let caps = Capabilities::new_from_env()?;\n\n let mut buf = BufferedTerminal::new(new_terminal(caps)?)?;\n\n buf.terminal().set_raw_mode()?;\n\n\n\n // Set up the UI\n\n let mut ui = Ui::new();\n\n\n\n ui.set_root(MainScreen::new(&mut typed_text));\n\n\n\n loop {\n\n ui.process_event_queue()?;\n\n\n\n // After updating and processing all of the widgets, compose them\n", "file_path": "termwiz/examples/widgets_basic.rs", "rank": 38, "score": 215137.88175869628 }, { "content": "fn suggesterr(e: SuggestValueError) -> Error {\n\n match e {\n\n SuggestValueError::UnknownEditVariable => anyhow!(\"Unknown edit variable\"),\n\n SuggestValueError::InternalSolverError(e) => anyhow!(\"Internal solver error: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "termwiz/src/widgets/layout.rs", "rank": 39, "score": 213349.29673006095 }, { "content": "/// A convenience function that wraps Base91Decoder; it decodes a slice of data\n\n/// and returns a vector holding the unencoded binary data.\n\npub fn decode(buf: &[u8]) -> Vec<u8> {\n\n let mut 
result = Vec::with_capacity(buf.len());\n\n {\n\n let mut writer = Base91Decoder::new(&mut result);\n\n writer.write_all(buf).unwrap();\n\n writer.flush().unwrap();\n\n }\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn test() {\n\n assert_eq!(encode(b\"hello\\n\"), b\"TPwJh>UA\");\n\n assert_eq!(decode(b\"TPwJh>UA\"), b\"hello\\n\");\n\n }\n\n\n\n #[test]\n", "file_path": "base91/src/lib.rs", "rank": 40, "score": 211556.70040212775 }, { "content": "/// A convenience function that wraps Base91Encoder; it encodes a slice of data\n\n/// and returns a vector holding the base91 encoded data.\n\npub fn encode(buf: &[u8]) -> Vec<u8> {\n\n let mut result = Vec::with_capacity((buf.len() * 123) / 100);\n\n {\n\n let mut writer = Base91Encoder::new(&mut result);\n\n writer.write_all(buf).unwrap();\n\n writer.flush().unwrap();\n\n }\n\n result\n\n}\n\n\n\n/// `Base91Decoder` wraps an impl of `std::io::Write` and does itself impl `std::io::Write`,\n\n/// and performs a base91 decode operation on the bytes that are written to it.\n\n/// It is important to remember to `flush` the writer at end of the data, as the encoder\n\n/// maintains up to 1 byte of pending data; the Drop impl will implicitly flush on\n\n/// your behalf, but will mask any error that may occur during the flush.\n\npub struct Base91Decoder<'a> {\n\n writer: &'a mut dyn Write,\n\n accumulator: u64,\n\n bits: u32,\n\n value: Option<u8>,\n", "file_path": "base91/src/lib.rs", "rank": 41, "score": 211556.70040212775 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\n#[doc(hidden)]\n\npub fn socketpair_impl() -> anyhow::Result<(FileDescriptor, FileDescriptor)> {\n\n let mut fds = [-1i32; 2];\n\n let res = unsafe { libc::socketpair(libc::PF_LOCAL, libc::SOCK_STREAM, 0, fds.as_mut_ptr()) };\n\n if res == -1 {\n\n bail!(\n\n \"failed to create a socketpair: {:?}\",\n\n std::io::Error::last_os_error()\n\n )\n\n } else {\n\n let mut read = FileDescriptor {\n\n handle: OwnedHandle 
{\n\n handle: fds[0],\n\n handle_type: (),\n\n },\n\n };\n\n let mut write = FileDescriptor {\n\n handle: OwnedHandle {\n\n handle: fds[1],\n\n handle_type: (),\n\n },\n\n };\n\n read.handle.cloexec()?;\n\n write.handle.cloexec()?;\n\n Ok((read, write))\n\n }\n\n}\n\n\n\npub use libc::{pollfd, POLLERR, POLLHUP, POLLIN, POLLOUT};\n\nuse std::time::Duration;\n\n\n", "file_path": "filedescriptor/src/unix.rs", "rank": 42, "score": 210829.02431116588 }, { "content": "#[doc(hidden)]\n\npub fn socketpair_impl() -> anyhow::Result<(FileDescriptor, FileDescriptor)> {\n\n init_winsock();\n\n\n\n let s = socket(AF_INET, SOCK_STREAM, 0)?;\n\n\n\n let mut in_addr: SOCKADDR_IN = unsafe { std::mem::zeroed() };\n\n in_addr.sin_family = AF_INET as _;\n\n unsafe {\n\n *in_addr.sin_addr.S_un.S_addr_mut() = htonl(INADDR_LOOPBACK);\n\n }\n\n\n\n unsafe {\n\n if bind(\n\n s.as_raw_handle() as _,\n\n std::mem::transmute(&in_addr),\n\n std::mem::size_of_val(&in_addr) as _,\n\n ) != 0\n\n {\n\n bail!(\"bind failed: {}\", IoError::last_os_error());\n\n }\n", "file_path": "filedescriptor/src/windows.rs", "rank": 43, "score": 210829.02431116588 }, { "content": "/// Returns true if a GUI frontend has been initialized, which implies that\n\n/// it makes sense (and is safe) to use the window crate and associated\n\n/// functionality\n\npub fn has_gui_front_end() -> bool {\n\n HAS_GUI_FRONT_END.load(Ordering::Acquire)\n\n}\n\n\n", "file_path": "src/frontend/mod.rs", "rank": 44, "score": 210399.09896673355 }, { "content": "pub fn is_opengl_enabled() -> bool {\n\n USE_OPENGL.load(Ordering::Acquire)\n\n}\n\n\n\nimpl GuiFrontEnd {\n\n pub fn try_new_no_opengl() -> anyhow::Result<Rc<dyn FrontEnd>> {\n\n USE_OPENGL.store(false, Ordering::Release);\n\n Self::try_new()\n\n }\n\n\n\n pub fn try_new() -> anyhow::Result<Rc<dyn FrontEnd>> {\n\n #[cfg(all(unix, not(target_os = \"macos\")))]\n\n {\n\n if !configuration().enable_wayland {\n\n Connection::disable_wayland();\n\n }\n\n }\n\n let connection = 
Connection::init()?;\n\n let front_end = Rc::new(GuiFrontEnd { connection });\n\n Ok(front_end)\n", "file_path": "src/frontend/gui/mod.rs", "rank": 45, "score": 210388.99529877084 }, { "content": "fn write_pipe_with_timeout(mut file: FileDescriptor, data: &[u8]) -> anyhow::Result<()> {\n\n file.set_non_blocking(true)?;\n\n let mut pfd = libc::pollfd {\n\n fd: file.as_raw_fd(),\n\n events: libc::POLLOUT,\n\n revents: 0,\n\n };\n\n\n\n let mut buf = data;\n\n\n\n while !buf.is_empty() {\n\n if unsafe { libc::poll(&mut pfd, 1, 3000) == 1 } {\n\n match file.write(buf) {\n\n Ok(size) if size == 0 => {\n\n bail!(\"zero byte write\");\n\n }\n\n Ok(size) => {\n\n buf = &buf[size..];\n\n }\n\n Err(e) => bail!(\"error writing to pipe: {}\", e),\n\n }\n\n } else {\n\n bail!(\"timed out writing to pipe\");\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "window/src/os/wayland/window.rs", "rank": 46, "score": 208861.5628896581 }, { "content": "fn username_from_env() -> anyhow::Result<String> {\n\n #[cfg(unix)]\n\n const USER: &str = \"USER\";\n\n #[cfg(windows)]\n\n const USER: &str = \"USERNAME\";\n\n\n\n std::env::var(USER).with_context(|| format!(\"while resolving {} env var\", USER))\n\n}\n\n\n\nimpl Display for SshParameters {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}@{}\", self.username, self.host_and_port)\n\n }\n\n}\n\n\n\nimpl FromStr for SshParameters {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let parts: Vec<&str> = s.split('@').collect();\n", "file_path": "src/main.rs", "rank": 47, "score": 207697.118893157 }, { "content": "pub fn unix_connect_with_retry(path: &Path) -> Result<UnixStream, std::io::Error> {\n\n let mut error = std::io::Error::last_os_error();\n\n\n\n for iter in 0..10 {\n\n if iter > 0 {\n\n std::thread::sleep(std::time::Duration::from_millis(iter * 10));\n\n }\n\n match UnixStream::connect(path) {\n\n Ok(stream) => return Ok(stream),\n\n Err(err) => error 
= err,\n\n }\n\n }\n\n\n\n Err(error)\n\n}\n\n\n", "file_path": "src/server/client.rs", "rank": 48, "score": 206641.51773283706 }, { "content": "fn deserialize_notnan<'de, D>(deserializer: D) -> Result<NotNan<f32>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let value = f32::deserialize(deserializer)?;\n\n NotNan::new(value).map_err(|e| serde::de::Error::custom(format!(\"{:?}\", e)))\n\n}\n\n\n", "file_path": "termwiz/src/image.rs", "rank": 49, "score": 201399.3281858599 }, { "content": "/// A convenience around `tabulate_output` that returns a String holding\n\n/// the formatted data.\n\npub fn tabulate_output_as_string<S: std::string::ToString>(\n\n columns: &[Column],\n\n rows: &[Vec<S>],\n\n) -> Result<String, std::io::Error> {\n\n let mut output: Vec<u8> = vec![];\n\n tabulate_output(columns, rows, &mut output)?;\n\n String::from_utf8(output)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, format!(\"{}\", e)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn basics() {\n\n let cols = vec![\n\n Column {\n\n name: \"hello\".to_string(),\n\n alignment: Alignment::Left,\n", "file_path": "tabout/src/lib.rs", "rank": 50, "score": 200593.5148663617 }, { "content": "// TODO: expose is_double_click_word in config file\n\nfn is_double_click_word(s: &str) -> bool {\n\n match s.len() {\n\n 1 => match s.chars().nth(0).unwrap() {\n\n ' ' | '\\t' | '\\n' | '{' | '[' | '}' | ']' | '(' | ')' | '\"' | '\\'' => false,\n\n _ => true,\n\n },\n\n 0 => false,\n\n _ => true,\n\n }\n\n}\n\n\n\nimpl SelectionRange {\n\n /// Create a new range that starts at the specified location\n\n pub fn start(start: SelectionCoordinate) -> Self {\n\n let end = start;\n\n Self { start, end }\n\n }\n\n\n\n /// Computes the selection range for the line around the specified coords\n\n pub fn line_around(start: SelectionCoordinate) -> Self {\n", "file_path": "src/frontend/gui/selection.rs", "rank": 51, "score": 199677.3905920895 }, { "content": 
"fn locate_offset_table<'a>(f: &OpenTypeFile<'a>, idx: usize) -> anyhow::Result<OffsetTable<'a>> {\n\n match &f.font {\n\n OpenTypeFont::Single(ttf) => Ok(ttf.clone()),\n\n OpenTypeFont::Collection(ttc) => {\n\n let offset_table_offset = ttc\n\n .offset_tables\n\n .read_item(idx)\n\n .map_err(|e| anyhow!(\"font idx={} is not present in ttc file: {}\", idx, e))?;\n\n let ttf = f\n\n .scope\n\n .offset(offset_table_offset as usize)\n\n .read::<OffsetTable>()?;\n\n Ok(ttf.clone())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/font/parser.rs", "rank": 52, "score": 199197.2395673602 }, { "content": "pub fn spawn_listener() -> anyhow::Result<()> {\n\n let config = configuration();\n\n for unix_dom in &config.unix_domains {\n\n let mut listener = local::LocalListener::with_domain(unix_dom)?;\n\n thread::spawn(move || {\n\n listener.run();\n\n });\n\n }\n\n\n\n for tls_server in &config.tls_servers {\n\n ossl::spawn_tls_listener(tls_server)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/server/listener/mod.rs", "rank": 53, "score": 198209.29899894557 }, { "content": "struct Performer<'a, F: FnMut(Action) + 'a> {\n\n callback: &'a mut F,\n\n}\n\n\n\nimpl<'a, F: FnMut(Action)> VTActor for Performer<'a, F> {\n\n fn print(&mut self, c: char) {\n\n (self.callback)(Action::Print(c));\n\n }\n\n\n\n fn execute_c0_or_c1(&mut self, byte: u8) {\n\n match num::FromPrimitive::from_u8(byte) {\n\n Some(code) => (self.callback)(Action::Control(code)),\n\n None => error!(\"impossible C0/C1 control code {:?} was dropped\", byte),\n\n }\n\n }\n\n\n\n fn dcs_hook(\n\n &mut self,\n\n params: &[i64],\n\n intermediates: &[u8],\n", "file_path": "termwiz/src/escape/parser/mod.rs", "rank": 54, "score": 198143.95367130553 }, { "content": "/// Returns true if r1 intersects r2\n\npub fn intersects_range<T: Ord + Copy>(r1: Range<T>, r2: Range<T>) -> bool {\n\n use std::cmp::{max, min};\n\n let start = max(r1.start, r2.start);\n\n let end = min(r1.end, r2.end);\n\n\n\n end > start\n\n}\n\n\n\n/// Position 
allows referring to an absolute visible row number\n\n/// or a position relative to some existing row number (typically\n\n/// where the cursor is located). Both of the cases are represented\n\n/// as signed numbers so that the math and error checking for out\n\n/// of range values can be deferred to the point where we execute\n\n/// the request.\n\n#[derive(Debug)]\n\npub enum Position {\n\n Absolute(VisibleRowIndex),\n\n Relative(i64),\n\n}\n\n\n", "file_path": "term/src/lib.rs", "rank": 55, "score": 196128.69292360533 }, { "content": "fn linear_f32_to_srgb8_using_table(f: f32) -> u8 {\n\n let minval = f32::from_bits(MINVAL);\n\n let almost_one = f32::from_bits(ALMOST_ONE);\n\n\n\n let f = if f < minval {\n\n minval\n\n } else if f > almost_one {\n\n almost_one\n\n } else {\n\n f\n\n };\n\n\n\n let f_bits = f.to_bits();\n\n let tab = unsafe { *F32_TO_U8_TABLE.get_unchecked(((f_bits - MINVAL) >> 20) as usize) };\n\n let bias = (tab >> 16) << 9;\n\n let scale = tab & 0xffff;\n\n\n\n let t = (f_bits >> 12) & 0xff;\n\n\n\n ((bias + scale * t) >> 16) as u8\n\n}\n\n\n", "file_path": "window/src/color.rs", "rank": 56, "score": 195939.13498293387 }, { "content": "fn compute_runtime_dir() -> Result<PathBuf, Error> {\n\n if let Some(runtime) = dirs::runtime_dir() {\n\n return Ok(runtime.join(\"wezterm\"));\n\n }\n\n\n\n let home = dirs::home_dir().ok_or_else(|| anyhow!(\"can't find home dir\"))?;\n\n Ok(home.join(\".local/share/wezterm\"))\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 57, "score": 195218.57633089728 }, { "content": "fn default_true() -> bool {\n\n true\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 58, "score": 195208.22405555556 }, { "content": "pub fn start_overlay<T, F>(\n\n term_window: &TermWindow,\n\n tab: &Rc<dyn Tab>,\n\n func: F,\n\n) -> (\n\n Rc<dyn Tab>,\n\n Pin<Box<dyn std::future::Future<Output = Option<anyhow::Result<T>>>>>,\n\n)\n\nwhere\n\n T: Send + 'static,\n\n F: Send + 'static + FnOnce(TabId, TermWizTerminal) -> 
anyhow::Result<T>,\n\n{\n\n let tab_id = tab.tab_id();\n\n let dims = tab.renderer().get_dimensions();\n\n let (tw_term, tw_tab) = allocate(dims.cols, dims.viewport_rows);\n\n\n\n let window = term_window.window.clone().unwrap();\n\n\n\n let future = promise::spawn::spawn_into_new_thread(move || {\n\n let res = func(tab_id, tw_term);\n\n TermWindow::schedule_cancel_overlay(window, tab_id);\n\n res\n\n });\n\n\n\n (Rc::new(tw_tab), Box::pin(future))\n\n}\n", "file_path": "src/frontend/gui/overlay/mod.rs", "rank": 59, "score": 194947.6153940219 }, { "content": "/// This is a conceptually simple function that computes the bounds\n\n/// of the whitespace delimited word at the specified cursor position\n\n/// in the supplied line string.\n\n/// It returns the range and the corresponding slice out of the line.\n\n/// This function is sufficient for example purposes; in a real application\n\n/// the equivalent function would need to be aware of quoting and other\n\n/// application specific context.\n\nfn word_at_cursor(line: &str, cursor_position: usize) -> Option<(std::ops::Range<usize>, &str)> {\n\n let char_indices: Vec<(usize, char)> = line.char_indices().collect();\n\n if char_indices.is_empty() {\n\n return None;\n\n }\n\n let char_position = char_indices\n\n .iter()\n\n .position(|(idx, _)| *idx == cursor_position)\n\n .unwrap_or(char_indices.len());\n\n\n\n // Look back until we find whitespace\n\n let mut start_position = char_position;\n\n while start_position > 0\n\n && start_position <= char_indices.len()\n\n && !char_indices[start_position - 1].1.is_whitespace()\n\n {\n\n start_position -= 1;\n\n }\n\n\n\n // Look forwards until we find whitespace\n", "file_path": "termwiz/examples/line_editor.rs", "rank": 60, "score": 193919.0374950367 }, { "content": "/// Helper function to set the close-on-exec flag for a raw descriptor\n\nfn cloexec(fd: RawFd) -> Result<(), Error> {\n\n let flags = unsafe { libc::fcntl(fd, libc::F_GETFD) };\n\n if flags == -1 {\n\n 
bail!(\n\n \"fcntl to read flags failed: {:?}\",\n\n io::Error::last_os_error()\n\n );\n\n }\n\n let result = unsafe { libc::fcntl(fd, libc::F_SETFD, flags | libc::FD_CLOEXEC) };\n\n if result == -1 {\n\n bail!(\n\n \"fcntl to set CLOEXEC failed: {:?}\",\n\n io::Error::last_os_error()\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl SlavePty for UnixSlavePty {\n\n fn spawn_command(&self, builder: CommandBuilder) -> Result<Box<dyn Child>, Error> {\n", "file_path": "pty/src/unix.rs", "rank": 61, "score": 192655.99588929603 }, { "content": "pub fn poll_for_read(pfd: &mut [pollfd]) {\n\n if let Err(e) = poll(pfd, None) {\n\n log::error!(\"poll failed for {}\", e);\n\n }\n\n}\n", "file_path": "src/server/pollable.rs", "rank": 62, "score": 192395.5317651396 }, { "content": "/// Spawn a future with normal priority.\n\npub fn spawn<F, R>(future: F) -> JoinHandle<R, ()>\n\nwhere\n\n F: Future<Output = R> + 'static,\n\n R: 'static,\n\n{\n\n let (task, handle) =\n\n async_task::spawn_local(future, |task| ON_MAIN_THREAD.lock().unwrap()(task), ());\n\n task.schedule();\n\n handle\n\n}\n\n\n", "file_path": "promise/src/spawn.rs", "rank": 63, "score": 191842.05039986194 }, { "content": "/// Returns true if r1 intersects r2\n\npub fn intersects_range<T: Integer + Copy + Debug>(r1: &Range<T>, r2: &Range<T>) -> bool {\n\n let start = max(r1.start, r2.start);\n\n let end = min(r1.end, r2.end);\n\n\n\n end > start\n\n}\n\n\n", "file_path": "rangeset/src/lib.rs", "rank": 64, "score": 190435.4557475201 }, { "content": "/// Spawn a future into the tokio runtime, spawning the tokio runtime\n\n/// if it hasn't already been started up. The tokio runtime (in the\n\n/// context of this crate) is intended primarily for scheduling network\n\n/// IO. 
Most futures should be spawned via the other functions provided\n\n/// by this module.\n\npub fn tokio_spawn<F>(future: F) -> tokio::task::JoinHandle<F::Output>\n\nwhere\n\n F: Future + Send + 'static,\n\n F::Output: Send + 'static,\n\n{\n\n TOKIO.spawn(future)\n\n}\n\n\n", "file_path": "promise/src/spawn.rs", "rank": 65, "score": 189828.4218023421 }, { "content": "#[allow(dead_code)]\n\npub fn use_default_configuration() {\n\n CONFIG.use_defaults();\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 66, "score": 189325.25256403477 }, { "content": "pub fn alloc_domain_id() -> DomainId {\n\n DOMAIN_ID.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)\n\n}\n\n\n", "file_path": "src/mux/domain.rs", "rank": 67, "score": 188550.33912182692 }, { "content": "pub fn alloc_tab_id() -> TabId {\n\n TAB_ID.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)\n\n}\n\n\n\nconst PASTE_CHUNK_SIZE: usize = 1024;\n\n\n", "file_path": "src/mux/tab.rs", "rank": 68, "score": 188550.33912182692 }, { "content": "pub fn pki_dir() -> anyhow::Result<PathBuf> {\n\n compute_runtime_dir().map(|d| d.join(\"pki\"))\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 69, "score": 188032.8582224786 }, { "content": "/// Spawn a future into the main thread; it will be polled in the\n\n/// main thread.\n\n/// This function can be called from any thread.\n\n/// If you are on the main thread already, consider using\n\n/// spawn() instead to lift the `Send` requirement.\n\npub fn spawn_into_main_thread<F, R>(future: F) -> JoinHandle<R, ()>\n\nwhere\n\n F: Future<Output = R> + Send + 'static,\n\n R: Send + 'static,\n\n{\n\n let (task, handle) = async_task::spawn(future, |task| ON_MAIN_THREAD.lock().unwrap()(task), ());\n\n task.schedule();\n\n handle\n\n}\n\n\n", "file_path": "promise/src/spawn.rs", "rank": 70, "score": 185726.01309201168 }, { "content": "/// Spawn a future with low priority; it will be polled only after\n\n/// all other normal priority items are processed.\n\npub fn 
spawn_with_low_priority<F, R>(future: F) -> JoinHandle<R, ()>\n\nwhere\n\n F: Future<Output = R> + 'static,\n\n R: 'static,\n\n{\n\n let (task, handle) = async_task::spawn_local(\n\n future,\n\n |task| ON_MAIN_THREAD_LOW_PRI.lock().unwrap()(task),\n\n (),\n\n );\n\n task.schedule();\n\n handle\n\n}\n\n\n\n/// Block the current thread until the passed future completes.\n\npub use async_std::task::block_on;\n\n\n\npub async fn join_handle_result<T>(handle: JoinHandle<anyhow::Result<T>, ()>) -> anyhow::Result<T> {\n\n handle\n\n .await\n\n .ok_or_else(|| anyhow::anyhow!(\"task was cancelled or panicked\"))?\n\n}\n", "file_path": "promise/src/spawn.rs", "rank": 71, "score": 185721.42431049646 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct NSRangePointer(*mut NSRange);\n\n\n\nimpl std::fmt::Debug for NSRange {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {\n\n fmt.debug_struct(\"NSRange\")\n\n .field(\"location\", &self.0.location)\n\n .field(\"length\", &self.0.length)\n\n .finish()\n\n }\n\n}\n\n\n\nunsafe impl objc::Encode for NSRange {\n\n fn encode() -> objc::Encoding {\n\n let encoding = format!(\n\n \"{{NSRange={}{}}}\",\n\n NSUInteger::encode().as_str(),\n\n NSUInteger::encode().as_str()\n\n );\n\n unsafe { objc::Encoding::from_str(&encoding) }\n\n }\n", "file_path": "window/src/os/macos/window.rs", "rank": 72, "score": 184526.42744981166 }, { "content": "fn terminate_with_error_message(err: &str) -> ! 
{\n\n log::error!(\"{}; terminating\", err);\n\n fatal_toast_notification(\"Wezterm Error\", &err);\n\n std::process::exit(1);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 73, "score": 184388.0636260179 }, { "content": "pub fn create_user_owned_dirs(p: &Path) -> anyhow::Result<()> {\n\n let mut builder = DirBuilder::new();\n\n builder.recursive(true);\n\n\n\n #[cfg(unix)]\n\n {\n\n builder.mode(0o700);\n\n }\n\n\n\n builder.create(p)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 74, "score": 182595.1182213056 }, { "content": "pub fn async_ssh_connect(remote_address: &str, username: &str) -> Future<ssh2::Session> {\n\n let mut promise = Promise::new();\n\n let future = promise.get_future().unwrap();\n\n let remote_address = remote_address.to_owned();\n\n let username = username.to_owned();\n\n std::thread::spawn(move || promise.result(ssh_connect(&remote_address, &username)));\n\n future\n\n}\n\n\n", "file_path": "src/ssh.rs", "rank": 75, "score": 181823.16008507274 }, { "content": "/// Spawn a future into the main thread; it will be polled in\n\n/// the main thread in the low priority queue--all other normal\n\n/// priority items will be drained before considering low priority\n\n/// spawns.\n\n/// If you are on the main thread already, consider using `spawn_with_low_priority`\n\n/// instead to lift the `Send` requirement.\n\npub fn spawn_into_main_thread_with_low_priority<F, R>(future: F) -> JoinHandle<R, ()>\n\nwhere\n\n F: Future<Output = R> + Send + 'static,\n\n R: Send + 'static,\n\n{\n\n let (task, handle) = async_task::spawn(\n\n future,\n\n |task| ON_MAIN_THREAD_LOW_PRI.lock().unwrap()(task),\n\n (),\n\n );\n\n task.schedule();\n\n handle\n\n}\n\n\n", "file_path": "promise/src/spawn.rs", "rank": 76, "score": 180218.64060214555 }, { "content": "/// Create a pair of connected sockets\n\n///\n\n/// This implementation creates a pair of SOCK_STREAM sockets.\n\npub fn socketpair() -> anyhow::Result<(FileDescriptor, FileDescriptor)> {\n\n 
socketpair_impl()\n\n}\n", "file_path": "filedescriptor/src/lib.rs", "rank": 77, "score": 179014.63460289146 }, { "content": "/// Ungh: https://github.com/microsoft/WSL/issues/4456\n\nfn utf16_to_utf8<'lua>(_: &'lua Lua, text: mlua::String) -> mlua::Result<String> {\n\n let bytes = text.as_bytes();\n\n\n\n if bytes.len() % 2 != 0 {\n\n return Err(mlua::Error::external(anyhow!(\n\n \"input data has odd length, cannot be utf16\"\n\n )));\n\n }\n\n\n\n // This is \"safe\" because we checked that the length seems reasonable,\n\n // and our new slice is within those same bounds.\n\n let wide: &[u16] =\n\n unsafe { std::slice::from_raw_parts(bytes.as_ptr() as *const u16, bytes.len() / 2) };\n\n\n\n String::from_utf16(wide).map_err(|e| mlua::Error::external(e))\n\n}\n\n\n", "file_path": "src/scripting/mod.rs", "rank": 78, "score": 175581.20815657984 }, { "content": "fn split_by_newlines<'lua>(_: &'lua Lua, text: String) -> mlua::Result<Vec<String>> {\n\n Ok(text\n\n .lines()\n\n .map(|s| {\n\n // Ungh, `str.lines()` is supposed to split by `\\n` or `\\r\\n`, but I've\n\n // found that it is necessary to have an additional trim here in order\n\n // to actually remove the `\\r`.\n\n s.trim_end_matches('\\r').to_string()\n\n })\n\n .collect())\n\n}\n\n\n", "file_path": "src/scripting/mod.rs", "rank": 79, "score": 175581.20815657984 }, { "content": "fn read_dir<'lua>(_: &'lua Lua, path: String) -> mlua::Result<Vec<String>> {\n\n let dir = std::fs::read_dir(path).map_err(|e| mlua::Error::external(e))?;\n\n let mut entries = vec![];\n\n for entry in dir {\n\n let entry = entry.map_err(|e| mlua::Error::external(e))?;\n\n if let Some(utf8) = entry.path().to_str() {\n\n entries.push(utf8.to_string());\n\n } else {\n\n return Err(mlua::Error::external(anyhow!(\n\n \"path entry {} is not representable as utf8\",\n\n entry.path().display()\n\n )));\n\n }\n\n }\n\n Ok(entries)\n\n}\n\n\n", "file_path": "src/scripting/mod.rs", "rank": 80, "score": 175581.20815657984 }, { "content": "fn 
adderr(e: AddConstraintError) -> Error {\n\n anyhow!(\"{:?}\", e)\n\n}\n\n\n\nimpl Default for LayoutState {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl LayoutState {\n\n /// Create a new `LayoutState`\n\n pub fn new() -> Self {\n\n let mut solver = Solver::new();\n\n let screen_width = Variable::new();\n\n let screen_height = Variable::new();\n\n solver\n\n .add_edit_variable(screen_width, STRONG)\n\n .expect(\"failed to add screen_width to solver\");\n\n solver\n", "file_path": "termwiz/src/widgets/layout.rs", "rank": 81, "score": 174551.6631055987 }, { "content": "#[derive(PartialEq, Eq, Hash)]\n\nstruct ShapeCacheKey((TextStyle, String));\n\n\n\npub struct TermWindow {\n\n pub window: Option<Window>,\n\n /// When we most recently received keyboard focus\n\n focused: Option<Instant>,\n\n fonts: Rc<FontConfiguration>,\n\n /// Window dimensions and dpi\n\n dimensions: Dimensions,\n\n /// Terminal dimensions\n\n terminal_size: PtySize,\n\n mux_window_id: MuxWindowId,\n\n render_metrics: RenderMetrics,\n\n render_state: RenderState,\n\n keys: KeyMap,\n\n show_tab_bar: bool,\n\n show_scroll_bar: bool,\n\n tab_bar: TabBarState,\n\n last_mouse_coords: (usize, i64),\n\n scroll_drag_start: Option<isize>,\n", "file_path": "src/frontend/gui/termwindow.rs", "rank": 82, "score": 174271.7549026981 }, { "content": "fn read_scroll_speed(name: &str) -> io::Result<i16> {\n\n let hkcu = RegKey::predef(HKEY_CURRENT_USER);\n\n let desktop = hkcu.open_subkey(\"Control Panel\\\\Desktop\")?;\n\n desktop\n\n .get_value::<String, _>(name)\n\n .and_then(|v| v.parse().map_err(|_| io::ErrorKind::InvalidData.into()))\n\n}\n\n\n\nunsafe fn mouse_wheel(hwnd: HWND, msg: UINT, wparam: WPARAM, lparam: LPARAM) -> Option<LRESULT> {\n\n if let Some(inner) = rc_from_hwnd(hwnd) {\n\n let (modifiers, mouse_buttons) = mods_and_buttons(wparam);\n\n let coords = mouse_coords(lparam);\n\n let delta = GET_WHEEL_DELTA_WPARAM(wparam);\n\n let scaled_delta = if msg == WM_MOUSEWHEEL {\n\n 
delta * (*WHEEL_SCROLL_LINES)\n\n } else {\n\n delta * (*WHEEL_SCROLL_CHARS)\n\n };\n\n let mut position = scaled_delta / WHEEL_DELTA;\n\n let remainder = delta % WHEEL_DELTA;\n", "file_path": "window/src/os/windows/window.rs", "rank": 83, "score": 173286.27957115232 }, { "content": "fn encode_modifiers(mods: KeyModifiers) -> u8 {\n\n let mut number = 0;\n\n if mods.contains(KeyModifiers::SHIFT) {\n\n number |= 1;\n\n }\n\n if mods.contains(KeyModifiers::ALT) {\n\n number |= 2;\n\n }\n\n if mods.contains(KeyModifiers::CTRL) {\n\n number |= 4;\n\n }\n\n number\n\n}\n\n\n\n// FIXME: provide an option to enable this, because it is super annoying\n\n// in vim when accidentally pressing shift-space and it emits a sequence\n\n// that undoes some number of commands\n\nconst ENABLE_CSI_U: bool = false;\n\n\n", "file_path": "term/src/terminalstate.rs", "rank": 84, "score": 170936.34714129695 }, { "content": "/// Returns the system hostname.\n\n/// Errors may occur while retrieving the hostname from the system,\n\n/// or if the hostname isn't a UTF-8 string.\n\nfn hostname<'lua>(_: &'lua Lua, _: ()) -> mlua::Result<String> {\n\n let hostname = hostname::get().map_err(|e| mlua::Error::external(e))?;\n\n match hostname.to_str() {\n\n Some(hostname) => Ok(hostname.to_owned()),\n\n None => Err(mlua::Error::external(anyhow!(\"hostname isn't UTF-8\"))),\n\n }\n\n}\n\n\n", "file_path": "src/scripting/mod.rs", "rank": 85, "score": 170850.30747050294 }, { "content": "/// Given a set of column headers and the row content,\n\n/// automatically compute the column widths and then format\n\n/// the data to the output stream.\n\n/// If a given row has more columns than are defined in the\n\n/// columns slice, then a left aligned column with no label\n\n/// will be assumed.\n\npub fn tabulate_output<S: std::string::ToString, W: std::io::Write>(\n\n columns: &[Column],\n\n rows: &[Vec<S>],\n\n output: &mut W,\n\n) -> Result<(), std::io::Error> {\n\n let mut col_widths: Vec<usize> = 
columns\n\n .iter()\n\n .map(|c| unicode_column_width(&c.name))\n\n .collect();\n\n\n\n let mut display_rows: Vec<Vec<String>> = vec![];\n\n for src_row in rows {\n\n let dest_row: Vec<String> = src_row.iter().map(|col| col.to_string()).collect();\n\n for (idx, col) in dest_row.iter().enumerate() {\n\n let col_width = unicode_column_width(col);\n\n if let Some(width) = col_widths.get_mut(idx) {\n\n *width = (*width).max(col_width);\n\n } else {\n\n col_widths.push(col_width);\n\n }\n", "file_path": "tabout/src/lib.rs", "rank": 86, "score": 168062.84769492436 }, { "content": "/// Set up a lua context for executing some code.\n\n/// The path to the directory containing the configuration is\n\n/// passed in and is used to pre-set some global values in\n\n/// the environment.\n\n///\n\n/// The `package.path` is configured to search the user's\n\n/// wezterm specific config paths for lua modules, should\n\n/// they choose to `require` additional code from their config.\n\n///\n\n/// A `wezterm` module is registered so that the script can\n\n/// `require \"wezterm\"` and call into functions provided by\n\n/// wezterm. The wezterm module contains:\n\n/// * `executable_dir` - the directory containing the wezterm\n\n/// executable. 
This is potentially useful for portable\n\n/// installs on Windows.\n\n/// * `config_dir` - the directory containing the wezterm\n\n/// configuration.\n\n/// * `log_error` - a function that logs to stderr (or the server\n\n/// log file for daemonized wezterm).\n\n/// * `target_triple` - the rust compilation target triple.\n\n/// * `version` - the version of the running wezterm instance.\n\n/// * `home_dir` - the path to the user's home directory\n\n///\n\n/// In addition to this, the lua standard library, except for\n\n/// the `debug` module, is also available to the script.\n\npub fn make_lua_context(config_dir: &Path) -> anyhow::Result<Lua> {\n\n let lua = Lua::new();\n\n\n\n {\n\n let globals = lua.globals();\n\n // This table will be the `wezterm` module in the script\n\n let wezterm_mod = lua.create_table()?;\n\n\n\n let package: Table = globals.get(\"package\")?;\n\n let package_path: String = package.get(\"path\")?;\n\n let mut path_array: Vec<String> = package_path.split(\";\").map(|s| s.to_owned()).collect();\n\n\n\n fn prefix_path(array: &mut Vec<String>, path: &Path) {\n\n array.insert(0, format!(\"{}/?.lua\", path.display()));\n\n array.insert(1, format!(\"{}/?/init.lua\", path.display()));\n\n }\n\n\n\n prefix_path(&mut path_array, &crate::config::HOME_DIR.join(\".wezterm\"));\n\n prefix_path(\n\n &mut path_array,\n", "file_path": "src/scripting/mod.rs", "rank": 87, "score": 167781.26055916084 }, { "content": "/// Convert a rust string to a windows wide string\n\nfn wide_string(s: &str) -> Vec<u16> {\n\n use std::os::windows::ffi::OsStrExt;\n\n std::ffi::OsStr::new(s)\n\n .encode_wide()\n\n .chain(std::iter::once(0))\n\n .collect()\n\n}\n", "file_path": "window/src/os/windows/mod.rs", "rank": 88, "score": 162833.15026961087 }, { "content": "/// Computes the r1 - r2, which may result in up to two non-overlapping ranges.\n\npub fn range_subtract<T: Integer + Copy + Debug>(\n\n r1: &Range<T>,\n\n r2: &Range<T>,\n\n) -> (Option<Range<T>>, Option<Range<T>>) 
{\n\n let i_start = max(r1.start, r2.start);\n\n let i_end = min(r1.end, r2.end);\n\n\n\n if i_end > i_start {\n\n let a = if i_start == r1.start {\n\n // Intersection overlaps with the LHS\n\n None\n\n } else {\n\n // The LHS up to the intersection\n\n Some(r1.start..r1.end.min(i_start))\n\n };\n\n\n\n let b = if i_end == r1.end {\n\n // Intersection overlaps with the RHS\n\n None\n\n } else {\n", "file_path": "rangeset/src/lib.rs", "rank": 89, "score": 158412.1043399687 }, { "content": "/// Computes the intersection of r1 and r2\n\npub fn range_intersection<T: Integer + Copy + Debug>(\n\n r1: &Range<T>,\n\n r2: &Range<T>,\n\n) -> Option<Range<T>> {\n\n let start = max(r1.start, r2.start);\n\n let end = min(r1.end, r2.end);\n\n\n\n if end > start {\n\n Some(start..end)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "rangeset/src/lib.rs", "rank": 90, "score": 158401.39113842367 }, { "content": "pub fn unicode_column_width_of_change_slice(s: &[Change]) -> usize {\n\n s.iter()\n\n .map(|c| {\n\n if c.is_text() {\n\n unicode_column_width(c.text())\n\n } else {\n\n 0\n\n }\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "tabout/src/lib.rs", "rank": 91, "score": 158370.30678008494 }, { "content": "/// Convert the input value to 1-based u32.\n\n/// The intent is to protect consumers from out of range values\n\n/// when operating on the data, while balancing strictness with\n\n/// practical implementation bugs. For example, it is common\n\n/// to see 0 values being emitted from existing libraries, and\n\n/// we desire to see the intended output.\n\n/// Ensures that the value is in the range 1..=max_value.\n\n/// If the input is 0 it is treated as 1. 
If the value is\n\n/// otherwise outside that range, an error is propagated and\n\n/// that will typically case the sequence to be reported via\n\n/// the Unspecified placeholder.\n\nfn to_1b_u32(v: i64) -> Result<u32, ()> {\n\n if v == 0 {\n\n Ok(1)\n\n } else if v > 0 && v <= i64::from(u32::max_value()) {\n\n Ok(v as u32)\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n\nmacro_rules! noparams {\n\n ($ns:ident, $variant:ident, $params:expr) => {{\n\n if $params.len() != 0 {\n\n Err(())\n\n } else {\n\n Ok(CSI::$ns($ns::$variant))\n\n }\n\n }};\n\n}\n\n\n", "file_path": "termwiz/src/escape/csi.rs", "rank": 92, "score": 156800.72118958575 }, { "content": "fn run_serial(config: config::ConfigHandle, opts: &SerialCommand) -> anyhow::Result<()> {\n\n let fontconfig = Rc::new(FontConfiguration::new());\n\n\n\n let mut serial = portable_pty::serial::SerialTty::new(&opts.port);\n\n if let Some(baud) = opts.baud {\n\n serial.set_baud_rate(serial::BaudRate::from_speed(baud));\n\n }\n\n\n\n let pty_system = Box::new(serial);\n\n let domain: Arc<dyn Domain> = Arc::new(LocalDomain::with_pty_system(\"local\", pty_system));\n\n let mux = Rc::new(mux::Mux::new(Some(domain.clone())));\n\n Mux::set_mux(&mux);\n\n\n\n let front_end = opts.front_end.unwrap_or(config.front_end);\n\n let gui = front_end.try_new()?;\n\n block_on(domain.attach())?; // FIXME: blocking\n\n\n\n let window_id = mux.new_empty_window();\n\n let tab = block_on(domain.spawn(config.initial_size(), None, None, window_id))?; // FIXME: blocking\n\n gui.spawn_new_window(&fontconfig, &tab, window_id)?;\n\n\n\n maybe_show_configuration_error_window();\n\n gui.run_forever()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 93, "score": 156665.3251050702 }, { "content": "fn toast_notification(title: &str, message: &str) {\n\n #[cfg(not(windows))]\n\n {\n\n notify_rust::Notification::new()\n\n .summary(title)\n\n .body(message)\n\n // Stay on the screen until dismissed\n\n .hint(notify_rust::NotificationHint::Resident(true))\n\n 
// timeout isn't respected on macos\n\n .timeout(0)\n\n .show()\n\n .ok();\n\n }\n\n\n\n #[cfg(windows)]\n\n {\n\n let title = title.to_owned();\n\n let message = message.to_owned();\n\n\n\n // We need to be in a different thread from the caller\n", "file_path": "src/main.rs", "rank": 94, "score": 155129.4059433784 }, { "content": "/// A channel that can be polled together with a socket.\n\n/// This uses the self-pipe trick but with a unix domain\n\n/// socketpair.\n\n/// In theory this should also work on windows, but will require\n\n/// windows 10 w/unix domain socket support.\n\npub fn pollable_channel<T>() -> anyhow::Result<(PollableSender<T>, PollableReceiver<T>)> {\n\n let (sender, receiver) = channel();\n\n let (mut write, mut read) = socketpair()?;\n\n\n\n write.set_non_blocking(true)?;\n\n read.set_non_blocking(true)?;\n\n\n\n Ok((\n\n PollableSender {\n\n sender,\n\n write: Arc::new(Mutex::new(FileDescriptor::new(write))),\n\n },\n\n PollableReceiver {\n\n receiver,\n\n read: RefCell::new(FileDescriptor::new(read)),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/server/pollable.rs", "rank": 95, "score": 154232.5903377335 }, { "content": "/// Translate an error and value into a result\n\nfn ft_result<T>(err: FT_Error, t: T) -> anyhow::Result<T> {\n\n if succeeded(err) {\n\n Ok(t)\n\n } else {\n\n Err(anyhow!(\"FreeType error {:?} 0x{:x}\", err, err))\n\n }\n\n}\n\n\n", "file_path": "src/font/ftwrap.rs", "rank": 96, "score": 154230.61586472386 }, { "content": "pub fn reload() {\n\n CONFIG.reload();\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 97, "score": 154225.14352519836 }, { "content": "fn process_unilateral(local_domain_id: DomainId, decoded: DecodedPdu) -> anyhow::Result<()> {\n\n if let Some(tab_id) = decoded.pdu.tab_id() {\n\n let pdu = decoded.pdu;\n\n promise::spawn::spawn_into_main_thread(async move {\n\n let mux = Mux::get().unwrap();\n\n let client_domain = mux\n\n .get_domain(local_domain_id)\n\n .ok_or_else(|| anyhow!(\"no such domain 
{}\", local_domain_id))?;\n\n let client_domain = client_domain\n\n .downcast_ref::<ClientDomain>()\n\n .ok_or_else(|| {\n\n anyhow!(\"domain {} is not a ClientDomain instance\", local_domain_id)\n\n })?;\n\n\n\n let local_tab_id = client_domain\n\n .remote_to_local_tab_id(tab_id)\n\n .ok_or_else(|| anyhow!(\"remote tab id {} does not have a local tab id\", tab_id))?;\n\n let tab = mux\n\n .get_tab(local_tab_id)\n\n .ok_or_else(|| anyhow!(\"no such tab {}\", local_tab_id))?;\n", "file_path": "src/server/client.rs", "rank": 98, "score": 153607.56273332055 } ]
Rust
src/recovery/hystart.rs
ehaydenr/quiche
d0b40f791fd46f1ffdf0357f18e1ba5953723a59
use std::cmp; use std::time::Duration; use std::time::Instant; use crate::packet; use crate::recovery; const LOW_CWND: usize = 16; const MIN_RTT_THRESH: Duration = Duration::from_millis(4); const MAX_RTT_THRESH: Duration = Duration::from_millis(16); pub const LSS_DIVISOR: f64 = 0.25; pub const N_RTT_SAMPLE: usize = 8; #[derive(Default)] pub struct Hystart { enabled: bool, window_end: Option<u64>, last_round_min_rtt: Option<Duration>, current_round_min_rtt: Option<Duration>, rtt_sample_count: usize, lss_start_time: Option<Instant>, } impl std::fmt::Debug for Hystart { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "window_end={:?} ", self.window_end)?; write!(f, "last_round_min_rtt={:?} ", self.last_round_min_rtt)?; write!(f, "current_round_min_rtt={:?} ", self.current_round_min_rtt)?; write!(f, "rtt_sample_count={:?} ", self.rtt_sample_count)?; write!(f, "lss_start_time={:?} ", self.lss_start_time)?; Ok(()) } } impl Hystart { pub fn new(enabled: bool) -> Self { Self { enabled, ..Default::default() } } pub fn enabled(&self) -> bool { self.enabled } pub fn lss_start_time(&self) -> Option<Instant> { self.lss_start_time } pub fn in_lss(&self, epoch: packet::Epoch) -> bool { self.enabled && epoch == packet::EPOCH_APPLICATION && self.lss_start_time().is_some() } pub fn start_round(&mut self, pkt_num: u64) { if self.window_end.is_none() { *self = Hystart { enabled: self.enabled, window_end: Some(pkt_num), last_round_min_rtt: self.current_round_min_rtt, current_round_min_rtt: None, rtt_sample_count: 0, lss_start_time: None, }; } } pub fn try_enter_lss( &mut self, packet: &recovery::Acked, rtt: Duration, cwnd: usize, now: Instant, max_datagram_size: usize, ) -> bool { if self.lss_start_time().is_none() { if let Some(current_round_min_rtt) = self.current_round_min_rtt { self.current_round_min_rtt = Some(cmp::min(current_round_min_rtt, rtt)); } else { self.current_round_min_rtt = Some(rtt); } self.rtt_sample_count += 1; if cwnd >= (LOW_CWND * 
max_datagram_size) && self.rtt_sample_count >= N_RTT_SAMPLE && self.current_round_min_rtt.is_some() && self.last_round_min_rtt.is_some() { let rtt_thresh = cmp::max( self.last_round_min_rtt.unwrap() / 8, MIN_RTT_THRESH, ); let rtt_thresh = cmp::min(rtt_thresh, MAX_RTT_THRESH); if self.current_round_min_rtt.unwrap() >= (self.last_round_min_rtt.unwrap() + rtt_thresh) { self.lss_start_time = Some(now); } } if let Some(end_pkt_num) = self.window_end { if packet.pkt_num >= end_pkt_num { self.window_end = None; } } } self.lss_start_time.is_some() } pub fn lss_cwnd( &self, pkt_size: usize, bytes_acked: usize, cwnd: usize, ssthresh: usize, max_datagram_size: usize, ) -> usize { let k = cwnd as f64 / (LSS_DIVISOR * ssthresh as f64); cwnd + cmp::min( pkt_size, max_datagram_size * recovery::ABC_L - cmp::min(bytes_acked, max_datagram_size * recovery::ABC_L), ) / k as usize } pub fn congestion_event(&mut self) { self.window_end = None; self.lss_start_time = None; } } #[cfg(test)] mod tests { use super::*; #[test] fn start_round() { let mut hspp = Hystart::default(); let pkt_num = 100; hspp.start_round(pkt_num); assert_eq!(hspp.window_end, Some(pkt_num)); assert_eq!(hspp.current_round_min_rtt, None); } #[test] fn lss_cwnd() { let hspp = Hystart::default(); let datagram_size = 1200; let mut cwnd = 24000; let ssthresh = 24000; let lss_cwnd = hspp.lss_cwnd(datagram_size, 0, cwnd, ssthresh, datagram_size); assert_eq!( cwnd + (datagram_size as f64 * LSS_DIVISOR) as usize, lss_cwnd ); cwnd = lss_cwnd; let lss_cwnd = hspp.lss_cwnd( datagram_size, datagram_size, cwnd, ssthresh, datagram_size, ); assert_eq!( cwnd + (datagram_size as f64 * LSS_DIVISOR) as usize, lss_cwnd ); } #[test] fn congestion_event() { let mut hspp = Hystart::default(); let pkt_num = 100; hspp.start_round(pkt_num); assert_eq!(hspp.window_end, Some(pkt_num)); hspp.congestion_event(); assert_eq!(hspp.window_end, None); } }
use std::cmp; use std::time::Duration; use std::time::Instant; use crate::packet; use crate::recovery; const LOW_CWND: usize = 16; const MIN_RTT_THRESH: Duration = Duration::from_millis(4); const MAX_RTT_THRESH: Duration = Duration::from_millis(16); pub const LSS_DIVISOR: f64 = 0.25; pub const N_RTT_SAMPLE: usize = 8; #[derive(Default)] pub struct Hystart { enabled: bool, window_end: Option<u64>, last_round_min_rtt: Option<Duration>, current_round_min_rtt: Option<Duration>, rtt_sample_count: usize, lss_start_time: Option<Instant>, } impl std::fmt::Debug for Hystart { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "window_end={:?} ", self.window_end)?; write!(f, "last_round_min_rtt={:?} ", self.last_round_min_rtt)?; write!(f, "current_round_min_rtt={:?} ", self.current_round_min_rtt)?; write!(f, "rtt_sample_count={:?} ", self.rtt_sample_count)?; write!(f, "lss_start_time={:?} ", self.lss_start_time)?; Ok(()) } } impl Hystart { pub fn new(enabled: bool) -> Self { Self { enabled, ..Default::default() } } pub fn enabled(&self) -> bool { self.enabled } pub fn lss_start_time(&self) -> Option<Instant> { self.lss_start_time } pub fn in_lss(&self, epoch: packet::Epoch) -> bool { self.enabled && epoch == packet::EPOCH_APPLICATION && self.lss_start_time().is_some() } pub fn start_round(&mut self, pkt_num: u64) {
} pub fn try_enter_lss( &mut self, packet: &recovery::Acked, rtt: Duration, cwnd: usize, now: Instant, max_datagram_size: usize, ) -> bool { if self.lss_start_time().is_none() { if let Some(current_round_min_rtt) = self.current_round_min_rtt { self.current_round_min_rtt = Some(cmp::min(current_round_min_rtt, rtt)); } else { self.current_round_min_rtt = Some(rtt); } self.rtt_sample_count += 1; if cwnd >= (LOW_CWND * max_datagram_size) && self.rtt_sample_count >= N_RTT_SAMPLE && self.current_round_min_rtt.is_some() && self.last_round_min_rtt.is_some() { let rtt_thresh = cmp::max( self.last_round_min_rtt.unwrap() / 8, MIN_RTT_THRESH, ); let rtt_thresh = cmp::min(rtt_thresh, MAX_RTT_THRESH); if self.current_round_min_rtt.unwrap() >= (self.last_round_min_rtt.unwrap() + rtt_thresh) { self.lss_start_time = Some(now); } } if let Some(end_pkt_num) = self.window_end { if packet.pkt_num >= end_pkt_num { self.window_end = None; } } } self.lss_start_time.is_some() } pub fn lss_cwnd( &self, pkt_size: usize, bytes_acked: usize, cwnd: usize, ssthresh: usize, max_datagram_size: usize, ) -> usize { let k = cwnd as f64 / (LSS_DIVISOR * ssthresh as f64); cwnd + cmp::min( pkt_size, max_datagram_size * recovery::ABC_L - cmp::min(bytes_acked, max_datagram_size * recovery::ABC_L), ) / k as usize } pub fn congestion_event(&mut self) { self.window_end = None; self.lss_start_time = None; } } #[cfg(test)] mod tests { use super::*; #[test] fn start_round() { let mut hspp = Hystart::default(); let pkt_num = 100; hspp.start_round(pkt_num); assert_eq!(hspp.window_end, Some(pkt_num)); assert_eq!(hspp.current_round_min_rtt, None); } #[test] fn lss_cwnd() { let hspp = Hystart::default(); let datagram_size = 1200; let mut cwnd = 24000; let ssthresh = 24000; let lss_cwnd = hspp.lss_cwnd(datagram_size, 0, cwnd, ssthresh, datagram_size); assert_eq!( cwnd + (datagram_size as f64 * LSS_DIVISOR) as usize, lss_cwnd ); cwnd = lss_cwnd; let lss_cwnd = hspp.lss_cwnd( datagram_size, datagram_size, cwnd, 
ssthresh, datagram_size, ); assert_eq!( cwnd + (datagram_size as f64 * LSS_DIVISOR) as usize, lss_cwnd ); } #[test] fn congestion_event() { let mut hspp = Hystart::default(); let pkt_num = 100; hspp.start_round(pkt_num); assert_eq!(hspp.window_end, Some(pkt_num)); hspp.congestion_event(); assert_eq!(hspp.window_end, None); } }
if self.window_end.is_none() { *self = Hystart { enabled: self.enabled, window_end: Some(pkt_num), last_round_min_rtt: self.current_round_min_rtt, current_round_min_rtt: None, rtt_sample_count: 0, lss_start_time: None, }; }
if_condition
[ { "content": "/// Returns true if the stream is bidirectional.\n\npub fn is_bidi(stream_id: u64) -> bool {\n\n (stream_id & 0x2) == 0\n\n}\n\n\n\n/// An iterator over QUIC streams.\n\n#[derive(Default)]\n\npub struct StreamIter {\n\n streams: Vec<u64>,\n\n}\n\n\n\nimpl StreamIter {\n\n #[inline]\n\n fn from(streams: &HashSet<u64>) -> Self {\n\n StreamIter {\n\n streams: streams.iter().copied().collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for StreamIter {\n", "file_path": "src/stream.rs", "rank": 0, "score": 179295.29532218393 }, { "content": "/// Returns true if the stream was created locally.\n\npub fn is_local(stream_id: u64, is_server: bool) -> bool {\n\n (stream_id & 0x1) == (is_server as u64)\n\n}\n\n\n", "file_path": "src/stream.rs", "rank": 1, "score": 179023.04949534734 }, { "content": "/// Returns how many bytes it would take to encode `v` as a variable-length\n\n/// integer.\n\npub fn varint_len(v: u64) -> usize {\n\n if v <= 63 {\n\n 1\n\n } else if v <= 16383 {\n\n 2\n\n } else if v <= 1_073_741_823 {\n\n 4\n\n } else if v <= 4_611_686_018_427_387_903 {\n\n 8\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/octets.rs", "rank": 2, "score": 172735.81340129024 }, { "content": "pub fn rand_u64() -> u64 {\n\n let mut buf = [0; 8];\n\n\n\n rand_bytes(&mut buf);\n\n\n\n u64::from_ne_bytes(buf)\n\n}\n\n\n", "file_path": "src/rand.rs", "rank": 3, "score": 170330.49606238047 }, { "content": "pub fn rand_u64_uniform(max: u64) -> u64 {\n\n let chunk_size = u64::max_value() / max;\n\n let end_of_last_chunk = chunk_size * max;\n\n\n\n let mut r = rand_u64();\n\n\n\n while r >= end_of_last_chunk {\n\n r = rand_u64();\n\n }\n\n\n\n r / chunk_size\n\n}\n\n\n\nextern {\n\n fn RAND_bytes(buf: *mut u8, len: libc::size_t) -> libc::c_int;\n\n}\n", "file_path": "src/rand.rs", "rank": 4, "score": 159648.35973805853 }, { "content": "pub fn pkt_num_len(pn: u64) -> Result<usize> {\n\n let len = if pn < u64::from(std::u8::MAX) {\n\n 1\n\n } else if pn < 
u64::from(std::u16::MAX) {\n\n 2\n\n } else if pn < u64::from(std::u32::MAX) {\n\n 4\n\n } else {\n\n return Err(Error::InvalidPacket);\n\n };\n\n\n\n Ok(len)\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 5, "score": 151244.45529300784 }, { "content": "pub fn decode_pkt_num(largest_pn: u64, truncated_pn: u64, pn_len: usize) -> u64 {\n\n let pn_nbits = pn_len * 8;\n\n let expected_pn = largest_pn + 1;\n\n let pn_win = 1 << pn_nbits;\n\n let pn_hwin = pn_win / 2;\n\n let pn_mask = pn_win - 1;\n\n let candidate_pn = (expected_pn & !pn_mask) | truncated_pn;\n\n\n\n if candidate_pn + pn_hwin <= expected_pn && candidate_pn < (1 << 62) - pn_win\n\n {\n\n return candidate_pn + pn_win;\n\n }\n\n\n\n if candidate_pn > expected_pn + pn_hwin && candidate_pn >= pn_win {\n\n return candidate_pn - pn_win;\n\n }\n\n\n\n candidate_pn\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 6, "score": 151214.13648125104 }, { "content": "pub fn encode_pkt_num(pn: u64, b: &mut octets::OctetsMut) -> Result<()> {\n\n let len = pkt_num_len(pn)?;\n\n\n\n match len {\n\n 1 => b.put_u8(pn as u8)?,\n\n\n\n 2 => b.put_u16(pn as u16)?,\n\n\n\n 3 => b.put_u24(pn as u32)?,\n\n\n\n 4 => b.put_u32(pn as u32)?,\n\n\n\n _ => return Err(Error::InvalidPacket),\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 7, "score": 150994.93125677065 }, { "content": "fn range_overlaps(r: &Range<u64>, other: &Range<u64>) -> bool {\n\n other.start >= r.start && other.start <= r.end ||\n\n other.end >= r.start && other.end <= r.end\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn insert_non_overlapping() {\n\n let mut r = RangeSet::default();\n\n assert_eq!(r.inner.len(), 0);\n\n let empty: &[u64] = &[];\n\n assert_eq!(&r.flatten().collect::<Vec<u64>>(), &empty);\n\n\n\n r.insert(4..7);\n\n assert_eq!(r.inner.len(), 1);\n\n assert_eq!(&r.flatten().collect::<Vec<u64>>(), &[4, 5, 6]);\n\n\n", "file_path": "src/ranges.rs", "rank": 8, "score": 140179.7030896852 }, 
{ "content": "pub fn encode(src: &[u8], out: &mut octets::OctetsMut, low: bool) -> Result<()> {\n\n let mut bits: u64 = 0;\n\n let mut bits_left = 40;\n\n\n\n for &b in src {\n\n let b = if low { b.to_ascii_lowercase() } else { b };\n\n\n\n let (nbits, code) = ENCODE_TABLE[b as usize];\n\n\n\n bits |= code << (bits_left - nbits);\n\n bits_left -= nbits;\n\n\n\n while bits_left <= 32 {\n\n out.put_u8((bits >> 32) as u8)?;\n\n\n\n bits <<= 8;\n\n bits_left += 8;\n\n }\n\n }\n\n\n\n if bits_left != 40 {\n\n // This writes the EOS token\n\n bits |= (1 << bits_left) - 1;\n\n\n\n out.put_u8((bits >> 32) as u8)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/h3/qpack/huffman/mod.rs", "rank": 9, "score": 135150.2447792971 }, { "content": "#[inline]\n\npub fn version_is_supported(version: u32) -> bool {\n\n matches!(\n\n version,\n\n PROTOCOL_VERSION_DRAFT27 |\n\n PROTOCOL_VERSION_DRAFT28 |\n\n PROTOCOL_VERSION_DRAFT29\n\n )\n\n}\n\n\n\n/// Pushes a frame to the output packet if there is enough space.\n\n///\n\n/// Returns `true` on success, `false` otherwise. In case of failure it means\n\n/// there is no room to add the frame in the packet. You may retry to add the\n\n/// frame later.\n\nmacro_rules! 
push_frame_to_pkt {\n\n ($out:expr, $frames:expr, $frame:expr, $left:expr) => {{\n\n if $frame.wire_len() <= $left {\n\n $left -= $frame.wire_len();\n\n\n\n $frame.to_bytes(&mut $out)?;\n", "file_path": "src/lib.rs", "rank": 10, "score": 134381.53799398517 }, { "content": "pub fn collapse_cwnd(r: &mut Recovery) {\n\n r.congestion_window = r.max_datagram_size * recovery::MINIMUM_WINDOW_PACKETS;\n\n r.bytes_acked_sl = 0;\n\n r.bytes_acked_ca = 0;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::time::Duration;\n\n\n\n #[test]\n\n fn reno_init() {\n\n let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();\n\n cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);\n\n\n\n let r = Recovery::new(&cfg);\n\n\n\n assert!(r.cwnd() > 0);\n", "file_path": "src/recovery/reno.rs", "rank": 11, "score": 134320.48411932072 }, { "content": "pub fn rand_bytes(buf: &mut [u8]) {\n\n unsafe {\n\n RAND_bytes(buf.as_mut_ptr(), buf.len());\n\n }\n\n}\n\n\n", "file_path": "src/rand.rs", "rank": 12, "score": 134320.48411932072 }, { "content": "fn decode_int(b: &mut octets::Octets, prefix: usize) -> Result<u64> {\n\n let mask = 2u64.pow(prefix as u32) - 1;\n\n\n\n let mut val = u64::from(b.get_u8()?);\n\n val &= mask;\n\n\n\n if val < mask {\n\n return Ok(val);\n\n }\n\n\n\n let mut shift = 0;\n\n\n\n while b.cap() > 0 {\n\n let byte = b.get_u8()?;\n\n\n\n let inc = u64::from(byte & 0x7f)\n\n .checked_shl(shift)\n\n .ok_or(Error::BufferTooShort)?;\n\n\n\n val = val.checked_add(inc).ok_or(Error::BufferTooShort)?;\n", "file_path": "src/h3/qpack/decoder.rs", "rank": 13, "score": 129487.52585864451 }, { "content": "pub fn on_packet_sent(r: &mut Recovery, sent_bytes: usize, _now: Instant) {\n\n r.bytes_in_flight += sent_bytes;\n\n}\n\n\n", "file_path": "src/recovery/reno.rs", "rank": 14, "score": 129424.89191986487 }, { "content": "pub fn encode_output_length(src: &[u8], low: bool) -> Result<usize> {\n\n let mut bits: usize = 0;\n\n\n\n for &b in 
src {\n\n let b = if low { b.to_ascii_lowercase() } else { b };\n\n\n\n let (nbits, _) = ENCODE_TABLE[b as usize];\n\n bits += nbits;\n\n }\n\n\n\n let mut len = bits / 8;\n\n\n\n if bits & 7 != 0 {\n\n len += 1;\n\n }\n\n\n\n Ok(len)\n\n}\n\n\n", "file_path": "src/h3/qpack/huffman/mod.rs", "rank": 15, "score": 124461.93254987829 }, { "content": "/// Handles newly writable streams.\n\nfn handle_writable(client: &mut Client, stream_id: u64) {\n\n let conn = &mut client.conn;\n\n\n\n debug!(\"{} stream {} is writable\", conn.trace_id(), stream_id);\n\n\n\n if !client.partial_responses.contains_key(&stream_id) {\n\n return;\n\n }\n\n\n\n let resp = client.partial_responses.get_mut(&stream_id).unwrap();\n\n let body = &resp.body[resp.written..];\n\n\n\n let written = match conn.stream_send(stream_id, &body, true) {\n\n Ok(v) => v,\n\n\n\n Err(quiche::Error::Done) => 0,\n\n\n\n Err(e) => {\n\n client.partial_responses.remove(&stream_id);\n\n\n", "file_path": "examples/server.rs", "rank": 16, "score": 121939.69764879896 }, { "content": "pub fn retry(\n\n scid: &[u8], dcid: &[u8], new_scid: &[u8], token: &[u8], version: u32,\n\n out: &mut [u8],\n\n) -> Result<usize> {\n\n let mut b = octets::OctetsMut::with_slice(out);\n\n\n\n if !crate::version_is_supported(version) {\n\n return Err(Error::UnknownVersion);\n\n }\n\n\n\n let hdr = Header {\n\n ty: Type::Retry,\n\n version,\n\n dcid: ConnectionId::from_ref(scid),\n\n scid: ConnectionId::from_ref(new_scid),\n\n pkt_num: 0,\n\n pkt_num_len: 0,\n\n token: Some(token.to_vec()),\n\n versions: None,\n\n key_phase: false,\n", "file_path": "src/packet.rs", "rank": 17, "score": 119359.40026094837 }, { "content": "#[inline]\n\npub fn accept(\n\n scid: &ConnectionId, odcid: Option<&ConnectionId>, config: &mut Config,\n\n) -> Result<Pin<Box<Connection>>> {\n\n let conn = Connection::new(scid, odcid, config, true)?;\n\n\n\n Ok(conn)\n\n}\n\n\n\n/// Creates a new client-side connection.\n\n///\n\n/// The `scid` parameter is used as the 
connection's source connection ID,\n\n/// while the optional `server_name` parameter is used to verify the peer's\n\n/// certificate.\n\n///\n\n/// ## Examples:\n\n///\n\n/// ```no_run\n\n/// # let mut config = quiche::Config::new(0xbabababa)?;\n\n/// # let server_name = \"quic.tech\";\n\n/// # let scid = quiche::ConnectionId::from_ref(&[0xba; 16]);\n\n/// let conn = quiche::connect(Some(&server_name), &scid, &mut config)?;\n\n/// # Ok::<(), quiche::Error>(())\n\n/// ```\n", "file_path": "src/lib.rs", "rank": 18, "score": 119359.40026094837 }, { "content": "#[inline]\n\npub fn connect(\n\n server_name: Option<&str>, scid: &ConnectionId, config: &mut Config,\n\n) -> Result<Pin<Box<Connection>>> {\n\n let conn = Connection::new(scid, None, config, false)?;\n\n\n\n if let Some(server_name) = server_name {\n\n conn.handshake.lock().unwrap().set_host_name(server_name)?;\n\n }\n\n\n\n Ok(conn)\n\n}\n\n\n\n/// Writes a version negotiation packet.\n\n///\n\n/// The `scid` and `dcid` parameters are the source connection ID and the\n\n/// destination connection ID extracted from the received client's Initial\n\n/// packet that advertises an unsupported version.\n\n///\n\n/// ## Examples:\n\n///\n", "file_path": "src/lib.rs", "rank": 19, "score": 119359.40026094837 }, { "content": "#[inline]\n\npub fn retry(\n\n scid: &ConnectionId, dcid: &ConnectionId, new_scid: &ConnectionId,\n\n token: &[u8], version: u32, out: &mut [u8],\n\n) -> Result<usize> {\n\n packet::retry(scid, dcid, new_scid, token, version, out)\n\n}\n\n\n\n/// Returns true if the given protocol version is supported.\n", "file_path": "src/lib.rs", "rank": 20, "score": 119359.40026094837 }, { "content": "/// Handles newly writable streams.\n\nfn handle_writable(client: &mut Client, stream_id: u64) {\n\n let conn = &mut client.conn;\n\n let http3_conn = &mut client.http3_conn.as_mut().unwrap();\n\n\n\n debug!(\"{} stream {} is writable\", conn.trace_id(), stream_id);\n\n\n\n if 
!client.partial_responses.contains_key(&stream_id) {\n\n return;\n\n }\n\n\n\n let resp = client.partial_responses.get_mut(&stream_id).unwrap();\n\n\n\n if let Some(ref headers) = resp.headers {\n\n match http3_conn.send_response(conn, stream_id, &headers, false) {\n\n Ok(_) => (),\n\n\n\n Err(quiche::h3::Error::StreamBlocked) => {\n\n return;\n\n },\n\n\n", "file_path": "examples/http3-server.rs", "rank": 21, "score": 118335.10619017924 }, { "content": "/// Returns how long the variable-length integer is, given its first byte.\n\npub fn varint_parse_len(first: u8) -> usize {\n\n match first >> 6 {\n\n 0 => 1,\n\n 1 => 2,\n\n 2 => 4,\n\n 3 => 8,\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn get_u() {\n\n let d = [\n\n 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,\n\n ];\n\n\n", "file_path": "src/octets.rs", "rank": 22, "score": 117242.48259531653 }, { "content": "pub fn decrypt_hdr(\n\n b: &mut octets::OctetsMut, hdr: &mut Header, aead: &crypto::Open,\n\n) -> Result<()> {\n\n let mut first = {\n\n let (first_buf, _) = b.split_at(1)?;\n\n first_buf.as_ref()[0]\n\n };\n\n\n\n let mut pn_and_sample = b.peek_bytes_mut(MAX_PKT_NUM_LEN + SAMPLE_LEN)?;\n\n\n\n let (mut ciphertext, sample) = pn_and_sample.split_at(MAX_PKT_NUM_LEN)?;\n\n\n\n let ciphertext = ciphertext.as_mut();\n\n\n\n let mask = aead.new_mask(sample.as_ref())?;\n\n\n\n if Header::is_long(first) {\n\n first ^= mask[0] & 0x0f;\n\n } else {\n\n first ^= mask[0] & 0x1f;\n", "file_path": "src/packet.rs", "rank": 23, "score": 115079.18786207151 }, { "content": "pub fn negotiate_version(\n\n scid: &[u8], dcid: &[u8], out: &mut [u8],\n\n) -> Result<usize> {\n\n let mut b = octets::OctetsMut::with_slice(out);\n\n\n\n let first = rand::rand_u8() | FORM_BIT;\n\n\n\n b.put_u8(first)?;\n\n b.put_u32(0)?;\n\n\n\n b.put_u8(scid.len() as u8)?;\n\n b.put_bytes(&scid)?;\n\n b.put_u8(dcid.len() as u8)?;\n\n b.put_bytes(&dcid)?;\n\n 
b.put_u32(crate::PROTOCOL_VERSION_DRAFT29)?;\n\n b.put_u32(crate::PROTOCOL_VERSION_DRAFT28)?;\n\n b.put_u32(crate::PROTOCOL_VERSION_DRAFT27)?;\n\n\n\n Ok(b.off())\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 24, "score": 115079.18786207151 }, { "content": "pub fn encrypt_pkt(\n\n b: &mut octets::OctetsMut, pn: u64, pn_len: usize, payload_len: usize,\n\n payload_offset: usize, aead: &crypto::Seal,\n\n) -> Result<usize> {\n\n let (mut header, mut payload) = b.split_at(payload_offset)?;\n\n\n\n // Encrypt + authenticate payload.\n\n let ciphertext = payload.slice(payload_len)?;\n\n aead.seal_with_u64_counter(pn, header.as_ref(), ciphertext)?;\n\n\n\n encrypt_hdr(&mut header, pn_len, ciphertext, aead)?;\n\n\n\n Ok(payload_offset + payload_len)\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 25, "score": 115079.18786207151 }, { "content": "pub fn encrypt_hdr(\n\n b: &mut octets::OctetsMut, pn_len: usize, payload: &[u8], aead: &crypto::Seal,\n\n) -> Result<()> {\n\n let sample = &payload[4 - pn_len..16 + (4 - pn_len)];\n\n\n\n let mask = aead.new_mask(sample)?;\n\n\n\n let (mut first, mut rest) = b.split_at(1)?;\n\n\n\n let first = first.as_mut();\n\n\n\n if Header::is_long(first[0]) {\n\n first[0] ^= mask[0] & 0x0f;\n\n } else {\n\n first[0] ^= mask[0] & 0x1f;\n\n }\n\n\n\n let pn_buf = rest.slice_last(pn_len)?;\n\n for i in 0..pn_len {\n\n pn_buf[i] ^= mask[i + 1];\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 26, "score": 115079.18786207151 }, { "content": "#[inline]\n\npub fn negotiate_version(\n\n scid: &ConnectionId, dcid: &ConnectionId, out: &mut [u8],\n\n) -> Result<usize> {\n\n packet::negotiate_version(scid, dcid, out)\n\n}\n\n\n\n/// Writes a stateless retry packet.\n\n///\n\n/// The `scid` and `dcid` parameters are the source connection ID and the\n\n/// destination connection ID extracted from the received client's Initial\n\n/// packet, while `new_scid` is the server's new source connection ID and\n\n/// `token` is the 
address validation token the client needs to echo back.\n\n///\n\n/// The application is responsible for generating the address validation\n\n/// token to be sent to the client, and verifying tokens sent back by the\n\n/// client. The generated token should include the `dcid` parameter, such\n\n/// that it can be later extracted from the token and passed to the\n\n/// [`accept()`] function as its `odcid` parameter.\n\n///\n\n/// [`accept()`]: fn.accept.html\n", "file_path": "src/lib.rs", "rank": 27, "score": 115079.18786207151 }, { "content": "/// Generates an HTTP/3 GREASE variable length integer.\n\nfn grease_value() -> u64 {\n\n let n = super::rand::rand_u64_uniform(148_764_065_110_560_899);\n\n 31 * n + 33\n\n}\n\n\n\n#[doc(hidden)]\n\npub mod testing {\n\n use super::*;\n\n\n\n use crate::testing;\n\n\n\n /// Session is an HTTP/3 test helper structure. It holds a client, server\n\n /// and pipe that allows them to communicate.\n\n ///\n\n /// `default()` creates a session with some sensible default\n\n /// configuration. 
`with_configs()` allows for providing a specific\n\n /// configuration.\n\n ///\n\n /// `handshake()` performs all the steps needed to establish an HTTP/3\n\n /// connection.\n", "file_path": "src/h3/mod.rs", "rank": 28, "score": 111361.63949973555 }, { "content": "pub fn derive_hdr_key(\n\n aead: Algorithm, secret: &[u8], out: &mut [u8],\n\n) -> Result<()> {\n\n const LABEL: &[u8] = b\"quic hp\";\n\n\n\n let key_len = aead.key_len();\n\n\n\n if key_len > out.len() {\n\n return Err(Error::CryptoFail);\n\n }\n\n\n\n let secret = hkdf::Prk::new_less_safe(aead.get_ring_digest(), secret);\n\n hkdf_expand_label(&secret, LABEL, &mut out[..key_len])\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 29, "score": 111240.17167358939 }, { "content": "pub fn encode_crypto_header(\n\n offset: u64, length: u64, b: &mut octets::OctetsMut,\n\n) -> Result<()> {\n\n b.put_varint(0x06)?;\n\n\n\n b.put_varint(offset)?;\n\n\n\n // Always encode length field as 2-byte varint.\n\n b.put_varint_with_len(length, 2)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 30, "score": 111240.17167358939 }, { "content": "pub fn encode_stream_header(\n\n stream_id: u64, offset: u64, length: u64, fin: bool,\n\n b: &mut octets::OctetsMut,\n\n) -> Result<()> {\n\n let mut ty: u8 = 0x08;\n\n\n\n // Always encode offset.\n\n ty |= 0x04;\n\n\n\n // Always encode length.\n\n ty |= 0x02;\n\n\n\n if fin {\n\n ty |= 0x01;\n\n }\n\n\n\n b.put_varint(u64::from(ty))?;\n\n\n\n b.put_varint(stream_id)?;\n\n b.put_varint(offset)?;\n\n\n\n // Always encode length field as 2-byte varint.\n\n b.put_varint_with_len(length, 2)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 31, "score": 111240.17167358939 }, { "content": "pub fn verify_retry_integrity(\n\n b: &octets::OctetsMut, odcid: &[u8], version: u32,\n\n) -> Result<()> {\n\n let tag = compute_retry_integrity_tag(b, odcid, version)?;\n\n\n\n ring::constant_time::verify_slices_are_equal(\n\n 
&b.as_ref()[..aead::AES_128_GCM.tag_len()],\n\n tag.as_ref(),\n\n )\n\n .map_err(|_| Error::CryptoFail)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 32, "score": 111240.17167358939 }, { "content": "pub fn derive_pkt_key(\n\n aead: Algorithm, secret: &[u8], out: &mut [u8],\n\n) -> Result<()> {\n\n const LABEL: &[u8] = b\"quic key\";\n\n\n\n let key_len = aead.key_len();\n\n\n\n if key_len > out.len() {\n\n return Err(Error::CryptoFail);\n\n }\n\n\n\n let secret = hkdf::Prk::new_less_safe(aead.get_ring_digest(), secret);\n\n hkdf_expand_label(&secret, LABEL, &mut out[..key_len])\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 33, "score": 111240.17167358939 }, { "content": "pub fn connect(\n\n args: ClientArgs, conn_args: CommonArgs,\n\n output_sink: impl FnMut(String) + 'static,\n\n) -> Result<(), ClientError> {\n\n let mut buf = [0; 65535];\n\n let mut out = [0; MAX_DATAGRAM_SIZE];\n\n\n\n let output_sink =\n\n Rc::new(RefCell::new(output_sink)) as Rc<RefCell<dyn FnMut(_)>>;\n\n\n\n // Setup the event loop.\n\n let poll = mio::Poll::new().unwrap();\n\n let mut events = mio::Events::with_capacity(1024);\n\n\n\n // We'll only connect to the first server provided in URL list.\n\n let connect_url = &args.urls[0];\n\n\n\n // Resolve server address.\n\n let peer_addr = if let Some(addr) = &args.connect_to {\n\n addr.parse().unwrap()\n", "file_path": "tools/apps/src/client.rs", "rank": 34, "score": 111240.17167358939 }, { "content": "pub fn derive_pkt_iv(\n\n aead: Algorithm, secret: &[u8], out: &mut [u8],\n\n) -> Result<()> {\n\n const LABEL: &[u8] = b\"quic iv\";\n\n\n\n let nonce_len = aead.nonce_len();\n\n\n\n if nonce_len > out.len() {\n\n return Err(Error::CryptoFail);\n\n }\n\n\n\n let secret = hkdf::Prk::new_less_safe(aead.get_ring_digest(), secret);\n\n hkdf_expand_label(&secret, LABEL, &mut out[..nonce_len])\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 35, "score": 111240.17167358939 }, { "content": "pub fn decrypt_pkt<'a>(\n\n b: 
&'a mut octets::OctetsMut, pn: u64, pn_len: usize, payload_len: usize,\n\n aead: &crypto::Open,\n\n) -> Result<octets::Octets<'a>> {\n\n let payload_offset = b.off();\n\n\n\n let (header, mut payload) = b.split_at(payload_offset)?;\n\n\n\n let payload_len = payload_len\n\n .checked_sub(pn_len)\n\n .ok_or(Error::InvalidPacket)?;\n\n\n\n let mut ciphertext = payload.peek_bytes_mut(payload_len)?;\n\n\n\n let payload_len =\n\n aead.open_with_u64_counter(pn, header.as_ref(), ciphertext.as_mut())?;\n\n\n\n Ok(b.get_bytes(payload_len)?)\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 36, "score": 108312.97165778355 }, { "content": "pub fn derive_initial_key_material(\n\n cid: &[u8], version: u32, is_server: bool,\n\n) -> Result<(Open, Seal)> {\n\n let mut secret = [0; 32];\n\n\n\n let aead = Algorithm::AES128_GCM;\n\n\n\n let key_len = aead.key_len();\n\n let nonce_len = aead.nonce_len();\n\n\n\n let initial_secret = derive_initial_secret(&cid, version);\n\n\n\n // Client.\n\n let mut client_key = vec![0; key_len];\n\n let mut client_iv = vec![0; nonce_len];\n\n let mut client_hp_key = vec![0; key_len];\n\n\n\n derive_client_initial_secret(&initial_secret, &mut secret)?;\n\n derive_pkt_key(aead, &secret, &mut client_key)?;\n\n derive_pkt_iv(aead, &secret, &mut client_iv)?;\n", "file_path": "src/crypto.rs", "rank": 37, "score": 107777.47868122248 }, { "content": "pub fn run(\n\n test: &mut crate::Http3Test, peer_addr: std::net::SocketAddr,\n\n verify_peer: bool, idle_timeout: u64, max_data: u64,\n\n) -> Result<(), Http3TestError> {\n\n const MAX_DATAGRAM_SIZE: usize = 1350;\n\n\n\n let mut buf = [0; 65535];\n\n let mut out = [0; MAX_DATAGRAM_SIZE];\n\n\n\n let max_stream_data = max_data;\n\n\n\n let version = if let Some(v) = std::env::var_os(\"QUIC_VERSION\") {\n\n match v.to_str() {\n\n Some(\"current\") => quiche::PROTOCOL_VERSION,\n\n\n\n Some(v) => u32::from_str_radix(v, 16).unwrap(),\n\n\n\n _ => 0xbaba_baba,\n\n }\n\n } else {\n", "file_path": 
"tools/http3_test/src/runner.rs", "rank": 38, "score": 107777.47868122248 }, { "content": "fn parse_stream_frame(ty: u64, b: &mut octets::Octets) -> Result<Frame> {\n\n let first = ty as u8;\n\n\n\n let stream_id = b.get_varint()?;\n\n\n\n let offset = if first & 0x04 != 0 {\n\n b.get_varint()?\n\n } else {\n\n 0\n\n };\n\n\n\n let len = if first & 0x02 != 0 {\n\n b.get_varint()? as usize\n\n } else {\n\n b.cap()\n\n };\n\n\n\n if offset + len as u64 >= MAX_STREAM_SIZE {\n\n return Err(Error::InvalidFrame);\n\n }\n\n\n\n let fin = first & 0x01 != 0;\n\n\n\n let data = b.get_bytes(len)?;\n\n let data = stream::RangeBuf::from(data.as_ref(), offset, fin);\n\n\n\n Ok(Frame::Stream { stream_id, data })\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 39, "score": 106519.29342567621 }, { "content": "fn parse_ack_frame(_ty: u64, b: &mut octets::Octets) -> Result<Frame> {\n\n let largest_ack = b.get_varint()?;\n\n let ack_delay = b.get_varint()?;\n\n let block_count = b.get_varint()?;\n\n let ack_block = b.get_varint()?;\n\n\n\n if largest_ack < ack_block {\n\n return Err(Error::InvalidFrame);\n\n }\n\n\n\n let mut smallest_ack = largest_ack - ack_block;\n\n\n\n let mut ranges = ranges::RangeSet::default();\n\n\n\n #[allow(clippy::range_plus_one)]\n\n ranges.insert(smallest_ack..largest_ack + 1);\n\n\n\n for _i in 0..block_count {\n\n let gap = b.get_varint()?;\n\n\n", "file_path": "src/frame.rs", "rank": 40, "score": 106519.29342567621 }, { "content": "fn parse_datagram_frame(ty: u64, b: &mut octets::Octets) -> Result<Frame> {\n\n let first = ty as u8;\n\n\n\n let len = if first & 0x01 != 0 {\n\n b.get_varint()? 
as usize\n\n } else {\n\n b.cap()\n\n };\n\n\n\n let data = b.get_bytes(len)?;\n\n\n\n Ok(Frame::Datagram {\n\n data: Vec::from(data.buf()),\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/frame.rs", "rank": 41, "score": 106519.29342567621 }, { "content": "fn lookup_static<T: NameValue>(h: &T) -> Option<(u64, bool)> {\n\n let mut name_match = None;\n\n\n\n for (i, e) in super::static_table::STATIC_TABLE.iter().enumerate() {\n\n // Match header name first.\n\n if h.name().len() == e.0.len() && h.name().eq_ignore_ascii_case(e.0) {\n\n // No header value to match, return early.\n\n if e.1.is_empty() {\n\n return Some((i as u64, false));\n\n }\n\n\n\n // Match header value.\n\n if h.value().len() == e.1.len() && h.value() == e.1 {\n\n return Some((i as u64, true));\n\n }\n\n\n\n // Remember name-only match for later, but keep searching.\n\n name_match = Some((i as u64, false));\n\n }\n\n }\n\n\n\n name_match\n\n}\n\n\n", "file_path": "src/h3/qpack/encoder.rs", "rank": 42, "score": 105813.26389426872 }, { "content": "struct AppData(*mut c_void);\n\nunsafe impl Send for AppData {}\n\nunsafe impl Sync for AppData {}\n\n\n\n#[no_mangle]\n\npub extern fn quiche_conn_stream_init_application_data(\n\n conn: &mut Connection, stream_id: u64, data: *mut c_void,\n\n) -> c_int {\n\n match conn.stream_init_application_data(stream_id, AppData(data)) {\n\n Ok(_) => 0,\n\n\n\n Err(e) => e.to_c() as c_int,\n\n }\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn quiche_conn_stream_application_data(\n\n conn: &mut Connection, stream_id: u64,\n\n) -> *mut c_void {\n\n match conn.stream_application_data(stream_id) {\n", "file_path": "src/ffi.rs", "rank": 43, "score": 104956.51798577645 }, { "content": "/// Makes a buffered writer for a qlog.\n\npub fn make_qlog_writer(\n\n dir: &std::ffi::OsStr, role: &str, id: &str,\n\n) -> std::io::BufWriter<std::fs::File> {\n\n let mut path = std::path::PathBuf::from(dir);\n\n let filename = format!(\"{}-{}.qlog\", 
role, id);\n\n path.push(filename);\n\n\n\n match std::fs::File::create(&path) {\n\n Ok(f) => std::io::BufWriter::new(f),\n\n\n\n Err(e) => panic!(\n\n \"Error creating qlog file attempted path was {:?}: {}\",\n\n path, e\n\n ),\n\n }\n\n}\n\n\n", "file_path": "tools/apps/src/common.rs", "rank": 44, "score": 104638.3601032777 }, { "content": "pub fn rand_u8() -> u8 {\n\n let mut buf = [0; 1];\n\n\n\n rand_bytes(&mut buf);\n\n\n\n buf[0]\n\n}\n\n\n", "file_path": "src/rand.rs", "rank": 45, "score": 104473.95546930144 }, { "content": "fn encode_str(v: &str, prefix: usize, b: &mut octets::OctetsMut) -> Result<()> {\n\n let len = super::huffman::encode_output_length(v.as_bytes(), false)?;\n\n\n\n encode_int(len as u64, 0x80, prefix, b)?;\n\n\n\n super::huffman::encode(v.as_bytes(), b, false)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::octets;\n\n\n\n #[test]\n\n fn encode_int1() {\n\n let expected = [0b01010];\n\n let mut encoded = [0; 1];\n", "file_path": "src/h3/qpack/encoder.rs", "rank": 46, "score": 103687.8578572225 }, { "content": "pub fn decode(b: &mut octets::Octets) -> Result<Vec<u8>> {\n\n // Max compression ratio is >= 0.5\n\n let mut out = Vec::with_capacity(b.len() << 1);\n\n\n\n let mut decoder = Decoder::new();\n\n\n\n while b.cap() > 0 {\n\n let byte = b.get_u8()?;\n\n\n\n if let Some(b) = decoder.decode4(byte >> 4)? {\n\n out.push(b);\n\n }\n\n\n\n if let Some(b) = decoder.decode4(byte & 0xf)? 
{\n\n out.push(b);\n\n }\n\n }\n\n\n\n if !decoder.is_final() {\n\n return Err(Error::InvalidHuffmanEncoding);\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/h3/qpack/huffman/mod.rs", "rank": 47, "score": 103387.41243328732 }, { "content": "fn on_packet_sent(r: &mut Recovery, sent_bytes: usize, now: Instant) {\n\n // See https://github.com/torvalds/linux/commit/30927520dbae297182990bb21d08762bcc35ce1d\n\n // First transmit when no packets in flight\n\n let cubic = &mut r.cubic_state;\n\n\n\n if let Some(last_sent_time) = cubic.last_sent_time {\n\n if r.bytes_in_flight == 0 {\n\n let delta = now - last_sent_time;\n\n\n\n // We were application limited (idle) for a while.\n\n // Shift epoch start to keep cwnd growth to cubic curve.\n\n if let Some(recovery_start_time) = r.congestion_recovery_start_time {\n\n if delta.as_nanos() > 0 {\n\n r.congestion_recovery_start_time =\n\n Some(recovery_start_time + delta);\n\n }\n\n }\n\n }\n\n }\n\n\n\n cubic.last_sent_time = Some(now);\n\n\n\n reno::on_packet_sent(r, sent_bytes, now);\n\n}\n\n\n", "file_path": "src/recovery/cubic.rs", "rank": 48, "score": 99533.82104122743 }, { "content": "/// Handles incoming HTTP/0.9 requests.\n\nfn handle_stream(client: &mut Client, stream_id: u64, buf: &[u8], root: &str) {\n\n let conn = &mut client.conn;\n\n\n\n if buf.len() > 4 && &buf[..4] == b\"GET \" {\n\n let uri = &buf[4..buf.len()];\n\n let uri = String::from_utf8(uri.to_vec()).unwrap();\n\n let uri = String::from(uri.lines().next().unwrap());\n\n let uri = std::path::Path::new(&uri);\n\n let mut path = std::path::PathBuf::from(root);\n\n\n\n for c in uri.components() {\n\n if let std::path::Component::Normal(v) = c {\n\n path.push(v)\n\n }\n\n }\n\n\n\n info!(\n\n \"{} got GET request for {:?} on stream {}\",\n\n conn.trace_id(),\n\n path,\n", "file_path": "examples/server.rs", "rank": 49, "score": 98644.4766549701 }, { "content": "fn collapse_cwnd(r: &mut Recovery) {\n\n let cubic = &mut r.cubic_state;\n\n\n\n 
r.congestion_recovery_start_time = None;\n\n\n\n cubic.w_last_max = r.congestion_window as f64;\n\n cubic.w_max = cubic.w_last_max;\n\n\n\n // 4.7 Timeout - reduce ssthresh based on BETA_CUBIC\n\n r.ssthresh = (r.congestion_window as f64 * BETA_CUBIC) as usize;\n\n r.ssthresh = cmp::max(\n\n r.ssthresh,\n\n r.max_datagram_size * recovery::MINIMUM_WINDOW_PACKETS,\n\n );\n\n\n\n cubic.cwnd_inc = 0;\n\n\n\n reno::collapse_cwnd(r);\n\n}\n\n\n", "file_path": "src/recovery/cubic.rs", "rank": 50, "score": 96265.14894531148 }, { "content": "pub fn stdout_sink(out: String) {\n\n print!(\"{}\", out);\n\n}\n\n\n\n/// ALPN helpers.\n\n///\n\n/// This module contains constants and functions for working with ALPN.\n\npub mod alpns {\n\n pub const HTTP_09: [&str; 4] = [\"hq-29\", \"hq-28\", \"hq-27\", \"http/0.9\"];\n\n pub const HTTP_3: [&str; 3] = [\"h3-29\", \"h3-28\", \"h3-27\"];\n\n pub const SIDUCK: [&str; 2] = [\"siduck\", \"siduck-00\"];\n\n\n\n pub fn length_prefixed(alpns: &[&str]) -> Vec<u8> {\n\n let mut out = Vec::new();\n\n\n\n for s in alpns {\n\n out.push(s.len() as u8);\n\n out.extend_from_slice(s.as_bytes());\n\n }\n\n\n", "file_path": "tools/apps/src/common.rs", "rank": 51, "score": 93039.36192124774 }, { "content": "// The ring HKDF expand() API does not accept an arbitrary output length, so we\n\n// need to hide the `usize` length as part of a type that implements the trait\n\n// `ring::hkdf::KeyType` in order to trick ring into accepting it.\n\nstruct ArbitraryOutputLen(usize);\n\n\n\nimpl hkdf::KeyType for ArbitraryOutputLen {\n\n fn len(&self) -> usize {\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn derive_initial_secrets() {\n\n let dcid = [0x83, 0x94, 0xc8, 0xf0, 0x3e, 0x51, 0x57, 0x08];\n\n\n\n let mut secret = [0; 32];\n\n let mut pkt_key = [0; 16];\n\n let mut pkt_iv = [0; 12];\n\n let mut hdr_key = [0; 16];\n", "file_path": "src/crypto.rs", "rank": 52, "score": 91322.6042816411 }, { "content": "fn 
sub_abs(lhs: Duration, rhs: Duration) -> Duration {\n\n if lhs > rhs {\n\n lhs - rhs\n\n } else {\n\n rhs - lhs\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn lookup_cc_algo_ok() {\n\n let algo = CongestionControlAlgorithm::from_str(\"reno\").unwrap();\n\n assert_eq!(algo, CongestionControlAlgorithm::Reno);\n\n }\n\n\n\n #[test]\n\n fn lookup_cc_algo_bad() {\n", "file_path": "src/recovery/mod.rs", "rank": 53, "score": 90846.17139198579 }, { "content": "fn dump_json(reqs: &[Http3Request], output_sink: &mut dyn FnMut(String)) {\n\n let mut out = String::new();\n\n\n\n writeln!(out, \"{{\").unwrap();\n\n writeln!(out, \" \\\"entries\\\": [\").unwrap();\n\n let mut reqs = reqs.iter().peekable();\n\n\n\n while let Some(req) = reqs.next() {\n\n writeln!(out, \" {{\").unwrap();\n\n writeln!(out, \" \\\"request\\\":{{\").unwrap();\n\n writeln!(out, \" \\\"headers\\\":[\").unwrap();\n\n\n\n let mut req_hdrs = req.hdrs.iter().peekable();\n\n while let Some(h) = req_hdrs.next() {\n\n writeln!(out, \" {{\").unwrap();\n\n writeln!(out, \" \\\"name\\\": \\\"{}\\\",\", h.name()).unwrap();\n\n writeln!(out, \" \\\"value\\\": \\\"{}\\\"\", h.value()).unwrap();\n\n\n\n if req_hdrs.peek().is_some() {\n\n writeln!(out, \" }},\").unwrap();\n", "file_path": "tools/apps/src/common.rs", "rank": 54, "score": 90335.34605439627 }, { "content": "fn get_ex_data_from_ptr<'a, T>(ptr: *mut SSL, idx: c_int) -> Option<&'a mut T> {\n\n unsafe {\n\n let data = SSL_get_ex_data(ptr, idx) as *mut T;\n\n data.as_mut()\n\n }\n\n}\n\n\n", "file_path": "src/tls.rs", "rank": 55, "score": 80172.83178057513 }, { "content": "fn make_nonce(iv: &[u8], counter: u64) -> aead::Nonce {\n\n let mut nonce = [0; aead::NONCE_LEN];\n\n nonce.copy_from_slice(&iv);\n\n\n\n // XOR the last bytes of the IV with the counter. 
This is equivalent to\n\n // left-padding the counter with zero bytes.\n\n for (a, b) in nonce[4..].iter_mut().zip(counter.to_be_bytes().iter()) {\n\n *a ^= b;\n\n }\n\n\n\n aead::Nonce::assume_unique_for_key(nonce)\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 56, "score": 78940.57351940114 }, { "content": "fn decode_str(b: &mut octets::Octets) -> Result<String> {\n\n let first = b.peek_u8()?;\n\n\n\n let huff = first & 0x80 == 0x80;\n\n\n\n let len = decode_int(b, 7)? as usize;\n\n\n\n let mut val = b.get_bytes(len)?;\n\n\n\n let val = if huff {\n\n super::huffman::decode(&mut val)?\n\n } else {\n\n val.to_vec()\n\n };\n\n\n\n let val = String::from_utf8(val).map_err(|_| Error::InvalidHeaderValue)?;\n\n Ok(val)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/h3/qpack/decoder.rs", "rank": 57, "score": 76532.22795260107 }, { "content": "fn get_cipher_from_ptr(cipher: *const SSL_CIPHER) -> Result<crypto::Algorithm> {\n\n let cipher_id = unsafe { SSL_CIPHER_get_id(cipher) };\n\n\n\n let alg = match cipher_id {\n\n 0x0300_1301 => crypto::Algorithm::AES128_GCM,\n\n 0x0300_1302 => crypto::Algorithm::AES256_GCM,\n\n 0x0300_1303 => crypto::Algorithm::ChaCha20_Poly1305,\n\n _ => return Err(Error::TlsFail),\n\n };\n\n\n\n Ok(alg)\n\n}\n\n\n\nextern fn set_read_secret(\n\n ssl: *mut SSL, level: crypto::Level, cipher: *const SSL_CIPHER,\n\n secret: *const u8, secret_len: usize,\n\n) -> c_int {\n\n let conn =\n\n match get_ex_data_from_ptr::<Connection>(ssl, *QUICHE_EX_DATA_INDEX) {\n\n Some(v) => v,\n", "file_path": "src/tls.rs", "rank": 58, "score": 74386.60695722426 }, { "content": "fn derive_client_initial_secret(prk: &hkdf::Prk, out: &mut [u8]) -> Result<()> {\n\n const LABEL: &[u8] = b\"client in\";\n\n hkdf_expand_label(prk, LABEL, out)\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 59, "score": 72471.3553697297 }, { "content": "fn derive_server_initial_secret(prk: &hkdf::Prk, out: &mut [u8]) -> Result<()> {\n\n const LABEL: &[u8] = b\"server in\";\n\n 
hkdf_expand_label(prk, LABEL, out)\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 60, "score": 72471.3553697297 }, { "content": "fn map_result_ptr<'a, T>(bssl_result: *const T) -> Result<&'a T> {\n\n match unsafe { bssl_result.as_ref() } {\n\n Some(v) => Ok(v),\n\n None => Err(Error::TlsFail),\n\n }\n\n}\n\n\n", "file_path": "src/tls.rs", "rank": 61, "score": 70991.15931306467 }, { "content": "fn lookup_static(idx: u64) -> Result<(&'static str, &'static str)> {\n\n if idx >= super::static_table::STATIC_TABLE.len() as u64 {\n\n return Err(Error::InvalidStaticTableIndex);\n\n }\n\n\n\n Ok(super::static_table::STATIC_TABLE[idx as usize])\n\n}\n\n\n", "file_path": "src/h3/qpack/decoder.rs", "rank": 62, "score": 70432.79204249688 }, { "content": "struct Client {\n\n conn: std::pin::Pin<Box<quiche::Connection>>,\n\n\n\n partial_responses: HashMap<u64, PartialResponse>,\n\n}\n\n\n", "file_path": "examples/server.rs", "rank": 63, "score": 67484.14812924554 }, { "content": "struct Logger {\n\n cb: extern fn(line: *const u8, argp: *mut c_void),\n\n argp: std::sync::atomic::AtomicPtr<c_void>,\n\n}\n\n\n\nimpl log::Log for Logger {\n\n fn enabled(&self, _metadata: &log::Metadata) -> bool {\n\n true\n\n }\n\n\n\n fn log(&self, record: &log::Record) {\n\n let line = format!(\"{}: {}\\0\", record.target(), record.args());\n\n (self.cb)(line.as_ptr(), self.argp.load(atomic::Ordering::Relaxed));\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn quiche_enable_debug_logging(\n", "file_path": "src/ffi.rs", "rank": 64, "score": 67484.14812924554 }, { "content": "fn main() {\n\n let mut buf = [0; 65535];\n\n let mut out = [0; MAX_DATAGRAM_SIZE];\n\n\n\n let mut args = std::env::args();\n\n\n\n let cmd = &args.next().unwrap();\n\n\n\n if args.len() != 0 {\n\n println!(\"Usage: {}\", cmd);\n\n println!(\"\\nSee tools/apps/ for more complete implementations.\");\n\n return;\n\n }\n\n\n\n // Setup the event loop.\n\n let poll = mio::Poll::new().unwrap();\n\n let 
mut events = mio::Events::with_capacity(1024);\n\n\n\n // Create the UDP listening socket, and register it with the event loop.\n\n let socket = net::UdpSocket::bind(\"127.0.0.1:4433\").unwrap();\n", "file_path": "examples/server.rs", "rank": 65, "score": 66948.48112164758 }, { "content": "fn main() {\n\n let mut buf = [0; 65535];\n\n let mut out = [0; MAX_DATAGRAM_SIZE];\n\n\n\n let mut args = std::env::args();\n\n\n\n let cmd = &args.next().unwrap();\n\n\n\n if args.len() != 1 {\n\n println!(\"Usage: {} URL\", cmd);\n\n println!(\"\\nSee tools/apps/ for more complete implementations.\");\n\n return;\n\n }\n\n\n\n let url = url::Url::parse(&args.next().unwrap()).unwrap();\n\n\n\n // Setup the event loop.\n\n let poll = mio::Poll::new().unwrap();\n\n let mut events = mio::Events::with_capacity(1024);\n\n\n", "file_path": "examples/client.rs", "rank": 66, "score": 66948.48112164758 }, { "content": "fn main() {\n\n if cfg!(feature = \"boringssl-vendored\") && !cfg!(feature = \"boring-sys\") {\n\n let bssl_dir = std::env::var(\"QUICHE_BSSL_PATH\").unwrap_or_else(|_| {\n\n let mut cfg = get_boringssl_cmake_config();\n\n\n\n if cfg!(feature = \"fuzzing\") {\n\n cfg.cxxflag(\"-DBORINGSSL_UNSAFE_DETERMINISTIC_MODE\")\n\n .cxxflag(\"-DBORINGSSL_UNSAFE_FUZZER_MODE\");\n\n }\n\n\n\n cfg.build_target(\"bssl\").build().display().to_string()\n\n });\n\n\n\n let build_path = get_boringssl_platform_output_path();\n\n let build_dir = format!(\"{}/build/{}\", bssl_dir, build_path);\n\n println!(\"cargo:rustc-link-search=native={}\", build_dir);\n\n\n\n println!(\"cargo:rustc-link-lib=static=crypto\");\n\n println!(\"cargo:rustc-link-lib=static=ssl\");\n\n }\n", "file_path": "src/build.rs", "rank": 67, "score": 66948.48112164758 }, { "content": "struct Client {\n\n conn: std::pin::Pin<Box<quiche::Connection>>,\n\n\n\n http3_conn: Option<quiche::h3::Connection>,\n\n\n\n partial_responses: HashMap<u64, PartialResponse>,\n\n}\n\n\n", "file_path": "examples/http3-server.rs", "rank": 68, 
"score": 65077.53952273157 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nstruct TransportParams {\n\n pub original_destination_connection_id: Option<ConnectionId<'static>>,\n\n pub max_idle_timeout: u64,\n\n pub stateless_reset_token: Option<Vec<u8>>,\n\n pub max_udp_payload_size: u64,\n\n pub initial_max_data: u64,\n\n pub initial_max_stream_data_bidi_local: u64,\n\n pub initial_max_stream_data_bidi_remote: u64,\n\n pub initial_max_stream_data_uni: u64,\n\n pub initial_max_streams_bidi: u64,\n\n pub initial_max_streams_uni: u64,\n\n pub ack_delay_exponent: u64,\n\n pub max_ack_delay: u64,\n\n pub disable_active_migration: bool,\n\n // pub preferred_address: ...,\n\n pub active_conn_id_limit: u64,\n\n pub initial_source_connection_id: Option<ConnectionId<'static>>,\n\n pub retry_source_connection_id: Option<ConnectionId<'static>>,\n\n pub max_datagram_frame_size: Option<u64>,\n\n}\n", "file_path": "src/lib.rs", "rank": 69, "score": 65077.53952273157 }, { "content": "struct PartialResponse {\n\n body: Vec<u8>,\n\n\n\n written: usize,\n\n}\n\n\n", "file_path": "examples/server.rs", "rank": 70, "score": 65077.53952273157 }, { "content": "fn main() {\n\n let mut buf = [0; 65535];\n\n let mut out = [0; MAX_DATAGRAM_SIZE];\n\n\n\n let mut args = std::env::args();\n\n\n\n let cmd = &args.next().unwrap();\n\n\n\n if args.len() != 0 {\n\n println!(\"Usage: {}\", cmd);\n\n println!(\"\\nSee tools/apps/ for more complete implementations.\");\n\n return;\n\n }\n\n\n\n // Setup the event loop.\n\n let poll = mio::Poll::new().unwrap();\n\n let mut events = mio::Events::with_capacity(1024);\n\n\n\n // Create the UDP listening socket, and register it with the event loop.\n\n let socket = net::UdpSocket::bind(\"127.0.0.1:4433\").unwrap();\n", "file_path": "examples/http3-server.rs", "rank": 71, "score": 64550.90714144552 }, { "content": "fn main() {\n\n let mut args = std::env::args();\n\n\n\n let cmd = &args.next().unwrap();\n\n\n\n if args.len() != 1 {\n\n 
println!(\"Usage: {} FILE\", cmd);\n\n return;\n\n }\n\n\n\n let file = File::open(&args.next().unwrap()).unwrap();\n\n let file = BufReader::new(&file);\n\n\n\n let mut enc = h3::qpack::Encoder::new();\n\n\n\n let mut headers: Vec<h3::Header> = Vec::new();\n\n\n\n let mut stream_id = 1u64;\n\n\n\n for line in file.lines().map(Result::unwrap) {\n", "file_path": "examples/qpack-encode.rs", "rank": 72, "score": 64550.90714144552 }, { "content": "fn main() {\n\n // TODO: parse params from file name.\n\n\n\n let mut args = std::env::args();\n\n\n\n let cmd = &args.next().unwrap();\n\n\n\n if args.len() != 1 {\n\n println!(\"Usage: {} FILE\", cmd);\n\n return;\n\n }\n\n\n\n let mut file = File::open(&args.next().unwrap()).unwrap();\n\n\n\n let mut dec = qpack::Decoder::new();\n\n\n\n loop {\n\n let mut stream_id: [u8; 8] = [0; 8];\n\n let mut len: [u8; 4] = [0; 4];\n\n\n", "file_path": "examples/qpack-decode.rs", "rank": 73, "score": 64550.90714144552 }, { "content": "fn main() {\n\n let mut buf = [0; 65535];\n\n let mut out = [0; MAX_DATAGRAM_SIZE];\n\n\n\n let mut args = std::env::args();\n\n\n\n let cmd = &args.next().unwrap();\n\n\n\n if args.len() != 1 {\n\n println!(\"Usage: {} URL\", cmd);\n\n println!(\"\\nSee tools/apps/ for more complete implementations.\");\n\n return;\n\n }\n\n\n\n let url = url::Url::parse(&args.next().unwrap()).unwrap();\n\n\n\n // Setup the event loop.\n\n let poll = mio::Poll::new().unwrap();\n\n let mut events = mio::Events::with_capacity(1024);\n\n\n", "file_path": "examples/http3-client.rs", "rank": 74, "score": 64550.90714144552 }, { "content": "struct PartialResponse {\n\n headers: Option<Vec<quiche::h3::Header>>,\n\n\n\n body: Vec<u8>,\n\n\n\n written: usize,\n\n}\n\n\n", "file_path": "examples/http3-server.rs", "rank": 75, "score": 62932.47955613405 }, { "content": "struct QpackStreams {\n\n pub encoder_stream_id: Option<u64>,\n\n pub decoder_stream_id: Option<u64>,\n\n}\n\n\n\n/// An HTTP/3 connection.\n\npub struct Connection 
{\n\n is_server: bool,\n\n\n\n next_request_stream_id: u64,\n\n next_uni_stream_id: u64,\n\n\n\n streams: HashMap<u64, stream::Stream>,\n\n\n\n local_settings: ConnectionSettings,\n\n peer_settings: ConnectionSettings,\n\n\n\n control_stream_id: Option<u64>,\n\n peer_control_stream_id: Option<u64>,\n\n\n", "file_path": "src/h3/mod.rs", "rank": 76, "score": 62932.47955613405 }, { "content": "#[repr(C)]\n\n#[allow(non_camel_case_types)]\n\nstruct SSL_QUIC_METHOD {\n\n set_read_secret: extern fn(\n\n ssl: *mut SSL,\n\n level: crypto::Level,\n\n cipher: *const SSL_CIPHER,\n\n secret: *const u8,\n\n secret_len: usize,\n\n ) -> c_int,\n\n\n\n set_write_secret: extern fn(\n\n ssl: *mut SSL,\n\n level: crypto::Level,\n\n cipher: *const SSL_CIPHER,\n\n secret: *const u8,\n\n secret_len: usize,\n\n ) -> c_int,\n\n\n\n add_handshake_data: extern fn(\n\n ssl: *mut SSL,\n\n level: crypto::Level,\n", "file_path": "src/tls.rs", "rank": 77, "score": 62932.47955613405 }, { "content": "struct ConnectionSettings {\n\n pub max_header_list_size: Option<u64>,\n\n pub qpack_max_table_capacity: Option<u64>,\n\n pub qpack_blocked_streams: Option<u64>,\n\n pub h3_datagram: Option<u64>,\n\n}\n\n\n", "file_path": "src/h3/mod.rs", "rank": 78, "score": 62932.47955613405 }, { "content": "/// Maps an `Error` to `Error::Done`, or itself.\n\n///\n\n/// When a received packet that hasn't yet been authenticated triggers a failure\n\n/// it should, in most cases, be ignored, instead of raising a connection error,\n\n/// to avoid potential man-in-the-middle and man-on-the-side attacks.\n\n///\n\n/// However, if no other packet was previously received, the connection should\n\n/// indeed be closed as the received packet might just be network background\n\n/// noise, and it shouldn't keep resources occupied indefinitely.\n\n///\n\n/// This function maps an error to `Error::Done` to ignore a packet failure\n\n/// without aborting the connection, except when no other packet was previously\n\n/// received, 
in which case the error itself is returned, but only on the\n\n/// server-side as the client will already have armed the idle timer.\n\n///\n\n/// This must only be used for errors preceding packet authentication. Failures\n\n/// happening after a packet has been authenticated should still cause the\n\n/// connection to be aborted.\n\nfn drop_pkt_on_err(\n\n e: Error, recv_count: usize, is_server: bool, trace_id: &str,\n\n) -> Error {\n\n // On the server, if no other packet has been successflully processed, abort\n\n // the connection to avoid keeping the connection open when only junk is\n\n // received.\n\n if is_server && recv_count == 0 {\n\n return e;\n\n }\n\n\n\n trace!(\"{} dropped invalid packet\", trace_id);\n\n\n\n // Ignore other invalid packets that haven't been authenticated to prevent\n\n // man-in-the-middle and man-on-the-side attacks.\n\n Error::Done\n\n}\n\n\n\n/// Statistics about the connection.\n\n///\n\n/// A connections's statistics can be collected using the [`stats()`] method.\n", "file_path": "src/lib.rs", "rank": 79, "score": 62416.49150500655 }, { "content": "fn on_packet_acked(\n\n r: &mut Recovery, packet: &Acked, epoch: packet::Epoch, now: Instant,\n\n) {\n\n r.bytes_in_flight = r.bytes_in_flight.saturating_sub(packet.size);\n\n\n\n if r.in_congestion_recovery(packet.time_sent) {\n\n return;\n\n }\n\n\n\n if r.app_limited {\n\n return;\n\n }\n\n\n\n if r.congestion_window < r.ssthresh {\n\n // Slow start.\n\n let cwnd_inc = cmp::min(\n\n packet.size,\n\n r.max_datagram_size * recovery::ABC_L -\n\n cmp::min(\n\n r.bytes_acked_sl,\n", "file_path": "src/recovery/reno.rs", "rank": 80, "score": 62413.89992391672 }, { "content": "fn congestion_event(\n\n r: &mut Recovery, time_sent: Instant, epoch: packet::Epoch, now: Instant,\n\n) {\n\n let in_congestion_recovery = r.in_congestion_recovery(time_sent);\n\n\n\n // Start a new congestion event if packet was sent after the\n\n // start of the previous congestion recovery period.\n\n if 
!in_congestion_recovery {\n\n r.congestion_recovery_start_time = Some(now);\n\n\n\n // Fast convergence\n\n if r.cubic_state.w_max < r.cubic_state.w_last_max {\n\n r.cubic_state.w_last_max = r.cubic_state.w_max;\n\n r.cubic_state.w_max =\n\n r.cubic_state.w_max as f64 * (1.0 + BETA_CUBIC) / 2.0;\n\n } else {\n\n r.cubic_state.w_last_max = r.cubic_state.w_max;\n\n }\n\n\n\n r.cubic_state.w_max = r.congestion_window as f64;\n", "file_path": "src/recovery/cubic.rs", "rank": 81, "score": 62413.89992391672 }, { "content": "fn hkdf_expand_label(\n\n prk: &hkdf::Prk, label: &[u8], out: &mut [u8],\n\n) -> Result<()> {\n\n const LABEL_PREFIX: &[u8] = b\"tls13 \";\n\n\n\n let out_len = (out.len() as u16).to_be_bytes();\n\n let label_len = (LABEL_PREFIX.len() + label.len()) as u8;\n\n\n\n let info = [&out_len, &[label_len][..], LABEL_PREFIX, label, &[0][..]];\n\n\n\n prk.expand(&info, ArbitraryOutputLen(out.len()))\n\n .map_err(|_| Error::CryptoFail)?\n\n .fill(out)\n\n .map_err(|_| Error::CryptoFail)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 82, "score": 62413.89992391672 }, { "content": "fn write_pkg_config() {\n\n use std::io::prelude::*;\n\n\n\n let profile = std::env::var(\"PROFILE\").unwrap();\n\n let manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let target_dir = format!(\"{}/target/{}\", manifest_dir, profile);\n\n\n\n let out_path = std::path::Path::new(&target_dir).join(\"quiche.pc\");\n\n let mut out_file = std::fs::File::create(&out_path).unwrap();\n\n\n\n let include_dir = format!(\"{}/include\", manifest_dir);\n\n let version = std::env::var(\"CARGO_PKG_VERSION\").unwrap();\n\n\n\n let output = format!(\n\n \"# quiche\n\n\n\nincludedir={}\n\nlibdir={}\n\n\n\nName: quiche\n", "file_path": "src/build.rs", "rank": 83, "score": 62413.89992391672 }, { "content": "fn congestion_event(\n\n r: &mut Recovery, time_sent: Instant, epoch: packet::Epoch, now: Instant,\n\n) {\n\n // Start a new congestion event if packet was sent 
after the\n\n // start of the previous congestion recovery period.\n\n if !r.in_congestion_recovery(time_sent) {\n\n r.congestion_recovery_start_time = Some(now);\n\n\n\n r.congestion_window = (r.congestion_window as f64 *\n\n recovery::LOSS_REDUCTION_FACTOR)\n\n as usize;\n\n\n\n r.congestion_window = cmp::max(\n\n r.congestion_window,\n\n r.max_datagram_size * recovery::MINIMUM_WINDOW_PACKETS,\n\n );\n\n\n\n r.bytes_acked_ca = (r.congestion_window as f64 *\n\n recovery::LOSS_REDUCTION_FACTOR) as usize;\n\n\n\n r.ssthresh = r.congestion_window;\n\n\n\n if r.hystart.in_lss(epoch) {\n\n r.hystart.congestion_event();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/recovery/reno.rs", "rank": 84, "score": 62413.89992391672 }, { "content": "/// Builds an HTTP/3 response given a request.\n\nfn build_response(\n\n root: &str, request: &[quiche::h3::Header],\n\n) -> (Vec<quiche::h3::Header>, Vec<u8>) {\n\n let mut file_path = std::path::PathBuf::from(root);\n\n let mut path = std::path::Path::new(\"\");\n\n let mut method = \"\";\n\n\n\n // Look for the request's path and method.\n\n for hdr in request {\n\n match hdr.name() {\n\n \":path\" => {\n\n path = std::path::Path::new(hdr.value());\n\n },\n\n\n\n \":method\" => {\n\n method = hdr.value();\n\n },\n\n\n\n _ => (),\n\n }\n", "file_path": "examples/http3-server.rs", "rank": 85, "score": 62413.89992391672 }, { "content": "fn log_ssl_error() {\n\n let err = [0; 1024];\n\n\n\n unsafe {\n\n let e = ERR_peek_error();\n\n ERR_error_string_n(e, err.as_ptr(), err.len());\n\n }\n\n\n\n trace!(\"{}\", std::str::from_utf8(&err).unwrap());\n\n}\n\n\n\nextern {\n\n // SSL_METHOD\n\n fn TLS_method() -> *const SSL_METHOD;\n\n\n\n // SSL_CTX\n\n fn SSL_CTX_new(method: *const SSL_METHOD) -> *mut SSL_CTX;\n\n fn SSL_CTX_free(ctx: *mut SSL_CTX);\n\n\n\n fn SSL_CTX_use_certificate_chain_file(\n", "file_path": "src/tls.rs", "rank": 86, "score": 62413.89992391672 }, { "content": "/// Handles incoming HTTP/3 requests.\n\nfn handle_request(\n\n 
client: &mut Client, stream_id: u64, headers: &[quiche::h3::Header],\n\n root: &str,\n\n) {\n\n let conn = &mut client.conn;\n\n let http3_conn = &mut client.http3_conn.as_mut().unwrap();\n\n\n\n info!(\n\n \"{} got request {:?} on stream id {}\",\n\n conn.trace_id(),\n\n headers,\n\n stream_id\n\n );\n\n\n\n // We decide the response based on headers alone, so stop reading the\n\n // request stream so that any body is ignored and pointless Data events\n\n // are not generated.\n\n conn.stream_shutdown(stream_id, quiche::Shutdown::Read, 0)\n\n .unwrap();\n\n\n", "file_path": "examples/http3-server.rs", "rank": 87, "score": 62413.89992391672 }, { "content": "fn on_packet_acked(\n\n r: &mut Recovery, packet: &Acked, epoch: packet::Epoch, now: Instant,\n\n) {\n\n let in_congestion_recovery = r.in_congestion_recovery(packet.time_sent);\n\n\n\n r.bytes_in_flight = r.bytes_in_flight.saturating_sub(packet.size);\n\n\n\n if in_congestion_recovery {\n\n return;\n\n }\n\n\n\n if r.app_limited {\n\n return;\n\n }\n\n\n\n if r.congestion_window < r.ssthresh {\n\n // Slow start.\n\n let cwnd_inc = cmp::min(\n\n packet.size,\n\n r.max_datagram_size * recovery::ABC_L -\n", "file_path": "src/recovery/cubic.rs", "rank": 88, "score": 62413.89992391672 }, { "content": "/// Represents an HTTP/3 formatted request.\n\nstruct Http3Request {\n\n url: url::Url,\n\n cardinal: u64,\n\n stream_id: Option<u64>,\n\n hdrs: Vec<quiche::h3::Header>,\n\n response_hdrs: Vec<quiche::h3::Header>,\n\n response_body: Vec<u8>,\n\n response_body_max: usize,\n\n response_writer: Option<std::io::BufWriter<std::fs::File>>,\n\n}\n\n\n\npub struct Http09Conn {\n\n stream_id: u64,\n\n reqs_sent: usize,\n\n reqs_complete: usize,\n\n reqs: Vec<Http09Request>,\n\n output_sink: Rc<RefCell<dyn FnMut(String)>>,\n\n}\n\n\n\nimpl Default for Http09Conn {\n", "file_path": "tools/apps/src/common.rs", "rank": 89, "score": 61008.528319696874 }, { "content": "struct Decoder {\n\n state: usize,\n\n maybe_eos: 
bool,\n\n}\n\n\n\nimpl Decoder {\n\n fn new() -> Decoder {\n\n Decoder {\n\n state: 0,\n\n maybe_eos: false,\n\n }\n\n }\n\n\n\n // Decodes 4 bits\n\n fn decode4(&mut self, input: u8) -> Result<Option<u8>> {\n\n const MAYBE_EOS: u8 = 1;\n\n const DECODED: u8 = 2;\n\n const ERROR: u8 = 4;\n\n\n\n // (next-state, byte, flags)\n", "file_path": "src/h3/qpack/huffman/mod.rs", "rank": 90, "score": 61008.528319696874 }, { "content": "#[derive(Default)]\n\nstruct RateSample {\n\n delivery_rate: u64,\n\n\n\n interval: Duration,\n\n\n\n delivered: usize,\n\n\n\n prior_delivered: usize,\n\n\n\n prior_time: Option<Instant>,\n\n\n\n send_elapsed: Duration,\n\n\n\n ack_elapsed: Duration,\n\n}\n\n\n\nimpl std::fmt::Debug for RateSample {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"delivery_rate={:?} \", self.delivery_rate)?;\n\n write!(f, \"interval={:?} \", self.interval)?;\n", "file_path": "src/recovery/delivery_rate.rs", "rank": 91, "score": 61008.528319696874 }, { "content": "fn compute_retry_integrity_tag(\n\n b: &octets::OctetsMut, odcid: &[u8], version: u32,\n\n) -> Result<aead::Tag> {\n\n const RETRY_INTEGRITY_KEY: [u8; 16] = [\n\n 0xcc, 0xce, 0x18, 0x7e, 0xd0, 0x9a, 0x09, 0xd0, 0x57, 0x28, 0x15, 0x5a,\n\n 0x6c, 0xb9, 0x6b, 0xe1,\n\n ];\n\n\n\n const RETRY_INTEGRITY_NONCE: [u8; aead::NONCE_LEN] = [\n\n 0xe5, 0x49, 0x30, 0xf9, 0x7f, 0x21, 0x36, 0xf0, 0x53, 0x0a, 0x8c, 0x1c,\n\n ];\n\n\n\n const RETRY_INTEGRITY_KEY_OLD: [u8; 16] = [\n\n 0x4d, 0x32, 0xec, 0xdb, 0x2a, 0x21, 0x33, 0xc8, 0x41, 0xe4, 0x04, 0x3d,\n\n 0xf2, 0x7d, 0x44, 0x30,\n\n ];\n\n\n\n const RETRY_INTEGRITY_NONCE_OLD: [u8; aead::NONCE_LEN] = [\n\n 0x4d, 0x16, 0x11, 0xd0, 0x55, 0x13, 0xa5, 0x52, 0xc5, 0x87, 0xd5, 0x75,\n\n ];\n", "file_path": "src/packet.rs", "rank": 92, "score": 60497.171374382815 }, { "content": "fn parse_push_promise(\n\n payload_length: u64, b: &mut octets::Octets,\n\n) -> Result<Frame> {\n\n let push_id = b.get_varint()?;\n\n let header_block_length 
= payload_length - octets::varint_len(push_id) as u64;\n\n let header_block = b.get_bytes(header_block_length as usize)?.to_vec();\n\n\n\n Ok(Frame::PushPromise {\n\n push_id,\n\n header_block,\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn data() {\n\n let mut d = [42; 128];\n", "file_path": "src/h3/frame.rs", "rank": 93, "score": 60497.171374382815 }, { "content": "fn parse_settings_frame(\n\n b: &mut octets::Octets, settings_length: usize,\n\n) -> Result<Frame> {\n\n let mut max_header_list_size = None;\n\n let mut qpack_max_table_capacity = None;\n\n let mut qpack_blocked_streams = None;\n\n let mut h3_datagram = None;\n\n\n\n // Reject SETTINGS frames that are too long.\n\n if settings_length > MAX_SETTINGS_PAYLOAD_SIZE {\n\n return Err(super::Error::ExcessiveLoad);\n\n }\n\n\n\n while b.off() < settings_length {\n\n let setting_ty = b.get_varint()?;\n\n let settings_val = b.get_varint()?;\n\n\n\n match setting_ty {\n\n SETTINGS_QPACK_MAX_TABLE_CAPACITY => {\n\n qpack_max_table_capacity = Some(settings_val);\n", "file_path": "src/h3/frame.rs", "rank": 94, "score": 60497.171374382815 }, { "content": "fn encode_int(\n\n mut v: u64, first: u8, prefix: usize, b: &mut octets::OctetsMut,\n\n) -> Result<()> {\n\n let mask = 2u64.pow(prefix as u32) - 1;\n\n\n\n // Encode I on N bits.\n\n if v < mask {\n\n b.put_u8(first | v as u8)?;\n\n return Ok(());\n\n }\n\n\n\n // Encode (2^N - 1) on N bits.\n\n b.put_u8(first | mask as u8)?;\n\n\n\n v -= mask;\n\n\n\n while v >= 128 {\n\n // Encode (I % 128 + 128) on 8 bits.\n\n b.put_u8((v % 128 + 128) as u8)?;\n\n\n\n v >>= 7;\n\n }\n\n\n\n // Encode I on 8 bits.\n\n b.put_u8(v as u8)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/h3/qpack/encoder.rs", "rank": 95, "score": 60497.171374382815 }, { "content": "#[allow(non_camel_case_types)]\n\n#[repr(transparent)]\n\nstruct STACK_OF(c_void);\n\n\n", "file_path": "src/tls.rs", "rank": 96, "score": 59956.18553552777 }, { "content": 
"#[allow(non_camel_case_types)]\n\n#[repr(transparent)]\n\nstruct SSL(c_void);\n\n\n", "file_path": "src/tls.rs", "rank": 97, "score": 59956.18553552777 }, { "content": "#[allow(non_camel_case_types)]\n\n#[repr(transparent)]\n\n#[cfg(windows)]\n\nstruct X509(c_void);\n\n\n", "file_path": "src/tls.rs", "rank": 98, "score": 59956.18553552777 }, { "content": "/// Validates a stateless retry token.\n\n///\n\n/// This checks that the ticket includes the `\"quiche\"` static string, and that\n\n/// the client IP address matches the address stored in the ticket.\n\n///\n\n/// Note that this function is only an example and doesn't do any cryptographic\n\n/// authenticate of the token. *It should not be used in production system*.\n\nfn validate_token<'a>(\n\n src: &net::SocketAddr, token: &'a [u8],\n\n) -> Option<quiche::ConnectionId<'a>> {\n\n if token.len() < 6 {\n\n return None;\n\n }\n\n\n\n if &token[..6] != b\"quiche\" {\n\n return None;\n\n }\n\n\n\n let token = &token[6..];\n\n\n\n let addr = match src.ip() {\n\n std::net::IpAddr::V4(a) => a.octets().to_vec(),\n\n std::net::IpAddr::V6(a) => a.octets().to_vec(),\n\n };\n\n\n\n if token.len() < addr.len() || &token[..addr.len()] != addr.as_slice() {\n\n return None;\n\n }\n\n\n\n let token = &token[addr.len()..];\n\n\n\n Some(quiche::ConnectionId::from_ref(&token[..]))\n\n}\n\n\n", "file_path": "examples/server.rs", "rank": 99, "score": 59509.42435406105 } ]
Rust
src/ketos/string.rs
salewski/ketos
011287590ebeb6e6a199e34c8b9da14e2daeb1ce
use std::str::CharIndices; use crate::lexer::{BytePos, Span}; use crate::parser::{ParseError, ParseErrorKind}; pub fn parse_byte(s: &str, pos: BytePos) -> Result<(u8, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Single); r.parse_byte() } pub fn parse_byte_string(s: &str, pos: BytePos) -> Result<(Vec<u8>, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Normal); r.parse_byte_string() } pub fn parse_raw_byte_string(s: &str, pos: BytePos) -> Result<(Vec<u8>, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Raw); r.parse_byte_string() } pub fn parse_char(s: &str, pos: BytePos) -> Result<(char, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Single); r.parse_char() } pub fn parse_string(s: &str, pos: BytePos) -> Result<(String, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Normal); r.parse_string() } pub fn parse_raw_string(s: &str, pos: BytePos) -> Result<(String, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Raw); r.parse_string() } #[derive(Copy, Clone, Debug, Eq, PartialEq)] enum StringType { Single, Normal, Raw, } struct StringReader<'a> { chars: CharIndices<'a>, start: BytePos, last_index: usize, end_index: usize, ty: StringType, } impl<'a> StringReader<'a> { fn new(input: &str, pos: BytePos, ty: StringType) -> StringReader { StringReader{ chars: input.char_indices(), start: pos, last_index: 0, end_index: 0, ty, } } fn parse_byte(&mut self) -> Result<(u8, usize), ParseError> { self.expect('#', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; self.expect('b', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; self.expect('\'', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; let ch = match self.consume_char()? 
{ '\'' => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidChar('\''))), '\\' => self.parse_byte_escape()?, ch if ch.is_ascii() => ch as u8, ch => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidByte(ch))) }; self.expect('\'', |slf, _| ParseError::new( slf.span_from(slf.start, 1), ParseErrorKind::UnterminatedChar))?; Ok((ch, self.last_index + 1)) } fn parse_char(&mut self) -> Result<(char, usize), ParseError> { self.expect('#', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; self.expect('\'', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; let ch = match self.consume_char()? { '\'' => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidChar('\''))), '\\' => self.parse_char_escape()?, ch => ch }; self.expect('\'', |slf, _| ParseError::new( slf.span_from(slf.start, 1), ParseErrorKind::UnterminatedChar))?; Ok((ch, self.last_index + 1)) } fn parse_byte_string(&mut self) -> Result<(Vec<u8>, usize), ParseError> { let mut res = Vec::new(); let n_hash = if self.ty == StringType::Raw { self.parse_raw_prefix()? } else { self.expect('"', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; 0 }; loop { match self.consume_char()? { '"' => { if n_hash == 0 || self.check_end(n_hash)? { break; } else { res.push(b'"'); } } '\\' if self.ty == StringType::Normal => { if let Some(ch) = self.parse_byte_string_escape()? { res.push(ch); } } ch if ch.is_ascii() => { res.push(ch as u8); } ch => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidByte(ch))) } } Ok((res, self.last_index + 1)) } fn parse_string(&mut self) -> Result<(String, usize), ParseError> { let mut res = String::new(); let n_hash = if self.ty == StringType::Raw { self.parse_raw_prefix()? } else { self.expect('"', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; 0usize }; loop { match self.consume_char()? 
{ '"' => { if n_hash == 0 || self.check_end(n_hash)? { break; } else { res.push('"'); } } '\\' if self.ty == StringType::Normal => { if let Some(ch) = self.parse_string_escape()? { res.push(ch); } } ch => res.push(ch) } } Ok((res, self.last_index + 1)) } fn parse_raw_prefix(&mut self) -> Result<usize, ParseError> { let mut n_hash = 0; self.expect('r', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; loop { match self.consume_char()? { '#' => n_hash += 1, '"' => break, ch => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidChar(ch))) } } Ok(n_hash) } fn check_end(&mut self, n_hash: usize) -> Result<bool, ParseError> { let save_chars = self.chars.clone(); let save_index = self.last_index; for _ in 0..n_hash { if self.consume_char()? != '#' { self.chars = save_chars; self.last_index = save_index; return Ok(false); } } Ok(true) } fn consume_char(&mut self) -> Result<char, ParseError> { match self.chars.next() { Some((ind, '\r')) => { self.last_index = ind; self.end_index = ind + 1; match self.chars.next() { Some((ind, '\n')) => { self.last_index = ind; self.end_index = ind + 1; Ok('\n') } _ => Err(ParseError::new( self.span_from(ind as BytePos, 1), ParseErrorKind::InvalidChar('\r'))) } } Some((ind, ch)) => { self.last_index = ind; self.end_index = ind + ch.len_utf8(); Ok(ch) } None => Err(ParseError::new(self.span_from(self.start, 1), if self.ty == StringType::Single { ParseErrorKind::UnterminatedChar } else { ParseErrorKind::UnterminatedString })) } } fn expect<F>(&mut self, ch: char, f: F) -> Result<(), ParseError> where F: FnOnce(&Self, char) -> ParseError { let c = self.consume_char()?; if c == ch { Ok(()) } else { Err(f(self, c)) } } fn parse_byte_escape(&mut self) -> Result<u8, ParseError> { match self.consume_char()? 
{ '\\' => Ok(b'\\'), '\'' => Ok(b'\''), '"' => Ok(b'"'), '0' => Ok(b'\0'), 'n' => Ok(b'\n'), 'r' => Ok(b'\r'), 't' => Ok(b'\t'), 'u' => Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidByteEscape('u'))), 'x' => self.parse_hex_byte_escape(), ch => Err(ParseError::new(self.span_one(), ParseErrorKind::UnknownCharEscape(ch))) } } fn parse_char_escape(&mut self) -> Result<char, ParseError> { match self.consume_char()? { '\\' => Ok('\\'), '\'' => Ok('\''), '"' => Ok('"'), '0' => Ok('\0'), 'n' => Ok('\n'), 'r' => Ok('\r'), 't' => Ok('\t'), 'u' => self.parse_unicode(), 'x' => self.parse_hex_char_escape(), ch => Err(ParseError::new(self.span_one(), ParseErrorKind::UnknownCharEscape(ch))) } } fn parse_byte_string_escape(&mut self) -> Result<Option<u8>, ParseError> { match self.peek_char()? { '\r' | '\n' => { self.consume_char()?; loop { match self.peek_char()? { ' ' | '\t' => { self.consume_char()?; }, _ => break } } Ok(None) } _ => self.parse_byte_escape().map(Some) } } fn parse_string_escape(&mut self) -> Result<Option<char>, ParseError> { match self.peek_char()? { '\r' | '\n' => { self.consume_char()?; loop { match self.peek_char()? { ' ' | '\t' => { self.consume_char()?; }, _ => break } } Ok(None) } _ => self.parse_char_escape().map(Some) } } fn parse_hex_byte_escape(&mut self) -> Result<u8, ParseError> { let a = match self.consume_char()? { ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; let b = match self.consume_char()? { ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; Ok(((a.to_digit(16).unwrap() << 4) | b.to_digit(16).unwrap()) as u8) } fn parse_hex_char_escape(&mut self) -> Result<char, ParseError> { let a = match self.consume_char()? { ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; let b = match self.consume_char()? 
{ ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; if a > '7' { return Err(ParseError::new(self.back_span(1, 2), ParseErrorKind::InvalidNumericEscape('x'))); } Ok(((a.to_digit(16).unwrap() << 4) | b.to_digit(16).unwrap()) as u8 as char) } fn parse_unicode(&mut self) -> Result<char, ParseError> { self.expect('{', |slf, _| ParseError::new(slf.span_one(), ParseErrorKind::InvalidNumericEscape('u')))?; let mut n_digits = 0; let mut n_pad = 0; let mut total = 0; loop { match self.consume_char()? { '_' => { n_pad += 1; } '}' if n_digits != 0 => break, ch if ch.is_digit(16) => { if n_digits == 6 { return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidNumericEscape(ch))); } n_digits += 1; total = (total << 4) | ch.to_digit(16).unwrap(); } ch => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape(ch))), } } ::std::char::from_u32(total) .ok_or_else(|| ParseError::new( self.back_span(n_digits + n_pad, n_digits + n_pad), ParseErrorKind::InvalidNumericEscape('u'))) } fn peek_char(&mut self) -> Result<char, ParseError> { match self.chars.clone().next() { Some((_, ch)) => Ok(ch), None => Err(ParseError::new(self.span_from(self.start, 1), if self.ty == StringType::Single { ParseErrorKind::UnterminatedChar } else { ParseErrorKind::UnterminatedString })) } } fn back_span(&self, back: BytePos, len: BytePos) -> Span { let start = self.start + self.last_index as BytePos - back; Span{lo: start, hi: start + len} } fn span_one(&self) -> Span { Span{ lo: self.start + self.last_index as BytePos, hi: self.start + self.end_index as BytePos, } } fn span_from(&self, start: BytePos, len: BytePos) -> Span { Span{lo: start, hi: start + len} } } #[cfg(test)] mod test { use crate::parser::ParseError; use super::{StringReader, StringType}; fn parse_bytes(s: &str) -> Result<Vec<u8>, ParseError> { let mut r = StringReader::new(s, 0, StringType::Normal); r.parse_byte_string().map(|r| 
r.0) } fn parse_char(s: &str) -> Result<char, ParseError> { let mut r = StringReader::new(s, 0, StringType::Single); r.parse_char().map(|r| r.0) } fn parse_string(s: &str, ty: StringType) -> Result<String, ParseError> { let mut r = StringReader::new(s, 0, ty); r.parse_string().map(|r| r.0) } #[test] fn test_parse_string() { let n = StringType::Normal; let r = StringType::Raw; assert_eq!(parse_char(r"#'a'").unwrap(), 'a'); assert_eq!(parse_char(r"#'\''").unwrap(), '\''); assert_eq!(parse_char(r"#'\x7f'").unwrap(), '\x7f'); assert_eq!(parse_char(r"#'\u{1234}'").unwrap(), '\u{1234}'); assert_eq!(parse_char(r"#'\u{1_2__3_4}'").unwrap(), '\u{1234}'); assert_eq!(parse_string(r#""foo""#, n).unwrap(), "foo"); assert_eq!(parse_string(r#"r"foo""#, r).unwrap(), "foo"); assert_eq!(parse_string(r##"r#""foo""#"##, r).unwrap(), r#""foo""#); } #[test] fn test_errors() { assert_eq!(parse_bytes(r#""abc\xff""#).unwrap(), b"abc\xff"); assert!(parse_bytes(r#""abc\u{ff}""#).is_err()); } }
use std::str::CharIndices; use crate::lexer::{BytePos, Span}; use crate::parser::{ParseError, ParseErrorKind}; pub fn parse_byte(s: &str, pos: BytePos) -> Result<(u8, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Single); r.parse_byte() } pub fn parse_byte_string(s: &str, pos: BytePos) -> Result<(Vec<u8>, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Normal); r.parse_byte_string() } pub fn parse_raw_byte_string(s: &str, pos: BytePos) -> Result<(Vec<u8>, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Raw); r.parse_byte_string() } pub fn parse_char(s: &str, pos: BytePos) -> Result<(char, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Single); r.parse_char() } pub fn parse_string(s: &str, pos: BytePos) -> Result<(String, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Normal); r.parse_string() } pub fn parse_raw_string(s: &str, pos: BytePos) -> Result<(String, usize), ParseError> { let mut r = StringReader::new(s, pos, StringType::Raw); r.parse_string() } #[derive(Copy, Clone, Debug, Eq, PartialEq)] enum StringType { Single, Normal, Raw, } struct StringReader<'a> { chars: CharIndices<'a>, start: BytePos, last_index: usize, end_index: usize, ty: StringType, } impl<'a> StringReader<'a> { fn new(input: &str, pos: BytePos, ty: StringType) -> StringReader { StringReader{ chars: input.char_indices(), start: pos, last_index: 0, end_index: 0, ty, } } fn parse_byte(&mut self) -> Result<(u8, usize), ParseError> { self.expect('#', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; self.expect('b', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; self.expect('\'', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; let ch = match self.consume_char()? 
{ '\'' => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidChar('\''))), '\\' => self.parse_byte_escape()?, ch if ch.is_ascii() => ch as u8, ch => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidByte(ch))) }; self.expect('\'', |slf, _| ParseError::new( slf.span_from(slf.start, 1), ParseErrorKind::UnterminatedChar))?; Ok((ch, self.last_index + 1)) } fn parse_char(&mut self) -> Result<(char, usize), ParseError> { self.expect('#', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; self.expect('\'', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; let ch = match self.consume_char()? { '\'' => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidChar('\''))), '\\' => self.parse_char_escape()?, ch => ch }; self.expect('\'', |slf, _| ParseError::new( slf.span_from(slf.start, 1), ParseErrorKind::UnterminatedChar))?; Ok((ch, self.last_index + 1)) } fn parse_byte_string(&mut self) -> Result<(Vec<u8>, usize), ParseError> { let mut res = Vec::new(); let n_hash = if self.ty == StringType::Raw { self.parse_raw_prefix()? } else { self.expect('"', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; 0 }; loop { match self.consume_char()? { '"' => { if n_hash == 0 || self.check_end(n_hash)? { break; } else { res.push(b'"'); } } '\\' if self.ty == StringType::Normal => { if let Some(ch) = self.parse_byte_string_escape()? { res.push(ch); } } ch if ch.is_ascii() => { res.push(ch as u8); } ch => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidByte(ch))) } } Ok((res, self.last_index + 1)) } fn parse_string(&mut self) -> Result<(String, usize), ParseError> { let mut res = String::new(); let n_hash = if self.ty == StringType::Raw { self.parse_raw_prefix()? } else { self.expect('"', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; 0usize }; loop { match self.consume_char()? 
{ '"' => { if n_hash == 0 || self.check_end(n_hash)? { break; } else { res.push('"'); } } '\\' if self.ty == StringType::Normal => { if let Some(ch) = self.parse_string_escape()? { res.push(ch); } } ch => res.push(ch) } } Ok((res, self.last_index + 1)) } fn parse_raw_prefix(&mut self) -> Result<usize, ParseError> { let mut n_hash = 0; self.expect('r', |slf, ch| ParseError::new(slf.span_one(), ParseErrorKind::InvalidChar(ch)))?; loop { match self.consume_char()? { '#' => n_hash += 1, '"' => break, ch => return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidChar(ch))) } } Ok(n_hash) } fn check_end(&mut self, n_hash: usize) -> Result<bool, ParseError> { let save_chars = self.chars.clone(); let save_index = self.last_index; for _ in 0..n_hash { if self.consume_char()? != '#' { self.chars = save_chars; self.last_index = save_index; return Ok(false); } } Ok(true) } fn consume_char(&mut self) -> Result<char, ParseError> { match self.chars.next() { Some((ind, '\r')) => { self.last_index = ind; self.end_index = ind + 1; match self.chars.next() { Some((ind, '\n')) => { self.last_index = ind; self.end_index = ind + 1; Ok('\n') } _ => Err(ParseError::new( self.span_from(ind as BytePos, 1), ParseErrorKind::InvalidChar('\r'))) } } Some((ind, ch)) => { self.last_index = ind; self.end_index = ind + ch.len_utf8(); Ok(ch) } None => Err(ParseError::new(self.span_from(self.start, 1), if self.ty == StringType::Single { ParseErrorKind::UnterminatedChar } else { ParseErrorKind::UnterminatedString })) } } fn expect<F>(&mut self, ch: char, f: F) -> Result<(), ParseError> where F: FnOnce(&Self, char) -> ParseError { let c = self.consume_char()?; if c == ch { Ok(()) } else { Err(f(self, c)) } } fn parse_byte_escape(&mut self) -> Result<u8, ParseError> { match self.consume_char()? 
{ '\\' => Ok(b'\\'), '\'' => Ok(b'\''), '"' => Ok(b'"'), '0' => Ok(b'\0'), 'n' => Ok(b'\n'), 'r' => Ok(b'\r'), 't' => Ok(b'\t'), 'u' => Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidByteEscape('u'))), 'x' => self.parse_hex_byte_escape(), ch => Err(ParseError::new(self.span_one(), ParseErrorKind::UnknownCharEscape(ch))) } } fn parse_char_escape(&mut self) -> Result<char, ParseError> { match self.consume_char()? { '\\' => Ok('\\'), '\'' => Ok('\''), '"' => Ok('"'), '0' => Ok('\0'), 'n' => Ok('\n'), 'r' => Ok('\r'), 't' => Ok('\t'), 'u' => self.parse_unicode(), 'x' => self.parse_hex_char_escape(), ch => Err(ParseError::new(self.span_one(), ParseErrorKind::UnknownCharEscape(ch))) } } fn parse_byte_string_escape(&mut self) -> Result<Option<u8>, ParseError> { match self.peek_char()? { '\r' | '\n' => { self.consume_char()?; loop { match self.peek_char()? { ' ' | '\t' => { self.consume_char()?; }, _ => break } } Ok(None) } _ => self.parse_byte_escape().map(Some) } } fn parse_string_escape(&mut self) -> Result<Option<char>, ParseError> { match self.peek_char()? { '\r' | '\n' => { self.consume_char()?; loop { match self.peek_char()? { ' ' | '\t' => { self.consume_char()?; }, _ => break } } Ok(None) } _ => self.parse_char_escape().map(Some) } } fn parse_hex_byte_escape(&mut self) -> Result<u8, ParseError> { let a = match self.consume_char()? { ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; let b = match self.consume_char()? { ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; Ok(((a.to_digit(16).unwrap() << 4) | b.to_digit(16).unwrap()) as u8) } fn parse_hex_char_escape(&mut self) -> Result<char, ParseError> { let a = match self.consume_char()? { ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; let b = match self.consume_char()? 
{ ch if !ch.is_digit(16) => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape('x'))), ch => ch }; if a > '7' { return Err(ParseError::new(self.back_span(1, 2), ParseErrorKind::InvalidNumericEscape('x'))); } Ok(((a.to_digit(16).unwrap() << 4) | b.to_digit(16).unwrap()) as u8 as char) } fn parse_unicode(&mut self) -> Result<char, ParseError> { self.expect('{', |slf, _| ParseError::new(slf.span_one(), ParseErrorKind::InvalidNumericEscape('u')))?; let mut n_digits = 0; let mut n_pad = 0; let mut total = 0; loop { match self.consume_char()? { '_' => { n_pad += 1; } '}' if n_digits != 0 => break, ch if ch.is_digit(16) => { if n_digits == 6 { return Err(ParseError::new(self.span_one(), ParseErrorKind::InvalidNumericEscape(ch))); } n_digits += 1; total = (total << 4) | ch.to_digit(16).unwrap(); } ch => return Err(ParseError::new( self.span_one(), ParseErrorKind::InvalidNumericEscape(ch))), } } ::std::char::from_u32(total) .ok_or_else(|| ParseError::new( self.back_span(n_digits + n_pad, n_digits + n_pad), ParseErrorKind::InvalidNumericEscape('u'))) } fn peek_char(&mut self) -> Result<char, ParseError> {
} fn back_span(&self, back: BytePos, len: BytePos) -> Span { let start = self.start + self.last_index as BytePos - back; Span{lo: start, hi: start + len} } fn span_one(&self) -> Span { Span{ lo: self.start + self.last_index as BytePos, hi: self.start + self.end_index as BytePos, } } fn span_from(&self, start: BytePos, len: BytePos) -> Span { Span{lo: start, hi: start + len} } } #[cfg(test)] mod test { use crate::parser::ParseError; use super::{StringReader, StringType}; fn parse_bytes(s: &str) -> Result<Vec<u8>, ParseError> { let mut r = StringReader::new(s, 0, StringType::Normal); r.parse_byte_string().map(|r| r.0) } fn parse_char(s: &str) -> Result<char, ParseError> { let mut r = StringReader::new(s, 0, StringType::Single); r.parse_char().map(|r| r.0) } fn parse_string(s: &str, ty: StringType) -> Result<String, ParseError> { let mut r = StringReader::new(s, 0, ty); r.parse_string().map(|r| r.0) } #[test] fn test_parse_string() { let n = StringType::Normal; let r = StringType::Raw; assert_eq!(parse_char(r"#'a'").unwrap(), 'a'); assert_eq!(parse_char(r"#'\''").unwrap(), '\''); assert_eq!(parse_char(r"#'\x7f'").unwrap(), '\x7f'); assert_eq!(parse_char(r"#'\u{1234}'").unwrap(), '\u{1234}'); assert_eq!(parse_char(r"#'\u{1_2__3_4}'").unwrap(), '\u{1234}'); assert_eq!(parse_string(r#""foo""#, n).unwrap(), "foo"); assert_eq!(parse_string(r#"r"foo""#, r).unwrap(), "foo"); assert_eq!(parse_string(r##"r#""foo""#"##, r).unwrap(), r#""foo""#); } #[test] fn test_errors() { assert_eq!(parse_bytes(r#""abc\xff""#).unwrap(), b"abc\xff"); assert!(parse_bytes(r#""abc\u{ff}""#).is_err()); } }
match self.chars.clone().next() { Some((_, ch)) => Ok(ch), None => Err(ParseError::new(self.span_from(self.start, 1), if self.ty == StringType::Single { ParseErrorKind::UnterminatedChar } else { ParseErrorKind::UnterminatedString })) }
if_condition
[ { "content": "fn consume_block_comment(start: usize, chars: &mut CharIndices) -> Result<usize, ParseErrorKind> {\n\n let mut n_blocks = 1;\n\n\n\n loop {\n\n match chars.next() {\n\n Some((_, '|')) => match chars.clone().next() {\n\n Some((ind, '#')) => {\n\n chars.next();\n\n n_blocks -= 1;\n\n if n_blocks == 0 {\n\n return Ok(ind - start + 1);\n\n }\n\n }\n\n Some(_) => (),\n\n None => break\n\n },\n\n Some((_, '#')) => match chars.clone().next() {\n\n Some((_, '|')) => {\n\n chars.next();\n\n n_blocks += 1;\n", "file_path": "src/ketos/lexer.rs", "rank": 3, "score": 386156.82134304044 }, { "content": "// Runs `F` twice; first, after compiling and executing input;\n\n// then, after encoding and decoding and loading the resulting ModuleCode.\n\nfn run<F>(input: &str, mut f: F) -> Result<(), Error>\n\n where F: FnMut(&Context) {\n\n let interp = new_interpreter();\n\n\n\n let code: Vec<_> = interp.compile_exprs(input)?\n\n .into_iter().map(Rc::new).collect();\n\n\n\n for code in &code {\n\n interp.execute_code(code.clone())?;\n\n }\n\n\n\n f(interp.context());\n\n\n\n let mut buf = Vec::new();\n\n let path = Path::new(\"<buffer>\");\n\n\n\n {\n\n let mcode = ModuleCode::new(code, interp.scope());\n\n let scope = interp.scope();\n\n let names = scope.borrow_names();\n", "file_path": "tests/encode.rs", "rank": 5, "score": 379699.8378247409 }, { "content": "fn parse_char(s: &str) -> Result<char, ParseError> {\n\n let (ch, _) = string::parse_char(s, 0)?;\n\n Ok(ch)\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 6, "score": 379142.94564897293 }, { "content": "fn parse_char(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_char(input, pos)?;\n\n Ok((Token::Char(&input[..size]), size))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 8, "score": 335210.1347345855 }, { "content": "fn trim_first<F: FnOnce(char) -> bool>(s: &str, f: F) -> &str {\n\n let mut chars = s.chars();\n\n\n\n match chars.next() 
{\n\n Some(ch) if f(ch) => chars.as_str(),\n\n _ => s\n\n }\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 9, "score": 334659.2909132609 }, { "content": "fn parse_byte(s: &str) -> Result<u8, ParseError> {\n\n let (b, _) = string::parse_byte(s, 0)?;\n\n Ok(b)\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 10, "score": 330136.057493926 }, { "content": "/// Returns a structure which helps in highlighting a span within a body of text.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if `span` is not valid.\n\npub fn highlight_span(text: &str, span: Span) -> SpanDisplay {\n\n let line_start = match text[..span.lo as usize].rfind('\\n') {\n\n Some(pos) => pos + 1,\n\n None => 0\n\n };\n\n\n\n let line_end = match text[line_start..].find('\\n') {\n\n Some(pos) => line_start + pos,\n\n None => text.len()\n\n };\n\n\n\n let pre_chars = text[line_start..span.lo as usize].chars().count();\n\n let span_str = &text[span.lo as usize..span.hi as usize];\n\n\n\n // If the span spans multiple lines, just highlight to the end of the line.\n\n let span_chars = match span_str.find('\\n') {\n\n Some(pos) => span_str[..pos].chars().count(),\n\n None => span_str.chars().count()\n\n };\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 12, "score": 315744.1913048344 }, { "content": "fn consume_line_comment(start: usize, chars: &mut CharIndices) -> usize {\n\n let mut last = start;\n\n\n\n for (ind, ch) in chars {\n\n last = ind;\n\n if ch == '\\n' {\n\n break;\n\n }\n\n }\n\n\n\n last - start + 1\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 13, "score": 306658.0914435019 }, { "content": "fn parse_identifier(input: &str) -> Result<(&str, usize), ParseErrorKind> {\n\n for (ind, ch) in input.char_indices() {\n\n if !is_identifier(ch) {\n\n return Ok((&input[..ind], ind));\n\n }\n\n }\n\n\n\n Ok((input, input.len()))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 14, "score": 299884.3416039129 }, { "content": "fn each_import<F>(items: &[Value], mut f: F) -> 
Result<(), CompileError>\n\n where F: FnMut(Name, Name) -> Result<(), CompileError> {\n\n let mut iter = items.iter();\n\n\n\n while let Some(item) = iter.next() {\n\n let (src, dest) = match *item {\n\n Value::Keyword(dest) => match iter.next() {\n\n Some(&Value::Name(src)) => (src, dest),\n\n _ => return Err(CompileError::SyntaxError(\n\n \"expected name following keyword\"))\n\n },\n\n Value::Name(name) => (name, name),\n\n _ => return Err(CompileError::SyntaxError(\"expected name or keyword\"))\n\n };\n\n\n\n f(src, dest)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ketos/compile.rs", "rank": 15, "score": 299223.07809097366 }, { "content": "fn parse_raw_string(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_raw_string(input, pos)?;\n\n Ok((Token::String(&input[..size]), size))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{BytePos, Lexer, Span, Token};\n\n use crate::parser::ParseErrorKind;\n\n\n\n fn sp(lo: BytePos, hi: BytePos) -> Span {\n\n Span{lo, hi}\n\n }\n\n\n\n fn tokens(s: &str) -> Vec<(Span, Token)> {\n\n let mut lex = Lexer::new(s, 0);\n\n let mut res = Vec::new();\n\n\n\n loop {\n\n match lex.next_token().unwrap() {\n", "file_path": "src/ketos/lexer.rs", "rank": 16, "score": 297509.5388754668 }, { "content": "fn parse_raw_path(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_raw_string(&input[2..], pos + 2)?;\n\n Ok((Token::Path(&input[..size + 2]), size + 2))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 17, "score": 297509.5388754668 }, { "content": "fn parse_raw_bytes(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_raw_byte_string(&input[2..], pos + 2)?;\n\n Ok((Token::Bytes(&input[..size + 2]), size + 2))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 18, "score": 297509.5388754668 }, { "content": "fn lambda(s: &str) -> Result<Vec<u8>, Error> {\n\n let 
interp = Interpreter::new();\n\n let _exprs = interp.compile_exprs(s)?;\n\n\n\n match interp.scope().get_named_value(\"test\") {\n\n Some(Value::Lambda(ref l)) => Ok(l.code.code.clone().into_vec()),\n\n Some(ref v) => panic!(\"expected lambda; got {}\", v.type_name()),\n\n None => panic!(\"missing `test` function\")\n\n }\n\n}\n\n\n", "file_path": "tests/compile.rs", "rank": 19, "score": 296173.5820573133 }, { "content": "/// Read compiled bytecode\n\npub fn read_bytecode<R: Read>(r: &mut R, path: &Path, ctx: &Context)\n\n -> Result<ModuleCode, Error> {\n\n let mut buf = [0; 4];\n\n\n\n r.read_exact(&mut buf)\n\n .map_err(|e| IoError::new(IoMode::Read, path, e))?;\n\n check_magic_number(buf)?;\n\n\n\n r.read_exact(&mut buf)\n\n .map_err(|e| IoError::new(IoMode::Read, path, e))?;\n\n check_version(buf)?;\n\n\n\n let mut buf = Vec::new();\n\n r.read_to_end(&mut buf)\n\n .map_err(|e| IoError::new(IoMode::Read, path, e))?;\n\n\n\n let mut dec = ValueDecoder::new(ctx, &buf);\n\n\n\n let n_names = dec.read_uint()?;\n\n let mut names = NameInputConversion::new();\n", "file_path": "src/ketos/encode.rs", "rank": 20, "score": 290116.7437048651 }, { "content": "/// Reads bytes from a cursor without copying data.\n\nfn read_cursor<'a>(cur: &mut Cursor<&'a [u8]>, n: usize) -> Option<&'a [u8]> {\n\n let pos = cur.position() as usize;\n\n let bytes = *cur.get_ref();\n\n\n\n if bytes.len() < pos + n {\n\n None\n\n } else {\n\n cur.set_position((pos + n) as u64);\n\n Some(&bytes[pos..pos + n])\n\n }\n\n}\n\n\n\nmacro_rules! 
types {\n\n ( $( $name:ident = $value:expr , )+ ) => {\n\n /// Byte constants indicating the type of the following value.\n\n ///\n\n /// Any addition, deletion, or modification to these constants constitutes\n\n /// a breaking change to the bytecode format.\n\n mod types {\n\n $( pub const $name: u8 = $value; )+\n", "file_path": "src/ketos/encode.rs", "rank": 21, "score": 288630.8359943185 }, { "content": "fn parse_keyword(input: &str) -> Result<(Token, usize), ParseErrorKind> {\n\n parse_identifier(&input[1..]) // Skip leading ':'\n\n .and_then(|(ident, size)| {\n\n if size == 0 {\n\n Err(ParseErrorKind::InvalidToken)\n\n } else {\n\n Ok((Token::Keyword(ident), size + 1))\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 22, "score": 278485.31680672325 }, { "content": "fn parse_number(input: &str) -> Result<(Token, usize), ParseErrorKind> {\n\n let mut digits = false;\n\n let mut dot = false;\n\n let mut exp = false;\n\n let mut exp_digit = false;\n\n let mut slash = false;\n\n let mut slash_digit = false;\n\n let mut size = input.len();\n\n\n\n let (base, prefix_offset, rest) = if input.starts_with(\"0x\") {\n\n (16, 2, &input[2..])\n\n } else if input.starts_with(\"0o\") {\n\n (8, 2, &input[2..])\n\n } else if input.starts_with(\"0b\") {\n\n (2, 2, &input[2..])\n\n } else if input.starts_with('-') {\n\n match input[1..].chars().next() {\n\n Some(ch) if ch.is_digit(10) => (10, 1, &input[1..]),\n\n // Actually a name beginning with '-' rather a number\n\n _ => return parse_name(input)\n", "file_path": "src/ketos/lexer.rs", "rank": 23, "score": 278485.31680672325 }, { "content": "fn parse_name(input: &str) -> Result<(Token, usize), ParseErrorKind> {\n\n parse_identifier(input).map(|(ident, size)| (Token::Name(ident), size))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 24, "score": 278485.31680672325 }, { "content": "/// Parses, compiles, and executes the given code within a context.\n\npub fn run_code(ctx: &Context, input: &str) -> 
Result<Value, Error> {\n\n let offset = ctx.scope().borrow_codemap_mut().add_source(input, None);\n\n\n\n let exprs = {\n\n let mut p = Parser::new(ctx, Lexer::new(input, offset));\n\n\n\n p.parse_exprs()?\n\n };\n\n\n\n let code = exprs.iter()\n\n .map(|v| compile(ctx, v))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n\n\n let mut r = Value::Unit;\n\n\n\n for c in code {\n\n r = execute(ctx, Rc::new(c))?;\n\n }\n\n\n\n Ok(r)\n", "file_path": "src/ketos/run.rs", "rank": 25, "score": 269890.4542590598 }, { "content": "fn insert_doc_comment(mut items: Vec<Value>, doc: Option<(Span, &str)>)\n\n -> Result<Value, ParseError> {\n\n if let Some((sp, doc)) = doc {\n\n if items.len() == 3 {\n\n items.insert(2, format_doc_comment(doc).into());\n\n } else {\n\n return Err(ParseError::new(sp, ParseErrorKind::CannotDocumentItem));\n\n }\n\n }\n\n\n\n Ok(items.into())\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 26, "score": 269559.0555716604 }, { "content": "fn make_title_case(s: &mut String, mut start: usize) {\n\n loop {\n\n let (off, len) = match s[start..].split_whitespace().next() {\n\n Some(word) => (slice_offset(s, word), word.len()),\n\n None => break\n\n };\n\n\n\n make_first_uppercase(s, off);\n\n start = off + len;\n\n }\n\n}\n\n\n", "file_path": "src/ketos/string_fmt.rs", "rank": 27, "score": 268982.871471429 }, { "content": "fn join_string(sep: &str, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut res = String::new();\n\n\n\n if let Some(value) = args.first() {\n\n match *value {\n\n Value::Char(ch) => res.push(ch),\n\n Value::String(ref s) => res.push_str(s),\n\n ref v => return Err(From::from(ExecError::expected(\"char or string\", v)))\n\n }\n\n\n\n for arg in &args[1..] 
{\n\n res.push_str(sep);\n\n match *arg {\n\n Value::Char(ch) => res.push(ch),\n\n Value::String(ref s) => res.push_str(s),\n\n ref v => return Err(From::from(ExecError::expected(\"char or string\", v)))\n\n }\n\n }\n\n }\n\n\n\n Ok(res.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 28, "score": 268447.0294715406 }, { "content": "fn parse_ratio(ctx: &Context, s: &str, sp: Span) -> Result<Ratio, Error> {\n\n let s = strip_underscores(s);\n\n\n\n check_integer(ctx, &s, 10)?;\n\n\n\n s.parse().map_err(|_| From::from(ParseError::new(sp,\n\n ParseErrorKind::LiteralParseError)))\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 29, "score": 267385.9588751787 }, { "content": "fn run_bench(b: &mut Bencher, setup: &str, input: &str) {\n\n let (interp, code) = compile(setup, input).unwrap();\n\n\n\n b.iter(|| interp.execute_code(code.clone()).unwrap());\n\n}\n\n\n", "file_path": "benches/core.rs", "rank": 30, "score": 266984.98792464996 }, { "content": "fn parse_path(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_string(&input[2..], pos + 2)?;\n\n Ok((Token::Path(&input[..size + 2]), size + 2))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 31, "score": 264605.45808884746 }, { "content": "fn parse_string(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_string(input, pos)?;\n\n Ok((Token::String(&input[..size]), size))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 32, "score": 264605.4580888474 }, { "content": "fn parse_byte(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_byte(input, pos)?;\n\n Ok((Token::Byte(&input[..size]), size))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 33, "score": 264605.45808884746 }, { "content": "fn parse_bytes(input: &str, pos: BytePos) -> Result<(Token, usize), ParseError> {\n\n let (_, size) = string::parse_byte_string(&input[2..], 
pos + 2)?;\n\n Ok((Token::Bytes(&input[..size + 2]), size + 2))\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 34, "score": 264605.45808884746 }, { "content": "fn check_integer(ctx: &Context, mut s: &str, base: u32) -> Result<(), RestrictError> {\n\n let limit = ctx.restrict().max_integer_size;\n\n\n\n if limit == usize::max_value() {\n\n return Ok(());\n\n }\n\n\n\n if s.starts_with('-') {\n\n s = &s[1..].trim_start_matches('0');\n\n } else {\n\n s = s.trim_start_matches('0');\n\n }\n\n\n\n // Approximate the number of bits that could be represented by a number of bytes.\n\n let n_bits = (s.len() as f32 * (base as f32).log2()).ceil() as usize;\n\n\n\n if n_bits > limit {\n\n Err(RestrictError::IntegerLimitExceeded)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 35, "score": 261905.1813133416 }, { "content": "// TODO: Should probably go into some utility module\n\n/// Returns the suitable plural suffix `\"\"` or `\"s\"` for count `n`.\n\npub fn plural(n: u32) -> &'static str {\n\n if n == 1 { \"\" } else { \"s\" }\n\n}\n\n\n\n/// Represents a function implemented in Rust.\n\n#[derive(Copy, Clone)]\n\npub struct Function {\n\n /// Function name\n\n pub name: Name,\n\n /// System function\n\n pub sys_fn: SystemFn,\n\n}\n\n\n\nimpl fmt::Debug for Function {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Function {{ name: {:?}, ... 
}}\", self.name)\n\n }\n\n}\n\n\n\nimpl PartialEq for Function {\n", "file_path": "src/ketos/function.rs", "rank": 36, "score": 261417.57780270546 }, { "content": "fn make_uppercase(s: &mut String, start: usize) {\n\n if s[start..].is_ascii() {\n\n s[start..].make_ascii_uppercase();\n\n } else {\n\n let upper = s[start..].to_uppercase();\n\n s.truncate(start);\n\n s.push_str(&upper);\n\n }\n\n}\n\n\n", "file_path": "src/ketos/string_fmt.rs", "rank": 37, "score": 258203.15229448158 }, { "content": "fn make_lowercase(s: &mut String, start: usize) {\n\n if s[start..].is_ascii() {\n\n s[start..].make_ascii_lowercase();\n\n } else {\n\n let lower = s[start..].to_lowercase();\n\n s.truncate(start);\n\n s.push_str(&lower);\n\n }\n\n}\n\n\n", "file_path": "src/ketos/string_fmt.rs", "rank": 38, "score": 258203.15229448158 }, { "content": "fn make_first_uppercase(s: &mut String, start: usize) {\n\n let (ind, ch) = match s[start..].char_indices()\n\n .find(|&(_, ch)| !ch.is_whitespace()) {\n\n Some((ind, ch)) => (ind + start, ch),\n\n None => return\n\n };\n\n\n\n if ch.is_ascii() {\n\n s[ind..=ind].make_ascii_uppercase();\n\n } else {\n\n // Removing and inserting is slow, but what alternative do we have?\n\n let ch = s.remove(ind);\n\n let mut ind = ind;\n\n\n\n for upper_ch in ch.to_uppercase() {\n\n s.insert(ind, upper_ch);\n\n ind += upper_ch.len_utf8();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ketos/string_fmt.rs", "rank": 39, "score": 254667.7563682187 }, { "content": "fn check_version(num: [u8; 4]) -> Result<(), DecodeError> {\n\n let version = BigEndian::read_u32(&num);\n\n\n\n if version == BYTECODE_VERSION {\n\n Ok(())\n\n } else {\n\n Err(DecodeError::IncorrectVersion(version))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/encode.rs", "rank": 40, "score": 253172.3942334386 }, { "content": "fn parse_bytes(s: &str) -> Result<Bytes, ParseError> {\n\n let (b, _) = if s.starts_with(\"#br\") {\n\n string::parse_raw_byte_string(&s[2..], 0)?\n\n } else {\n\n 
string::parse_byte_string(&s[2..], 0)?\n\n };\n\n Ok(Bytes::new(b))\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 41, "score": 252162.59546287658 }, { "content": "fn parse_string(s: &str) -> Result<String, ParseError> {\n\n let (s, _) = if s.starts_with('r') {\n\n string::parse_raw_string(s, 0)?\n\n } else {\n\n string::parse_string(s, 0)?\n\n };\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 42, "score": 252162.59546287658 }, { "content": "fn check_magic_number(num: [u8; 4]) -> Result<(), DecodeError> {\n\n if &num == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(DecodeError::IncorrectMagicNumber(num))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/encode.rs", "rank": 43, "score": 250544.93397789553 }, { "content": "fn parse_float(s: &str) -> Result<f64, ParseErrorKind> {\n\n strip_underscores(s).parse()\n\n .map_err(|_| ParseErrorKind::LiteralParseError)\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 44, "score": 249683.97544443764 }, { "content": "fn parse_path(s: &str) -> Result<PathBuf, ParseError> {\n\n let (s, _) = if s.starts_with(\"#pr\") {\n\n string::parse_raw_string(&s[2..], 0)?\n\n } else {\n\n string::parse_string(&s[2..], 0)?\n\n };\n\n Ok(PathBuf::from(s))\n\n}\n\n\n", "file_path": "src/ketos/parser.rs", "rank": 45, "score": 249539.99330201396 }, { "content": "fn eval(interp: &Interpreter, code: &str) -> Result<Value, Error> {\n\n interp.run_code(code, None)\n\n}\n\n\n", "file_path": "tests/structs.rs", "rank": 46, "score": 249278.39236180595 }, { "content": "#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]\n\nstruct StructC(u8, (u16, u32), [i32; 2]);\n\n\n\nconst STRUCT_3: &'static str =\n\n \"(StructC (1 (2 3) (4 5)))\";\n\n\n", "file_path": "tests/value_encode.rs", "rank": 47, "score": 247490.06730032415 }, { "content": "/// `=` returns whether the given arguments compare equal to one another.\n\n///\n\n/// Values of different types may not be compared. 
Attempts to do so will\n\n/// result in a `TypeMismatch` error.\n\nfn fn_eq(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut r = true;\n\n let v = &args[0];\n\n\n\n for arg in &args[1..] {\n\n let eq = v.is_equal(arg)?;\n\n\n\n if !eq {\n\n r = false;\n\n break;\n\n }\n\n }\n\n\n\n Ok(r.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 48, "score": 246380.21642483264 }, { "content": "/// `chars` returns a string transformed into a list of characters.\n\nfn fn_chars(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let s = get_string(&args[0])?;\n\n Ok(s.chars().collect::<Vec<_>>().into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 49, "score": 246374.1960066249 }, { "content": "/// `eq` performs \"weak\" equality comparison of arguments.\n\n///\n\n/// Any case in which `=` would cause an error, `eq` instead returns `false`.\n\nfn fn_weak_eq(ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n match fn_eq(ctx, args) {\n\n Ok(v) => Ok(v),\n\n Err(_) => Ok(false.into())\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 50, "score": 243396.3684725493 }, { "content": "/// `.=` assigns a value to one or more fields of a struct value.\n\n///\n\n/// ```lisp\n\n/// (.= foo :bar 1)\n\n/// ```\n\nfn fn_dot_eq(ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let value = args[0].take();\n\n\n\n let def = get_struct_def_for(ctx.scope(), &value)?;\n\n\n\n let mut fields = Vec::with_capacity(args.len() / 2);\n\n\n\n let mut iter = args[1..].iter_mut();\n\n\n\n while let Some(name) = iter.next() {\n\n let name = get_keyword(name)?;\n\n\n\n let value = match iter.next() {\n\n Some(v) => v.take(),\n\n None => return Err(From::from(ExecError::OddKeywordParams))\n\n };\n\n\n\n if fields.iter().any(|&(n, _)| n == name) {\n\n return Err(ExecError::DuplicateField(name).into());\n\n }\n\n\n\n fields.push((name, value));\n\n }\n\n\n\n def.def().replace_fields(ctx.scope(), 
&def, value, &mut fields)\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 51, "score": 243391.1021449269 }, { "content": "fn slice_offset(a: &str, b: &str) -> usize {\n\n b.as_ptr() as usize - a.as_ptr() as usize\n\n}\n\n\n", "file_path": "src/ketos/string_fmt.rs", "rank": 52, "score": 242533.75251030287 }, { "content": "/// `use` imports a series of names from a module.\n\n///\n\n/// ```lisp\n\n/// (use foo (alpha beta gamma))\n\n///\n\n/// (use foo :self)\n\n/// ```\n\nfn op_use(compiler: &mut Compiler, args: &[Value]) -> Result<(), Error> {\n\n let mod_name = get_name(compiler, &args[0])?;\n\n\n\n // Replace operator item with more specific item\n\n compiler.trace.pop();\n\n compiler.trace.push(TraceItem::UseModule(compiler.ctx.scope().name(), mod_name));\n\n\n\n let ctx = compiler.ctx.clone();\n\n let mods = ctx.scope().modules();\n\n let m = mods.load_module(mod_name, &ctx)\n\n .map_err(|e| { compiler.extend_global_trace(); e })?;\n\n\n\n let mut imp_set = ImportSet::new(mod_name);\n\n\n\n match args[1] {\n\n Value::Keyword(standard_names::ALL) => {\n\n let names = compiler.scope().import_all(&m.scope);\n\n imp_set.names.extend(names.iter().map(|&n| (n, n)));\n\n }\n\n Value::Keyword(standard_names::SELF) => {\n", "file_path": "src/ketos/compile.rs", "rank": 53, "score": 241058.0314045389 }, { "content": "/// `let` defines a series of named value bindings.\n\n///\n\n/// ```lisp\n\n/// (let ((a (foo))\n\n/// (b (bar)))\n\n/// (baz a b))\n\n/// ```\n\nfn op_let(compiler: &mut Compiler, args: &[Value]) -> Result<(), Error> {\n\n let mut n_vars = 0;\n\n\n\n match args[0] {\n\n Value::Unit => (),\n\n Value::List(ref li) => {\n\n n_vars = li.len() as u32;\n\n for v in li {\n\n match *v {\n\n Value::List(ref li) if li.len() == 2 => {\n\n let name = get_name(compiler, &li[0])?;\n\n\n\n compiler.compile_value(&li[1])?;\n\n compiler.push_var(name);\n\n compiler.push_instruction(Instruction::Push)?;\n\n }\n\n _ => {\n\n compiler.set_trace_expr(v);\n\n return 
Err(From::from(CompileError::SyntaxError(\n\n \"expected list of 2 elements\")))\n", "file_path": "src/ketos/compile.rs", "rank": 54, "score": 241053.21705647348 }, { "content": "/// `struct` creates a struct definition and binds to global scope.\n\n///\n\n/// ```lisp\n\n/// (struct Foo ((name string)\n\n/// (num integer)))\n\n/// ```\n\nfn op_struct(compiler: &mut Compiler, args: &[Value]) -> Result<(), Error> {\n\n let name = get_name(compiler, &args[0])?;\n\n\n\n // Replace operator item with more specific item\n\n compiler.trace.pop();\n\n compiler.trace.push(TraceItem::DefineStruct(\n\n compiler.ctx.scope().name(), name));\n\n\n\n let (doc, body) = extract_doc_string(args)?;\n\n\n\n test_define_name(compiler.scope(), name)?;\n\n let mut fields = NameMap::new();\n\n\n\n match *body {\n\n Value::Unit => (),\n\n Value::List(ref li) => {\n\n for v in li {\n\n match *v {\n\n Value::List(ref li) if li.len() == 2 => {\n\n let fname = get_name(compiler, &li[0])?;\n", "file_path": "src/ketos/compile.rs", "rank": 55, "score": 240741.44203861494 }, { "content": "/// Calls the function currently being executed.\n\n///\n\n/// This operator enables lambda functions to be recursive.\n\n///\n\n/// ```lisp\n\n/// (lambda (n) (call-self (+ n 1)))\n\n/// ```\n\nfn op_call_self(compiler: &mut Compiler, args: &[Value]) -> Result<(), Error> {\n\n if !compiler.is_inside_lambda() {\n\n return Err(CompileError::SyntaxError(\"`call-self` outside lambda\").into());\n\n }\n\n\n\n for arg in args {\n\n compiler.compile_value(arg)?;\n\n compiler.push_instruction(Instruction::Push)?;\n\n }\n\n\n\n compiler.push_instruction(Instruction::CallSelf(args.len() as u32))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ketos/compile.rs", "rank": 56, "score": 237829.21469537704 }, { "content": "fn conv<T: FromValue>(interp: &Interpreter, code: &str) -> Result<T, Error> {\n\n let v = eval(interp, code)?;\n\n let t = T::from_value(v)?;\n\n Ok(t)\n\n}\n\n\n", "file_path": "tests/structs.rs", "rank": 57, 
"score": 237178.58043498418 }, { "content": "fn is_identifier(ch: char) -> bool {\n\n match ch {\n\n '!' | '$' | '%' | '&' | '*' | '+' | '-' | '.' | '/' |\n\n '<' | '=' | '>' | '?' | '^' | '_' | '|' => true,\n\n _ if ch.is_alphanumeric() => true,\n\n _ => false\n\n }\n\n}\n\n\n", "file_path": "src/ketos/lexer.rs", "rank": 58, "score": 236181.0332685919 }, { "content": "fn display_float(f: &mut fmt::Formatter, v: f64) -> fmt::Result {\n\n if is_normal(v) {\n\n let s = v.to_string();\n\n\n\n if s.contains('.') {\n\n write!(f, \"{}\", s)\n\n } else {\n\n write!(f, \"{}.0\", s)\n\n }\n\n } else {\n\n write!(f, \"{}\", v)\n\n }\n\n}\n\n\n", "file_path": "src/ketos/value.rs", "rank": 59, "score": 236105.433898997 }, { "content": "/// Writes a human-readable representation of a `Value` to the given `fmt::Write`.\n\n///\n\n/// `indent` specifies the base indentation for items contained within lists.\n\n/// Indentation is not applied to the top level value.\n\npub fn pretty_print<W: Write>(w: &mut W, names: &NameStore, v: &Value, indent: u32) -> fmt::Result {\n\n match *v {\n\n Value::List(ref li) => {\n\n let mut iter = li.iter();\n\n\n\n let first = iter.next().expect(\"empty list value\");\n\n\n\n let sub_indent = match *first {\n\n Value::Name(_) => indent + 2,\n\n _ => indent + 1\n\n };\n\n\n\n w.write_char('(')?;\n\n pretty_print(w, names, first, indent)?;\n\n\n\n if is_short_args(&li[1..]) {\n\n for v in iter {\n\n w.write_char(' ')?;\n\n pretty_print(w, names, v, sub_indent)?;\n\n }\n", "file_path": "src/ketos/pretty.rs", "rank": 60, "score": 235921.72266725858 }, { "content": "fn eval_str(s: &str) -> Result<String, Error> {\n\n let interp = Interpreter::new();\n\n\n\n let v = interp.run_single_expr(s, None)?;\n\n\n\n let s = FromValue::from_value(v).unwrap();\n\n Ok(s)\n\n}\n\n\n", "file_path": "tests/core.rs", "rank": 61, "score": 231513.87346966844 }, { "content": "fn write_indent<W: Write>(w: &mut W, n: u32) -> fmt::Result {\n\n for _ in 0..n {\n\n 
w.write_char(' ')?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/ketos/pretty.rs", "rank": 62, "score": 230117.68661468878 }, { "content": "fn hello(what: &str) -> Result<String, Error> {\n\n Ok(format!(\"Hello, {}!\", what))\n\n}\n", "file_path": "examples/module.rs", "rank": 63, "score": 223506.10211784183 }, { "content": "fn hello(s: &str) -> Result<String, Error> {\n\n Ok(format!(\"Hello, {}!\", s))\n\n}\n\n\n", "file_path": "tests/foreign.rs", "rank": 64, "score": 223506.10211784183 }, { "content": "fn eval(s: &str) -> Result<String, Error> {\n\n let interp = Interpreter::new();\n\n\n\n let v = interp.run_single_expr(s, None)?;\n\n Ok(interp.format_value(&v))\n\n}\n\n\n", "file_path": "tests/core.rs", "rank": 65, "score": 223506.10211784183 }, { "content": "fn factorial(b: &mut Bencher, n: u32) {\n\n run_bench(b, r#\"\n\n (define (factorial n) (fac-recursive n 1))\n\n\n\n (define (fac-recursive n acc)\n\n (if (<= n 1)\n\n acc\n\n (fac-recursive (- n 1) (* n acc))))\n\n \"#,\n\n &format!(\"(factorial {})\", n));\n\n}\n\n\n", "file_path": "benches/core.rs", "rank": 66, "score": 222110.26180219208 }, { "content": "fn fib(b: &mut Bencher, n: u32) {\n\n run_bench(b, r#\"\n\n (define (fib n) (fib-recursive n 0 1))\n\n\n\n (define (fib-recursive n a b)\n\n (if (= n 0)\n\n a\n\n (fib-recursive (- n 1) b (+ a b))))\n\n \"#,\n\n &format!(\"(fib {})\", n));\n\n}\n\n\n", "file_path": "benches/core.rs", "rank": 67, "score": 222110.26180219208 }, { "content": "fn compile(setup: &str, input: &str) -> Result<(Interpreter, Rc<Code>), Error> {\n\n let interp = Interpreter::new();\n\n\n\n let _ = interp.run_code(setup, None)?;\n\n let code = interp.compile_single_expr(input, None)?;\n\n\n\n Ok((interp, Rc::new(code)))\n\n}\n\n\n", "file_path": "benches/core.rs", "rank": 68, "score": 220418.33018377319 }, { "content": "fn interp(code: &str) -> Result<Interpreter, Error> {\n\n let mut loader = FileModuleLoader::with_search_paths(vec![PathBuf::from(\"lib\")]);\n\n\n\n 
loader.set_read_bytecode(false);\n\n loader.set_write_bytecode(false);\n\n\n\n let interp = Interpreter::with_loader(\n\n Box::new(BuiltinModuleLoader.chain(loader)));\n\n\n\n interp.run_code(\"(use test (assert-eq))\", None)?;\n\n interp.run_code(code, None)?;\n\n\n\n Ok(interp)\n\n}\n\n\n", "file_path": "tests/value_encode.rs", "rank": 69, "score": 217616.91611426158 }, { "content": "fn run(s: &str) -> Result<Vec<String>, Error> {\n\n let interp = Interpreter::new();\n\n\n\n let c = interp.compile_exprs(s)?;\n\n c.into_iter().map(|c| interp.execute(c)\n\n .map(|v| interp.format_value(&v))).collect()\n\n}\n\n\n", "file_path": "tests/core.rs", "rank": 70, "score": 217449.16906722385 }, { "content": "/// Returns a sorted list of possible name completions for the given prefix.\n\n///\n\n/// Returns `None` if no possible completions exist.\n\npub fn complete_name(word: &str, scope: &GlobalScope)\n\n -> Option<Vec<String>> {\n\n let mut results = Vec::new();\n\n\n\n for name in MasterScope::names() {\n\n scope.with_name(name, |name| {\n\n if name.starts_with(word) {\n\n results.push(name.to_owned());\n\n }\n\n });\n\n }\n\n\n\n scope.with_values(|values| {\n\n for &(name, _) in values {\n\n scope.with_name(name, |name| {\n\n if name.starts_with(word) {\n\n results.push(name.to_owned());\n\n }\n\n });\n\n }\n", "file_path": "src/ketos/completion.rs", "rank": 71, "score": 215736.45846238115 }, { "content": "#[derive(Debug, ForeignValue, FromValue, FromValueRef, IntoValue)]\n\nstruct NoClone(&'static str);\n\n\n", "file_path": "tests/test_derive.rs", "rank": 72, "score": 215220.8415475702 }, { "content": "/// Fold constants for an anticommutative operation.\n\n/// There are two strategies for partial constant evaluation, depending on\n\n/// whether the first value is constant.\n\n///\n\n/// `(- foo 1 2 3)` -> `(- foo 6)`\n\n///\n\n/// `(- 1 foo 2 3)` -> `(- -4 foo)`\n\n///\n\n/// `solo_name` is used in the case of `(foo value identity)`.\n\nfn fold_anticommutative<F: 
FoldOp>(compiler: &mut Compiler,\n\n name: Name, solo_name: Name, args: &[Value])\n\n -> Result<ConstResult, Error> {\n\n let mut args = args.iter();\n\n let mut new_args = Vec::new();\n\n let mut value = None;\n\n\n\n let first = args.next().unwrap();\n\n\n\n let first_const = match compiler.eval_constant(first)? {\n\n ConstResult::IsRuntime => {\n\n new_args.push(first.clone());\n\n false\n\n }\n\n ConstResult::Partial(v) => {\n\n new_args.push(v);\n\n false\n\n }\n\n ConstResult::IsConstant => {\n\n F::type_check(first)?;\n", "file_path": "src/ketos/compile.rs", "rank": 73, "score": 215026.54788792838 }, { "content": "fn get_string(v: &Value) -> Result<&str, ExecError> {\n\n FromValueRef::from_value_ref(v)\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 74, "score": 214872.62317637342 }, { "content": "/// Returns the first element of a list or string.\n\n///\n\n/// Returns an error in case of an empty list or string.\n\npub fn first(v: &Value) -> Result<Value, Error> {\n\n match *v {\n\n // There can't be an empty list, so this should never panic.\n\n Value::List(ref li) => Ok(li[0].clone()),\n\n Value::String(ref s) => match s.chars().next() {\n\n Some(ch) => Ok(ch.into()),\n\n None => Err(From::from(ExecError::OutOfBounds(0)))\n\n },\n\n Value::Bytes(ref b) => match b.iter().next() {\n\n Some(&b) => Ok(b.into()),\n\n None => Err(From::from(ExecError::OutOfBounds(0)))\n\n },\n\n ref v => Err(From::from(ExecError::expected(\"sequence\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 75, "score": 214394.32433960686 }, { "content": "/// Returns the last element of a list or string.\n\n///\n\n/// Returns an error in case of an empty list or string.\n\npub fn last(v: &Value) -> Result<Value, Error> {\n\n match *v {\n\n Value::List(ref li) => Ok(li.last().cloned().unwrap()),\n\n Value::String(ref s) => match s.chars().next_back() {\n\n Some(ch) => Ok(ch.into()),\n\n None => Err(From::from(ExecError::OutOfBounds(0)))\n\n },\n\n 
Value::Bytes(ref b) => match b.iter().next_back() {\n\n Some(&b) => Ok(b.into()),\n\n None => Err(From::from(ExecError::OutOfBounds(0)))\n\n },\n\n ref v => Err(From::from(ExecError::expected(\"sequence\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 76, "score": 214394.32433960686 }, { "content": "/// Returns all but the last element of a list or string.\n\n///\n\n/// Returns an error in case of an empty list or string.\n\npub fn init(v: &Value) -> Result<Value, Error> {\n\n match *v {\n\n Value::List(ref li) => {\n\n let len = li.len();\n\n Ok(li.slice(..len - 1).into())\n\n }\n\n Value::String(ref s) => {\n\n let mut chars = s.char_indices();\n\n\n\n match chars.next_back() {\n\n Some((idx, _)) => Ok(s.slice(..idx).into()),\n\n None => Err(From::from(ExecError::OutOfBounds(0)))\n\n }\n\n }\n\n Value::Bytes(ref b) => {\n\n if b.is_empty() {\n\n Err(From::from(ExecError::OutOfBounds(0)))\n\n } else {\n\n let len = b.len();\n\n Ok(b.slice(..len - 1).into())\n\n }\n\n }\n\n ref v => Err(From::from(ExecError::expected(\"sequence\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 77, "score": 214394.32433960686 }, { "content": "/// Returns all but the first element of a list or string.\n\n///\n\n/// Returns an error in case of an empty list or string.\n\npub fn tail(v: &Value) -> Result<Value, Error> {\n\n match *v {\n\n Value::List(ref li) => {\n\n Ok(li.slice(1..).into())\n\n }\n\n Value::String(ref s) => {\n\n let mut chars = s.chars();\n\n\n\n match chars.next() {\n\n Some(ch) => Ok(s.slice(ch.len_utf8()..).into()),\n\n None => Err(From::from(ExecError::OutOfBounds(0)))\n\n }\n\n }\n\n Value::Bytes(ref b) => {\n\n if b.is_empty() {\n\n Err(From::from(ExecError::OutOfBounds(0)))\n\n } else {\n\n Ok(b.slice(1..).into())\n\n }\n\n }\n\n ref v => Err(From::from(ExecError::expected(\"sequence\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 78, "score": 214394.32433960686 }, { "content": "fn 
run(restrict: RestrictConfig, code: &str) -> Result<(), Error> {\n\n let interp = Builder::new()\n\n .restrict(restrict)\n\n .finish();\n\n\n\n interp.run_code(code, None)?;\n\n Ok(())\n\n}\n\n\n\nmacro_rules! assert_matches_re {\n\n ( $e:expr , $re:expr ) => {\n\n assert_matches!($e, Error::RestrictError(e) if e == $re)\n\n }\n\n}\n\n\n", "file_path": "tests/restrict.rs", "rank": 79, "score": 211830.22142424088 }, { "content": "/// Returns the result of negating a value.\n\npub fn neg_number(v: Value) -> Result<Value, Error> {\n\n match v {\n\n Value::Float(f) => Ok((-f).into()),\n\n Value::Integer(i) => Ok((-i).into()),\n\n Value::Ratio(r) => Ok((-r).into()),\n\n ref v => Err(From::from(ExecError::expected(\"number\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 80, "score": 211655.4806074449 }, { "content": "/// Returns a value rounded toward negative infinity.\n\npub fn floor_number(v: Value) -> Result<Value, Error> {\n\n match v {\n\n Value::Float(f) => Ok(f.floor().into()),\n\n Value::Integer(i) => Ok(i.into()),\n\n Value::Ratio(ref r) => Ok(r.floor().into()),\n\n ref v => Err(From::from(ExecError::expected(\"number\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 81, "score": 211650.36635973962 }, { "content": "/// `is` returns whether a given expression matches the named type.\n\n///\n\n/// ```lisp\n\n/// (is 'integer 1)\n\n/// (is 'list '(1 2 3))\n\n/// ```\n\n///\n\n/// `is` also accepts `'number` as a type name, which matches `integer`, `float`,\n\n/// and `ratio` type values.\n\nfn fn_is(ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let name = get_name(&args[0])?;\n\n Ok(Value::Bool(value_is(ctx.scope(), &args[1], name)))\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 82, "score": 210637.45782873948 }, { "content": "/// `not` returns the inverse of the given boolean value.\n\nfn fn_not(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n match args[0] {\n\n 
Value::Bool(a) => Ok((!a).into()),\n\n ref v => Err(From::from(ExecError::expected(\"bool\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 83, "score": 210633.06146747776 }, { "content": "/// Implements functionality for Ketos `struct` values on a Rust type.\n\n///\n\n/// A type implementing `StructValue` may be constructed with `new`,\n\n/// will provide field access with `.` and can create modified values with `.=`.\n\n///\n\n/// An implementation of this trait can be generated using `derive(StructValue)`\n\n/// with the `ketos_derive` crate.\n\npub trait StructValue: Sized + Clone + ForeignValue {\n\n /// Returns the `struct` name.\n\n fn struct_name() -> &'static str;\n\n\n\n /// Creates a value from a list of fields.\n\n ///\n\n /// An error should be returned if any fields are missing, superfluous,\n\n /// or the wrong type of value.\n\n fn from_fields(scope: &Scope, def: &Rc<StructDef>,\n\n fields: &mut [(Name, Value)]) -> Result<Self, Error>;\n\n\n\n /// Returns a list of field names.\n\n fn field_names() -> &'static [&'static str];\n\n\n\n /// Returns a copy of a field as a Ketos `Value`.\n\n ///\n\n /// If the named field does not exist, an error should be returned.\n\n fn get_field(&self, scope: &Scope, def: &Rc<StructDef>, name: Name) -> Result<Value, Error>;\n\n\n\n /// Modifies the value to replace named fields with provided values.\n\n ///\n\n /// If any names are invalid or any values are of incorrect type,\n\n /// an error should be returned.\n\n fn replace_fields(&mut self, scope: &Scope, def: &Rc<StructDef>,\n\n fields: &mut [(Name, Value)]) -> Result<(), Error>;\n\n}\n\n\n", "file_path": "src/ketos/structs.rs", "rank": 84, "score": 209244.60251416996 }, { "content": "fn eval(interp: &Interpreter, input: &str) -> Result<String, Error> {\n\n let v = interp.run_single_expr(input, None)?;\n\n Ok(interp.format_value(&v))\n\n}\n\n\n", "file_path": "tests/foreign.rs", "rank": 85, "score": 208973.63411136722 }, { "content": "/// 
`int` truncates a float or ratio value and returns its whole portion as an integer.\n\n///\n\n/// If the given value is infinite or `NaN`, an error will result.\n\nfn fn_int(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n match args[0].take() {\n\n Value::Float(f) => match f {\n\n f if f.is_infinite() || f.is_nan() => Err(From::from(ExecError::Overflow)),\n\n f => Integer::from_f64(f)\n\n .map(Value::Integer).ok_or_else(|| From::from(ExecError::Overflow)),\n\n },\n\n Value::Integer(i) => Ok(i.into()),\n\n Value::Ratio(ref r) => Ok(r.to_integer().into()),\n\n ref v => Err(From::from(ExecError::expected(\"number\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 86, "score": 208360.79111094412 }, { "content": "/// `nan` returns whether all given arguments are equal to `NaN`.\n\n/// Given no arguments, returns the value of `NaN`.\n\nfn fn_nan(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n if args.is_empty() {\n\n Ok(f64::nan().into())\n\n } else {\n\n let mut r = true;\n\n\n\n for arg in args {\n\n if !get_float(arg)?.is_nan() {\n\n r = false;\n\n break;\n\n }\n\n }\n\n\n\n Ok(r.into())\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 87, "score": 208358.70105111328 }, { "content": "/// `is-instance` returns whether a given struct value is an instance of\n\n/// the named struct definition.\n\nfn fn_is_instance(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let def = get_struct_def(&args[0])?;\n\n\n\n let sv = &args[1];\n\n\n\n Ok(def.def().is_instance(sv, def).into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 88, "score": 208358.04547610498 }, { "content": "/// `elt` returns an element from a list, starting at zero index.\n\n///\n\n/// ```lisp\n\n/// (elt '(1 2 3) 0)\n\n/// ```\n\nfn fn_elt(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let li = &args[0];\n\n let idx = &args[1];\n\n\n\n let idx = usize::from_value_ref(idx)?;\n\n\n\n match 
*li {\n\n Value::List(ref li) => li.get(idx).cloned()\n\n .ok_or_else(|| From::from(ExecError::OutOfBounds(idx))),\n\n Value::Bytes(ref b) => b.get(idx).cloned()\n\n .map(|b| b.into())\n\n .ok_or_else(|| From::from(ExecError::OutOfBounds(idx))),\n\n ref v => Err(From::from(ExecError::expected(\"indexable sequence\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 89, "score": 208357.27942424687 }, { "content": "/// `join` joins a series of lists or strings and chars using a separator value.\n\n///\n\n/// ```lisp\n\n/// (join '(0) '(1 2 3) '(4 5 6))\n\n/// (join \":\" \"foo\" \"bar\")\n\n/// ```\n\nfn fn_join(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let (first, rest) = args.split_first_mut().unwrap();\n\n\n\n match *first {\n\n Value::Unit => concat_list(rest),\n\n Value::List(ref li) => join_list(li, rest),\n\n Value::Char(ch) => {\n\n let mut s = String::new();\n\n s.push(ch);\n\n join_string(&s, rest)\n\n }\n\n Value::String(ref s) if s.is_empty() => concat_string(rest),\n\n Value::String(ref s) => join_string(s, rest),\n\n Value::Bytes(ref s) => join_bytes(s, rest),\n\n ref v => Err(From::from(ExecError::expected(\"list or string\", v)))\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 90, "score": 208356.77341313855 }, { "content": "/// `/=` returns whether each given argument differs in value from each other argument.\n\n///\n\n/// Values of different types may not be compared. Attempts to do so will\n\n/// result in a `TypeMismatch` error.\n\nfn fn_ne(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut r = true;\n\n\n\n 'outer: for (i, lhs) in args.iter().enumerate() {\n\n for rhs in &args[i + 1..] 
{\n\n let eq = lhs.is_equal(rhs)?;\n\n\n\n if eq {\n\n r = false;\n\n break 'outer;\n\n }\n\n }\n\n }\n\n\n\n Ok(r.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 91, "score": 208356.2178469312 }, { "content": "/// `>` returns whether each argument compares greater than each successive argument.\n\n///\n\n/// Values of different types may not be compared. Attempts to do so will\n\n/// result in a `TypeMismatch` error.\n\nfn fn_gt(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut r = true;\n\n let mut v = &args[0];\n\n\n\n for arg in &args[1..] {\n\n let ord = v.compare(arg)?;\n\n\n\n if ord != Ordering::Greater {\n\n r = false;\n\n break;\n\n }\n\n v = arg;\n\n }\n\n\n\n Ok(r.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 92, "score": 208356.2178469312 }, { "content": "/// `<` returns whether each argument compares less than each successive argument.\n\n///\n\n/// Values of different types may not be compared. Attempts to do so will\n\n/// result in a `TypeMismatch` error.\n\nfn fn_lt(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut r = true;\n\n let mut v = &args[0];\n\n\n\n for arg in &args[1..] {\n\n let ord = v.compare(arg)?;\n\n\n\n if ord != Ordering::Less {\n\n r = false;\n\n break;\n\n }\n\n v = arg;\n\n }\n\n\n\n Ok(r.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 93, "score": 208356.2178469312 }, { "content": "/// `<=` returns whether each argument compares less than or equal to each\n\n/// successive argument.\n\n///\n\n/// Values of different types may not be compared. Attempts to do so will\n\n/// result in a `TypeMismatch` error.\n\nfn fn_le(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut r = true;\n\n let mut v = &args[0];\n\n\n\n for arg in &args[1..] 
{\n\n let ord = v.compare(arg)?;\n\n\n\n if ord == Ordering::Greater {\n\n r = false;\n\n break;\n\n }\n\n v = arg;\n\n }\n\n\n\n Ok(r.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 94, "score": 208356.12900039338 }, { "content": "/// `>=` returns whether each argument compares greater than or equal to each\n\n/// successive argument.\n\n///\n\n/// Values of different types may not be compared. Attempts to do so will\n\n/// result in a `TypeMismatch` error.\n\nfn fn_ge(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n let mut r = true;\n\n let mut v = &args[0];\n\n\n\n for arg in &args[1..] {\n\n let ord = v.compare(arg)?;\n\n\n\n if ord == Ordering::Less {\n\n r = false;\n\n break;\n\n }\n\n v = arg;\n\n }\n\n\n\n Ok(r.into())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 95, "score": 208356.12900039338 }, { "content": "/// `*` returns the product of all arguments.\n\n///\n\n/// Given no arguments, returns the multiplicative identity, `1`.\n\nfn fn_mul(ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n if args.is_empty() {\n\n return Ok(Integer::one().into());\n\n }\n\n\n\n let mut v = args[0].take();\n\n\n\n expect_number(&v)?;\n\n\n\n for arg in &args[1..] {\n\n expect_number(arg)?;\n\n v = mul_number(ctx, v, arg)?;\n\n }\n\n\n\n Ok(v)\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 96, "score": 208353.31960869994 }, { "content": "/// `+` returns the sum of all arguments.\n\n///\n\n/// Given no arguments, returns the additive identity, `0`.\n\nfn fn_add(ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n if args.is_empty() {\n\n return Ok(Integer::zero().into());\n\n }\n\n\n\n let mut v = args[0].take();\n\n\n\n expect_number(&v)?;\n\n\n\n for arg in &args[1..] 
{\n\n expect_number(arg)?;\n\n v = add_number(ctx, v, arg)?;\n\n }\n\n\n\n Ok(v)\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 97, "score": 208353.31960869994 }, { "content": "/// `recip` returns the reciprocal of the given numeric value.\n\n/// If the value is of type integer, the value returned will be a ratio.\n\nfn fn_recip(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n recip_number(args[0].take())\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 98, "score": 208353.13062087307 }, { "content": "/// `inf` returns whether all given arguments are equal to positive or negative infinity.\n\n/// Given no arguments, returns the value of positive infinity.\n\nfn fn_inf(_ctx: &Context, args: &mut [Value]) -> Result<Value, Error> {\n\n if args.is_empty() {\n\n Ok(f64::INFINITY.into())\n\n } else {\n\n let mut r = true;\n\n\n\n for arg in args {\n\n if get_float(arg)?.is_finite() {\n\n r = false;\n\n break;\n\n }\n\n }\n\n\n\n Ok(r.into())\n\n }\n\n}\n\n\n", "file_path": "src/ketos/function.rs", "rank": 99, "score": 208353.05850714043 } ]
Rust
src/value.rs
Mingun/serde-gff
2bfacbb0b6b361da749082f99edaec474ccc6c7c
use indexmap::IndexMap; use crate::{Label, LocString, ResRef}; use crate::index::{U64Index, I64Index, F64Index, StringIndex, ResRefIndex, LocStringIndex, BinaryIndex}; #[derive(Debug, Clone, PartialEq)] pub enum SimpleValueRef { Byte(u8), Char(i8), Word(u16), Short(i16), Dword(u32), Int(i32), Dword64(U64Index), Int64(I64Index), Float(f32), Double(F64Index), String(StringIndex), ResRef(ResRefIndex), LocString(LocStringIndex), Void(BinaryIndex), } #[derive(Debug, Clone, PartialEq)] pub enum SimpleValue { Byte(u8), Char(i8), Word(u16), Short(i16), Dword(u32), Int(i32), Dword64(u64), Int64(i64), Float(f32), Double(f64), String(String), ResRef(ResRef), LocString(LocString), Void(Vec<u8>), } #[derive(Debug, Clone, PartialEq)] pub enum Value { Byte(u8), Char(i8), Word(u16), Short(i16), Dword(u32), Int(i32), Dword64(u64), Int64(i64), Float(f32), Double(f64), String(String), ResRef(ResRef), LocString(LocString), Void(Vec<u8>), Struct(IndexMap<Label, Value>), List(Vec<Value>), } impl From<SimpleValue> for Value { #[inline] fn from(value: SimpleValue) -> Value { use self::SimpleValue::*; match value { Byte(val) => Value::Byte(val), Char(val) => Value::Char(val), Word(val) => Value::Word(val), Short(val) => Value::Short(val), Dword(val) => Value::Dword(val), Int(val) => Value::Int(val), Dword64(val) => Value::Dword64(val), Int64(val) => Value::Int64(val), Float(val) => Value::Float(val), Double(val) => Value::Double(val), String(val) => Value::String(val), ResRef(val) => Value::ResRef(val), LocString(val) => Value::LocString(val), Void(val) => Value::Void(val), } } }
use indexmap::IndexMap; use crate::{Label, LocString, ResRef}; use crate::index::{U64Index, I64Index, F64Index, StringIndex, ResRefIndex, LocStringIndex, BinaryIndex}; #[derive(Debug, Clone, PartialEq)] pub enum SimpleValueRef { Byte(u8), Char(i8), Word(u16), Short(i16), Dword(u32), Int(i32), Dword64(U64Index), Int64(I64Index), Float(f32), Double(F64Index), String(StringIndex), ResRef(ResRefIndex), LocString(LocStringIndex), Void(BinaryIndex), } #[derive(Debug, Clone, PartialEq)] pub enum SimpleValue { Byte(u8), Char(i8), Word(u16),
(val) => Value::LocString(val), Void(val) => Value::Void(val), } } }
Short(i16), Dword(u32), Int(i32), Dword64(u64), Int64(i64), Float(f32), Double(f64), String(String), ResRef(ResRef), LocString(LocString), Void(Vec<u8>), } #[derive(Debug, Clone, PartialEq)] pub enum Value { Byte(u8), Char(i8), Word(u16), Short(i16), Dword(u32), Int(i32), Dword64(u64), Int64(i64), Float(f32), Double(f64), String(String), ResRef(ResRef), LocString(LocString), Void(Vec<u8>), Struct(IndexMap<Label, Value>), List(Vec<Value>), } impl From<SimpleValue> for Value { #[inline] fn from(value: SimpleValue) -> Value { use self::SimpleValue::*; match value { Byte(val) => Value::Byte(val), Char(val) => Value::Char(val), Word(val) => Value::Word(val), Short(val) => Value::Short(val), Dword(val) => Value::Dword(val), Int(val) => Value::Int(val), Dword64(val) => Value::Dword64(val), Int64(val) => Value::Int64(val), Float(val) => Value::Float(val), Double(val) => Value::Double(val), String(val) => Value::String(val), ResRef(val) => Value::ResRef(val), LocString
random
[ { "content": "/// Возможные представления ключа отображений в форматах данных\n\nenum Key {\n\n /// Ключ отображения является строкой, символом или массивом байт и соответствует\n\n /// метке поля\n\n Label(Label),\n\n /// Ключ отображения является числом и соответствует элементу многоязыковой строки\n\n String(StringKey),\n\n}\n", "file_path": "src/de/value.rs", "rank": 0, "score": 39430.20043977518 }, { "content": "#[derive(Debug)]\n\nenum Struct {\n\n /// Структура без полей\n\n NoFields,\n\n /// Структура, состоящая только из одного поля, содержит индекс этого поля\n\n OneField(usize),\n\n /// Структура, состоящая из двух и более полей. Содержит индекс списка и количество полей\n\n MultiField { list: FieldListIndex, fields: u32 }\n\n}\n\nimpl Struct {\n\n /// Преобразует промежуточное представление в окончательное, которое может быть записано в файл\n\n #[inline]\n\n fn into_raw(&self, offsets: &[u32]) -> raw::Struct {\n\n use self::Struct::*;\n\n\n\n match *self {\n\n NoFields => raw::Struct { tag: 0, offset: 0, fields: 0 },\n\n OneField(index) => raw::Struct { tag: 0, offset: index as u32, fields: 1 },\n\n MultiField { list, fields } => raw::Struct { tag: 0, offset: offsets[list.0], fields },\n\n }\n\n }\n\n}\n\n\n\n/// Промежуточное представление сериализуемого поля структуры. Содержит данные, которые после\n\n/// небольшого преобразования, возможного только после окончания сериализации, могут\n\n/// быть записаны в файл\n", "file_path": "src/ser/mod.rs", "rank": 1, "score": 39430.20043977518 }, { "content": "#[derive(Debug)]\n\nenum Field {\n\n /// Поле, представленное значением без внутренней структуры. Содержит метку поля и его значение\n\n Simple { label: LabelIndex, value: SimpleValueRef },\n\n /// Поле, представленное значением с внутренней структурой. 
Содержит метку поля и индекс\n\n /// промежуточного представления структуры в массиве [`structs`](struct.Serializer.html#field.structs)\n\n Struct { label: LabelIndex, struct_: StructIndex },\n\n /// Поле, представленное списком значений. Содержит метку поля и индекс списка в массиве\n\n /// [`list_indices`](struct.Serializer.html#field.list_indices)\n\n List { label: LabelIndex, list: ListIndex },\n\n}\n\nimpl Field {\n\n /// Преобразует промежуточное представление в окончательное, которое может быть записано в файл\n\n #[inline]\n\n fn into_raw(&self, offsets: &[u32]) -> Result<raw::Field> {\n\n use self::Field::*;\n\n\n\n Ok(match self {\n\n Simple { label, value } => value.into_raw(label.0)?,\n\n Struct { label, struct_ } => {\n\n let mut data = [0u8; 4];\n", "file_path": "src/ser/mod.rs", "rank": 2, "score": 39430.20043977518 }, { "content": "/// Типаж, реализуемый специальными структурами, хранящими индексы на записи в GFF-файле,\n\n/// позволяющий преобразовать их в реальное смещение для чтения информации из файла.\n\npub trait Index {\n\n /// Получает смещение от начала GFF-файла, в котором находятся индексируемые данные\n\n fn offset(&self, header: &Header) -> u64;\n\n}\n\n\n\n/// Макрос для объявления типизированной обертки над числом (или числами),\n\n/// представляющем(ими) индекс одной из структур данных в файле.\n\n///\n\n/// # Параметры 1\n\n/// - `$name`: Имя генерируемой структуры.\n\n/// - `$field`: Имя поля в заголовке, хранящее базовое смещение для структур,\n\n/// к которым производится доступ по данному индексу\n\n///\n\n/// # Параметры 2\n\n/// - `$name`: Имя генерируемой структуры. Структура реализует типаж `From` для\n\n/// конструирования из `u32`\n\n/// - `$field`: Имя поля в заголовке, хранящее базовое смещение для структур,\n\n/// к которым производится доступ по данному индексу\n\n/// - `$multiplier`: множитель для индекса, переводящий его в смещение в байтах\n\nmacro_rules! 
index {\n", "file_path": "src/index.rs", "rank": 3, "score": 36318.07972912323 }, { "content": "//--------------------------------------------------------------------------------------------------\n\npub trait TokenEmitter {\n\n /// Производит токен, открывающий структуру\n\n ///\n\n /// # Параметры\n\n /// - `tag`: Уникальный идентификатор типа структуры\n\n /// - `count`: Количество полей внутри данной структуры\n\n fn begin(&self, tag: Tag, count: u32) -> Token;\n\n /// Производит токен, закрывающий структуру\n\n fn end(&self) -> Token;\n\n /// Возвращает завершающее состояние, в которое необходимо перейти после испускания\n\n /// последнего токена\n\n fn next(self, state: Box<State>) -> State;\n\n}\n\n\n\n/// Корневая структура, представляющая весь GFF-документ\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Root;\n\nimpl TokenEmitter for Root {\n\n fn begin(&self, tag: Tag, count: u32) -> Token {\n\n Token::RootBegin { tag, count }\n", "file_path": "src/parser/states.rs", "rank": 4, "score": 33251.29122210697 }, { "content": "//! Содержит реализацию структуры, описывающей ссылку на ресурс и реализацию типажей для\n\n//! 
конвертации других типов данных в ссылку и обратно\n\n\n\nuse std::fmt;\n\nuse std::str::{self, FromStr, Utf8Error};\n\nuse std::string::FromUtf8Error;\n\n\n\n/// Представляет ссылку на игровой ресурс, которым может быть шаблон объекта\n\n#[derive(Clone, PartialEq, Eq, Hash)]\n\npub struct ResRef(pub(crate) Vec<u8>);\n\n\n\nimpl ResRef {\n\n /// Возвращает представление данной ссылки на ресурс как строки, если она представлена в виде `UTF-8` строки\n\n #[inline]\n\n pub fn as_str(&self) -> Result<&str, Utf8Error> {\n\n str::from_utf8(&self.0)\n\n }\n\n /// Возвращает представление данной ссылки на ресурс как строки, если она представлена в виде `UTF-8` строки\n\n #[inline]\n\n pub fn as_string(self) -> Result<String, FromUtf8Error> {\n", "file_path": "src/resref.rs", "rank": 5, "score": 31180.790055006353 }, { "content": " String::from_utf8(self.0)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for ResRef {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Ok(value) = str::from_utf8(&self.0) {\n\n return write!(f, \"{}\", value);\n\n }\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl fmt::Display for ResRef {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let value = self.as_str().map_err(|_| fmt::Error)?;\n\n write!(f, \"{}\", value)\n\n }\n\n}\n", "file_path": "src/resref.rs", "rank": 6, "score": 31170.958377817937 }, { "content": "\n\nimpl Into<String> for ResRef {\n\n #[inline]\n\n fn into(self) -> String {\n\n String::from_utf8(self.0).expect(\"ResRef contains non UTF-8 string\")\n\n }\n\n}\n\n\n\nimpl<'a> Into<&'a str> for &'a ResRef {\n\n #[inline]\n\n fn into(self) -> &'a str {\n\n str::from_utf8(&self.0).expect(\"ResRef contains non UTF-8 string\")\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a str> for ResRef {\n\n #[inline]\n\n fn from(str: &'a str) -> Self { ResRef(str.as_bytes().to_owned()) }\n\n}\n\n\n\nimpl FromStr for ResRef {\n\n type Err = ();\n\n\n\n #[inline]\n\n fn from_str(str: &str) -> Result<Self, Self::Err> { 
Ok(str.into()) }\n\n}\n", "file_path": "src/resref.rs", "rank": 7, "score": 31170.958377817937 }, { "content": "#[inline]\n\npub fn to_vec<T>(signature: Signature, value: &T) -> Result<Vec<u8>>\n\n where T: Serialize + ?Sized,\n\n{\n\n let mut vec = Vec::new();\n\n to_writer(&mut vec, signature, value)?;\n\n Ok(vec)\n\n}\n\n\n\n/// Реализует метод, возвращающий ошибку при попытке сериализовать значение, с описанием\n\n/// причины, что GFF не поддерживает данный тип на верхнем уровне и требуется обернуть его\n\n/// в структуру\n\nmacro_rules! unsupported {\n\n ($ser_method:ident ( $($type:ty),* ) ) => (\n\n unsupported!($ser_method($($type),*) -> Self::Ok);\n\n );\n\n ($ser_method:ident ( $($type:ty),* ) -> $result:ty) => (\n\n fn $ser_method(self, $(_: $type),*) -> Result<$result> {\n\n Err(Error::Serialize(concat!(\n\n \"`\", stringify!($ser_method), \"` can't be implemented in GFF format. Wrap value to the struct and serialize struct\"\n\n ).into()))\n", "file_path": "src/ser/mod.rs", "rank": 8, "score": 20322.557960874507 }, { "content": "#[inline]\n\npub fn to_writer<W, T>(writer: &mut W, signature: Signature, value: &T) -> Result<()>\n\n where W: Write,\n\n T: Serialize + ?Sized,\n\n{\n\n let mut s = Serializer::default();\n\n value.serialize(&mut s)?;\n\n s.write(writer, signature, Version::V3_2)\n\n}\n\n/// Сериализует значение в массив. Значение должно являться Rust структурой или перечислением\n", "file_path": "src/ser/mod.rs", "rank": 9, "score": 18595.313407246478 }, { "content": "pub mod value;\n\npub mod error;\n\npub mod raw;\n\n\n\n// Модули, чье содержимое реэкспортируется, разделено для удобства сопровождения\n\nmod label;\n\nmod resref;\n\nmod string;\n\n\n\npub use crate::label::*;\n\npub use crate::resref::*;\n\npub use crate::string::*;\n\n\n\n// Модули для поддержки инфраструктуры serde\n\npub mod de;\n\npub mod ser;\n", "file_path": "src/lib.rs", "rank": 10, "score": 12.355118754815647 }, { "content": "//! 
Содержит реализацию структуры, описывающей название поля в GFF файле и реализацию типажей для\n\n//! конвертации других типов данных в метку и обратно\n\n\n\nuse std::fmt;\n\nuse std::result::Result;\n\nuse std::str::{from_utf8, FromStr, Utf8Error};\n\nuse crate::error::Error;\n\n\n\n/// Описание названия поля структуры GFF файла. GFF файл состоит из дерева структур, а каждая\n\n/// структура -- из полей с именем и значением. Имена полей представлены данной структурой\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Label([u8; 16]);\n\n\n\nimpl Label {\n\n /// Возвращает представление данной метки как текста, если он представлен в виде `UTF-8` строки\n\n pub fn as_str(&self) -> Result<&str, Utf8Error> {\n\n for i in 0..self.0.len() {\n\n // Во внутреннем представлении данные метки продолжаются до первого нулевого символа,\n\n // однако сам нулевой символ не храниться -- это просто заполнитель\n\n if self.0[i] == 0 {\n", "file_path": "src/label.rs", "rank": 11, "score": 9.784376412154366 }, { "content": "//! Содержит реализации структур, описывающих строки, хранящиеся в GFF файле\n\nuse std::fmt;\n\nuse std::mem::transmute;\n\nuse std::collections::HashMap;\n\n\n\n/// Маска, определяющая идентификатор строки\n\nconst USER_TLK_MASK: u32 = 0x8000_0000;\n\n\n\n/// Индекс в файле `dialog.tlk`, содержащий локализованный текст\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct StrRef(pub(crate) u32);\n\n\n\nimpl StrRef {\n\n /// Определяет, является ли строка индексом не из основного TLK файла игры, а из TLK\n\n /// файла модуля. 
Строка является строкой из TLK файла модуля, если старший бит в ее\n\n /// идентификаторе взведен\n\n #[inline]\n\n pub fn is_user(&self) -> bool { self.0 & USER_TLK_MASK != 0 }\n\n\n\n /// Определяет индекс строки в TLK файле\n", "file_path": "src/string.rs", "rank": 12, "score": 9.563829971639441 }, { "content": "use crate::index::LabelIndex;\n\nuse crate::value::SimpleValueRef;\n\nuse super::Tag;\n\n\n\n/// Возможные виды событий, которые могут возникнуть при чтении GFF файла. Отражают\n\n/// появление в потоке значений из файла и структурных единиц (списков, структур, ...)\n\n#[derive(Debug, Clone)]\n\npub enum Token {\n\n /// Событие о начале разбора GFF-читателем структуры с индексом 0 в GFF файле.\n\n ///\n\n /// Возникает после чтения заголовка файла, и позиционирования на начало первой\n\n /// (с индексом 0) структуры, но перед ее чтением. Следующим событием может быть:\n\n /// - `RootEnd`: если корневая структура не содержит полей\n\n /// - `StructBegin`: если первым полем корневой структуры является значение с типом `Struct`\n\n /// - `ListBegin`: если первым полем корневой структуры является значение с типом `List`\n\n /// - `Value`: если первым полем корневой структуры является значение с любым другим типом\n\n RootBegin {\n\n /// Уникальный (по задумке) идентификатор типа структуры в файле. На самом деле движок,\n\n /// похоже, не проверяет уникальность этих значений\n\n tag: Tag,\n", "file_path": "src/parser/token.rs", "rank": 13, "score": 8.996445780485248 }, { "content": "//! Содержит реализацию структуры, описывающей версию GFF файла, реализацию типажей для\n\n//! конвертации других типов данных в версию и обратно и известные версии файлов\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\nuse std::io::{Read, Write, Result};\n\n\n\n/// Версия формата файла. 
Записана во вторых 4-х байтах файла, сразу после сигнатуры\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Version([u8; 4]);\n\n\n\nimpl Version {\n\n /// Версия GFF формата, являющаяся текущей. Заголовки, создаваемые без указания версии,\n\n /// имеют данную версию в качестве умолчания.\n\n //TODO: После решения https://github.com/rust-lang/rust/issues/24111 можно сделать функции\n\n // константными и использовать метод `new`.\n\n pub const V3_2: Version = Version(*b\"V3.2\");\n\n\n\n /// Создает новый объект версии из старшей и младшей половины версии\n\n #[inline]\n\n pub fn new(major: u8, minor: u8) -> Self {\n", "file_path": "src/ver.rs", "rank": 14, "score": 8.96962807468785 }, { "content": "//! Содержит реализацию структуры, описывающей сигнатуру GFF файла, реализацию типажей для\n\n//! конвертации других типов данных в сигнатуру и обратно и известные форматы файлов\n\n\n\nuse std::io::{Read, Write, Result};\n\n\n\n/// Определяет назначение файла. Сигнатура записана в первых 4-х байтах файла на диске\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum Signature {\n\n /// Информация о модуле\n\n IFO,\n\n\n\n /// Описание области\n\n ARE,\n\n /// Инстанции игровых объектов и динамические свойства области\n\n GIT,\n\n /// Комментарий к области\n\n GIC,\n\n\n\n /// Шаблон (blueprint) существа\n\n UTC,\n", "file_path": "src/sig.rs", "rank": 15, "score": 8.899119098022155 }, { "content": "//! Содержит описания структур заголовка GFF файла\n\n\n\nuse std::cmp::max;\n\nuse std::io::{Read, Write, Result};\n\nuse byteorder::{LE, ReadBytesExt, WriteBytesExt};\n\n\n\npub use crate::sig::*;\n\npub use crate::ver::*;\n\n\n\n/// Описание области файла, описывающей местоположение списков записей в файле\n\n#[derive(Debug, Default)]\n\npub struct Section {\n\n /// Смещение в байтах от начала файла в сериализованном виде\n\n pub offset: u32,\n\n /// Количество записей по смещению `offset`. 
Размер записи зависит от конкретного поля\n\n pub count: u32,\n\n}\n\n\n\nimpl Section {\n\n /// Читает описание области из потока\n", "file_path": "src/header.rs", "rank": 16, "score": 8.634149382130577 }, { "content": "//! Вспомогательный модуль, содержащий описание структур, непосредственно хранимых\n\n//! в GFF файле на диске. Обычно нет необходимости использовать данный модуль -- он\n\n//! может понадобиться только при отладке\n\nuse std::fmt;\n\nuse std::io::{Cursor, Read, Seek, SeekFrom, Write, Result};\n\nuse byteorder::{LE, ReadBytesExt, WriteBytesExt};\n\n\n\nuse crate::header::Header;\n\nuse crate::Label;\n\n\n\n/// Типы полей, которые возможно встретить в GFF файле\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\n#[repr(u32)]\n\npub enum FieldType {\n\n /// Беззнаковое байтовое значение (от 0 до 255), занимающее один байт\n\n Byte,\n\n /// Символ текста в диапазоне `0x00-0xFF`, занимающий один байт\n\n Char,\n\n /// Беззнаковое целое (от 0 до 65535), занимающее 2 байта\n\n Word,\n", "file_path": "src/raw.rs", "rank": 17, "score": 8.620390561788515 }, { "content": "//! 
Состояния парсера GFF-формата\n\n\n\nuse std::io::{Read, Seek};\n\nuse crate::index::{FieldIndex, FieldIndicesIndex, LabelIndex, ListIndicesIndex, StructIndex};\n\nuse crate::error::{Error, Result};\n\nuse crate::parser::{Parser, Token, Tag};\n\nuse self::State::*;\n\n\n\n/// Возможные состояния, в которых может находиться парсер\n\n#[derive(Debug, Clone)]\n\npub enum State {\n\n /// Состояние, из которого начинается разбор GFF-файла.\n\n /// Переход из данного состояния генерирует токен [`RootBegin`].\n\n ///\n\n /// [`RootBegin`]: ../struct.Token.html#variant.RootBegin\n\n Start(ReadStruct<Root>),\n\n /// Состояние, в котором читается метка поля структуры и идет подготовка к чтению значения.\n\n /// Переход из данного состояния генерирует токен [`Label`].\n\n ///\n\n /// [`Label`]: ../struct.Token.html#variant.Label\n", "file_path": "src/parser/states.rs", "rank": 19, "score": 8.009711709944328 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct LocString {\n\n /// Индекс в TLK файле, содержащий локализованный текст\n\n pub str_ref: StrRef,\n\n /// Список локализованных строк для каждого языка и пола\n\n pub strings: Vec<SubString>,\n\n}\n\n\n\n/// Локализуемая строка, представленная в виде, в котором некорректные значения\n\n/// непредставимы.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum GffString {\n\n /// Внешнее представление строки в виде индекса в TLK файле, содержащем локализованный\n\n /// текст. В зависимости от локализации текст будет разным\n\n External(StrRef),\n\n /// Внутреннее представление строки, хранимое внутри самого файла -- по строке для каждого\n\n /// языка и пола персонажа\n\n Internal(HashMap<StringKey, String>),\n\n}\n\nimpl From<LocString> for GffString {\n", "file_path": "src/string.rs", "rank": 20, "score": 7.221906341792231 }, { "content": "//! 
Реализация структуры, описывающей ошибки кодирования или декодирования GFF\n\n\n\nuse std::borrow::Cow;\n\nuse std::fmt;\n\nuse std::error;\n\nuse std::io;\n\nuse std::result;\n\nuse std::str::Utf8Error;\n\nuse std::string::FromUtf8Error;\n\nuse serde::de;\n\nuse serde::ser;\n\n\n\nuse crate::parser::Token;\n\nuse self::Error::*;\n\n\n\n/// Виды ошибок, который могут возникнуть при чтении и интерпретации GFF-файла\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Произошла ошибка чтения или записи из/в нижележащего буфера\n\n Io(io::Error),\n", "file_path": "src/error.rs", "rank": 21, "score": 7.070580983520888 }, { "content": " /// Строка предназначена для персонажа мужского или неопределенного пола\n\n Male = 0,\n\n /// Строка предназначена для персонажа женского пола\n\n Female = 1,\n\n}\n\n\n\n/// Ключ, используемый для индексации локализуемых строк во внутреннем представлении\n\n/// строк (когда строки внедрены в GFF файл, а не используются ссылки на строки в TLK\n\n/// файле).\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct StringKey(pub(crate) u32);\n\nimpl StringKey {\n\n /// Язык, на котором записан текст этой части многоязыковой строки\n\n pub fn language(&self) -> Language { unsafe { transmute(self.0 >> 1) } }\n\n /// Пол персонажа, для которого написан текст этой части многоязыковой строки\n\n pub fn gender(&self) -> Gender { unsafe { transmute(self.0 % 2) } }\n\n}\n\nimpl From<(Language, Gender)> for StringKey {\n\n #[inline]\n\n fn from(value: (Language, Gender)) -> Self {\n", "file_path": "src/string.rs", "rank": 22, "score": 6.490073528430288 }, { "content": " StringKey(((value.0 as u32) << 1) | value.1 as u32)\n\n }\n\n}\n\n/// Преобразует ключ в число, в котором он храниться в GFF файле по формуле:\n\n/// ```rust,ignore\n\n/// ((self.language() as u32) << 1) | self.gender() as u32\n\n/// ```\n\nimpl Into<u32> for StringKey {\n\n #[inline]\n\n fn into(self) -> u32 { self.0 }\n\n}\n\n\n\n/// Часть локализованной строки, хранящая 
информацию для одного языка и пола\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct SubString {\n\n /// Язык, на котором записан текст этой части многоязыковой строки, и пол\n\n /// персонажа, для которого он написан\n\n pub key: StringKey,\n\n /// Текст многоязыковой строки для указанного пола и языка\n\n pub string: String,\n", "file_path": "src/string.rs", "rank": 23, "score": 6.37901588231122 }, { "content": "//! Десериализатор для формата Bioware GFF (Generic File Format)\n\n\n\nuse std::io::{Read, Seek};\n\nuse encoding::{DecoderTrap, EncodingRef};\n\nuse serde::de::{self, IntoDeserializer, Visitor, DeserializeSeed};\n\n\n\nuse crate::value::{SimpleValueRef, Value};\n\nuse crate::error::{Error, Result};\n\nuse crate::parser::{Parser, Token};\n\n\n\nmod string;\n\nmod value;\n\n\n\n/// Структура для поддержки чтения GFF файлов в экосистеме serde\n\npub struct Deserializer<R: Read + Seek> {\n\n /// Итератор, поставляющий токены в процессе разбора файла\n\n parser: Parser<R>,\n\n /// Подсмотренный вперед на один переход токен\n\n peeked: Option<Token>,\n\n}\n", "file_path": "src/de/mod.rs", "rank": 24, "score": 6.279904080426703 }, { "content": "//! Сериализатор для формата Bioware GFF (Generic File Format)\n\n\n\nuse std::io::Write;\n\nuse byteorder::{LE, WriteBytesExt};\n\nuse indexmap::IndexSet;\n\nuse serde::ser::{self, Impossible, Serialize, SerializeMap, SerializeSeq,\n\n SerializeStruct, SerializeTuple, SerializeTupleStruct,\n\n SerializeTupleVariant, SerializeStructVariant};\n\n\n\nuse crate::Label;\n\nuse crate::error::{Error, Result};\n\nuse crate::header::{Header, Section, Signature, Version};\n\nuse crate::index::LabelIndex;\n\nuse crate::value::SimpleValueRef;\n\nuse crate::raw::{self, FieldType};\n\n\n\nmod value;\n\n\n\n/// Вспомогательная структура, описывающая индекс структуры, для типобезопасности\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "src/ser/mod.rs", "rank": 25, "score": 6.211716803376768 }, { "content": "//! 
Реализация потокового парсера GFF файла. См. описание структуры [`Parser`](struct.Parser.html)\n\n\n\nuse std::iter::FusedIterator;\n\nuse std::io::{Read, Seek, SeekFrom};\n\nuse byteorder::{LE, ReadBytesExt};\n\nuse encoding::{EncodingRef, DecoderTrap};\n\nuse encoding::all::UTF_8;\n\n\n\nuse crate::{Label, SubString, ResRef, StrRef};\n\nuse crate::error::{Error, Result};\n\nuse crate::header::Header;\n\nuse crate::index::{Index, LabelIndex, U64Index, I64Index, F64Index, StringIndex, ResRefIndex, LocStringIndex, BinaryIndex};\n\nuse crate::string::{LocString, StringKey};\n\nuse crate::value::{SimpleValue, SimpleValueRef};\n\n\n\nmod token;\n\nmod states;\n\n\n\nuse self::states::State;\n\npub use self::token::Token;\n", "file_path": "src/parser/mod.rs", "rank": 26, "score": 6.0416681901326355 }, { "content": " /// Итальянский язык\n\n Italian = 3,\n\n /// Испанский язык\n\n Spanish = 4,\n\n /// Польский язык\n\n Polish = 5,\n\n /// Корейский язык\n\n Korean = 128,\n\n /// Традиционный китайский\n\n ChineseTraditional = 129,\n\n /// Упрощенный китайский\n\n ChineseSimplified = 130,\n\n /// Японский\n\n Japanese= 131,\n\n}\n\n\n\n/// Виды пола персонажа, на которых могут храниться локализованные строки в объекте `LocString`\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\n#[repr(u32)]\n\npub enum Gender {\n", "file_path": "src/string.rs", "rank": 27, "score": 5.832495951405349 }, { "content": " };\n\n\n\n Ok((token, State::ReadItems(state)))\n\n }\n\n}\n\n//--------------------------------------------------------------------------------------------------\n\n/// Подготовительное состояние для чтения элемента списка. 
Читает из файла индекс\n\n/// структуры-элемента и переходит в состояние его чтения.\n\n#[derive(Debug, Clone)]\n\npub struct ReadItems {\n\n /// Индекс в таблице индексов, содержащий структуру-элемент для чтения\n\n index: ListIndicesIndex,\n\n /// Количество элементов, которое нужно прочитать\n\n count: u32,\n\n /// Состояние, в которое нужно перейти после завершения чтения списка\n\n state: Box<State>,\n\n}\n\nimpl ReadItems {\n\n /// # Возвращаемое значение\n\n /// Возвращает генерируемый в процессе разбора токен и новое состояние парсера\n", "file_path": "src/parser/states.rs", "rank": 28, "score": 5.754886057391146 }, { "content": "\n\n/// Уникальный идентификатор типа структуры, хранимой в GFF-файле\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Tag(u32);\n\n\n\n/// Реализует потоковый (наподобие SAX) парсер GFF файла. Парсер реализует интерфейс\n\n/// итератора по [токенам]. Каждый вызов метода [`next_token`] возвращает следующий токен\n\n/// из потока, который сразу же может быть использован для анализа или сохранен для\n\n/// дальнейшего использования.\n\n///\n\n/// # События разбора\n\n/// Парсер представляет собой pull-down парсер, т.е. для получения данных его нужно опрашивать внешним\n\n/// циклом (в противоположность push-down парсеру, который испускает события при разборе очередного\n\n/// элемента).\n\n///\n\n/// Так как GFF файл может быть представлен в XML виде, и эта структура проще для представления в тексте,\n\n/// то ниже показан пример файла, в котором отмечены места после которых парсер генерирует токены при\n\n/// разборе. 
В виде кода Rust описанная структура данных может быть представлена таким образом:\n\n///\n\n/// ```rust,no_run\n", "file_path": "src/parser/mod.rs", "rank": 29, "score": 5.679314467192963 }, { "content": " #[inline]\n\n pub fn code(&self) -> u32 { self.0 & !USER_TLK_MASK }\n\n}\n\n\n\nimpl fmt::Debug for StrRef {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"code: {}, user: {}\", self.code(), self.is_user())\n\n }\n\n}\n\n\n\n/// Виды языков, на которых могут храниться локализованные строки в объекте `LocString`\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\n#[repr(u32)]\n\npub enum Language {\n\n /// Английский язык\n\n English = 0,\n\n /// Французский язык\n\n French = 1,\n\n /// Немецкий язык\n\n German = 2,\n", "file_path": "src/string.rs", "rank": 31, "score": 5.677337158159702 }, { "content": " ($(#[$attrs:meta])* $name:ident, $field:ident) => (\n\n $(#[$attrs])*\n\n #[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n pub struct $name(pub(crate) u32, pub(crate) u32);\n\n\n\n impl Index for $name {\n\n #[inline]\n\n fn offset(&self, header: &Header) -> u64 {\n\n let start = header.$field.offset as u64;\n\n let offset = self.0 as u64 + self.1 as u64 * 4;\n\n\n\n start + offset\n\n }\n\n }\n\n impl Add<u32> for $name {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: u32) -> Self {\n\n $name(self.0, self.1 + rhs)\n\n }\n", "file_path": "src/index.rs", "rank": 32, "score": 5.6495779814660025 }, { "content": " ///\n\n /// # Параметры\n\n /// - `token`: Токен, полученный предшествующим вызовом [`next_token`]\n\n ///\n\n /// [`next_token`]: #method.next_token\n\n #[inline]\n\n pub fn skip_next(&mut self, token: Token) {\n\n self.state = self.state.clone().skip(token);\n\n }\n\n//-------------------------------------------------------------------------------------------------\n\n// Завершение чтения комплексных данных\n\n//-------------------------------------------------------------------------------------------------\n\n /// 
Читает из файла значение метки по указанному индексу.\n\n /// Не меняет позицию чтения в файле\n\n pub fn read_label(&mut self, index: LabelIndex) -> Result<Label> {\n\n let old = self.offset()?;\n\n self.seek(index)?;\n\n\n\n let mut label = [0u8; 16];\n\n self.reader.read_exact(&mut label)?;\n", "file_path": "src/parser/mod.rs", "rank": 33, "score": 5.626385108499781 }, { "content": " );\n\n }\n\n\n\n /// Создает тесты сериализации перечислений, для случаев, работающих одинаково на верхнем\n\n /// уровне, и как поле структуры\n\n macro_rules! enum_tests {\n\n ($mode:tt) => (\n\n #[derive(Serialize, Copy, Clone)]\n\n struct Value {\n\n value: u32,\n\n }\n\n /// Тестирует запись перечислений со значениями разных видов\n\n #[test]\n\n fn test_enum_externally_tagged() {\n\n #[derive(Serialize)]\n\n enum E {\n\n Unit,\n\n Newtype1(u32),\n\n Newtype2(Value),\n\n Tuple1(u32, u32),\n", "file_path": "src/ser/mod.rs", "rank": 34, "score": 5.589085735879617 }, { "content": " #[inline]\n\n pub fn read<R: Read>(reader: &mut R) -> Result<Self> {\n\n let mut sig = [0u8; 4];\n\n reader.read_exact(&mut sig)?;\n\n Ok(sig.into())\n\n }\n\n /// Записывает 4 байта сигнатуры в поток\n\n #[inline]\n\n pub fn write<W: Write>(&self, writer: &mut W) -> Result<()> {\n\n writer.write_all(self.as_ref())\n\n }\n\n}\n\n\n\nimpl From<[u8; 4]> for Signature {\n\n fn from(arr: [u8; 4]) -> Self {\n\n use self::Signature::*;\n\n\n\n match &arr {\n\n b\"IFO \" => IFO,\n\n\n", "file_path": "src/sig.rs", "rank": 35, "score": 5.480640158847482 }, { "content": " data: self,\n\n })\n\n }\n\n}\n\n\n\n/// Состояние для чтения одной указанной в индексе структуры данных.\n\n///\n\n/// В состоянии осуществляется переход к месту хранения структуры в файле,\n\n/// чтение индекса поля или списка полей и переход в состояние [`ReadField`],\n\n/// если поле одно, или [`ReadFields`], если их несколько.\n\n#[derive(Debug, Clone)]\n\npub struct ReadStruct<Data: TokenEmitter> {\n\n /// Индекс структуры, которую 
необходимо прочитать\n\n index: StructIndex,\n\n /// Состояние, в которое нужно вернуться\n\n state: Box<State>,\n\n /// Дополнительные данные\n\n data: Data,\n\n}\n\nimpl<Data: TokenEmitter> ReadStruct<Data> {\n", "file_path": "src/parser/states.rs", "rank": 37, "score": 5.395885305273875 }, { "content": " }\n\n );\n\n\n\n ($(#[$attrs:meta])* $name:ident, $field:ident, $multiplier:expr) => (\n\n $(#[$attrs])*\n\n #[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n pub struct $name(pub(crate) u32);\n\n impl Index for $name {\n\n #[inline]\n\n fn offset(&self, header: &Header) -> u64 {\n\n let start = header.$field.offset as u64;\n\n let offset = self.0 as u64 * $multiplier;\n\n\n\n start + offset\n\n }\n\n }\n\n impl From<u32> for $name {\n\n fn from(value: u32) -> Self { $name(value) }\n\n }\n\n );\n", "file_path": "src/index.rs", "rank": 38, "score": 5.339419524750465 }, { "content": " /// # Параметры\n\n /// - `reader`: Источник данных для чтения файла\n\n /// - `encoding`: Кодировка для декодирования символов в строках\n\n /// - `trap`: Способ обработки символов в строках, которые не удалось декодировать с\n\n /// использованием выбранной кодировки\n\n pub fn with_encoding(mut reader: R, encoding: EncodingRef, trap: DecoderTrap) -> Result<Self> {\n\n let header = Header::read(&mut reader)?;\n\n\n\n Ok(Parser { header, reader, encoding, trap, state: State::default() })\n\n }\n\n /// Возвращает следующий токен или ошибку, если данных не осталось или при их чтении возникли\n\n /// проблемы.\n\n pub fn next_token(&mut self) -> Result<Token> {\n\n let (token, next) = self.state.clone().next(self)?;\n\n self.state = next;\n\n Ok(token)\n\n }\n\n /// Быстро пропускает всю внутреннюю структуру, переводя парсер в состояние, при котором\n\n /// вызов [`next_token`] вернет следующий структурный элемент после пропущенного (следующее\n\n /// поле структуры или элемент списка).\n", "file_path": "src/parser/mod.rs", "rank": 39, "score": 5.339419524750465 }, { "content": " 
next.next(parser)\n\n },\n\n _ => {\n\n let value = parser.read_value_ref(self.tag)?;\n\n let token = Token::Value(value);\n\n\n\n Ok((token, *self.state))\n\n },\n\n }\n\n }\n\n #[inline]\n\n fn skip(self) -> State { *self.state }\n\n}\n\n//--------------------------------------------------------------------------------------------------\n\n/// Состояние чтения списка полей. Осуществляет переход к индексу списка и чтение поля\n\n#[derive(Debug, Clone)]\n\npub struct ReadFields {\n\n /// Индекс поля, которое необходимо прочитать\n\n index: FieldIndicesIndex,\n\n /// Количество полей, которое нужно прочитать\n", "file_path": "src/parser/states.rs", "rank": 40, "score": 5.329393758832475 }, { "content": " /// кодировкой декодера и возвращает полученную строку. Побочный эффект -- переход по указанному адресу\n\n pub fn read_string(&mut self, index: StringIndex) -> Result<String> {\n\n self.seek(index)?;\n\n self.read_string_impl()\n\n }\n\n /// Читает 1 байт длины и следующие за ними байты массива, возвращает прочитанный массив,\n\n /// обернутый в `ResRef`. Побочный эффект -- переход по указанному адресу\n\n pub fn read_resref(&mut self, index: ResRefIndex) -> Result<ResRef> {\n\n self.seek(index)?;\n\n\n\n let size = self.reader.read_u8()? as usize;\n\n let mut bytes = Vec::with_capacity(size);\n\n unsafe { bytes.set_len(size); }\n\n\n\n self.reader.read_exact(&mut bytes)?;\n\n Ok(ResRef(bytes))\n\n }\n\n /// Читает из файла значение поля по указанному индексу. 
Побочный эффект -- переход по указанному адресу\n\n pub fn read_loc_string(&mut self, index: LocStringIndex) -> Result<LocString> {\n\n self.seek(index)?;\n", "file_path": "src/parser/mod.rs", "rank": 41, "score": 5.162135174664691 }, { "content": " /// [`Field`]: struct.Field.html\n\n #[inline]\n\n pub fn is_complex(&self) -> bool {\n\n use self::FieldType::*;\n\n\n\n match *self {\n\n Dword64 | Int64 | Double | String | ResRef | LocString | Void => true,\n\n _ => false\n\n }\n\n }\n\n /// Возвращает `true`, если данные поля указанного типа хранятся внутри структуры [`Field`]\n\n ///\n\n /// [`Field`]: struct.Field.html\n\n #[inline]\n\n pub fn is_simple(&self) -> bool {\n\n !self.is_complex() && *self != FieldType::Struct && *self != FieldType::List\n\n }\n\n //TODO: После стабилизации https://github.com/rust-lang/rust/issues/33417 полностью перенести в TryFrom\n\n #[inline]\n\n fn from_u32(value: u32) -> Option<Self> {\n", "file_path": "src/raw.rs", "rank": 42, "score": 5.082494559005608 }, { "content": " fn next(self) -> Result<(Token, State)> {\n\n Ok((self.data.end(), *self.state))\n\n }\n\n #[inline]\n\n fn skip(self) -> State { *self.state }\n\n}\n\n//--------------------------------------------------------------------------------------------------\n\n/// Состояние чтения метки поля. 
Осуществляет переход к нужному полю, чтение метки и типа значения,\n\n/// затем переход в состояние чтения значения\n\n#[derive(Debug, Clone)]\n\npub struct ReadLabel {\n\n /// Индекс поля, которое необходимо прочитать\n\n index: FieldIndex,\n\n /// Состояние, в которое нужно вернуться\n\n state: Box<State>,\n\n}\n\nimpl ReadLabel {\n\n /// # Возвращаемое значение\n\n /// Возвращает генерируемый в процессе разбора токен и новое состояние парсера\n\n fn next<R: Read + Seek>(self, parser: &mut Parser<R>) -> Result<(Token, State)> {\n", "file_path": "src/parser/states.rs", "rank": 43, "score": 4.906193040570187 }, { "content": "/// - Общий список полей всех структур файла\n\n/// - Список уникальных названий полей\n\n/// - Список с данными полей\n\n/// - Вспомогательный список для индексов для сложных структур данных\n\n/// - Вспомогательный список для хранения списочных значений полей\n\n#[derive(Debug)]\n\npub struct Header {\n\n /// Конкретный вид GFF файла\n\n pub signature: Signature,\n\n /// Версия файла\n\n pub version: Version,\n\n\n\n /// Содержит смещение в байтах от начала файла области с расположением\n\n /// структур и их количество\n\n pub structs: Section,\n\n\n\n /// Содержит смещение в байтах от начала файла области с расположением\n\n /// полей структур и их количество\n\n pub fields: Section,\n\n\n", "file_path": "src/header.rs", "rank": 44, "score": 4.768525221787979 }, { "content": " fn skip(self) -> State { *self.state }\n\n}\n\nimpl Default for ReadStruct<Root> {\n\n fn default() -> Self {\n\n ReadStruct::<Root> {\n\n index: StructIndex(0),\n\n state: Finish.into(),\n\n data: Root,\n\n }\n\n }\n\n}\n\n//--------------------------------------------------------------------------------------------------\n\n#[derive(Debug, Clone)]\n\npub struct EndStruct<Data: TokenEmitter> {\n\n /// Состояние, в которое нужно вернуться\n\n state: Box<State>,\n\n /// Дополнительные данные\n\n data: Data,\n\n}\n\nimpl<Data: TokenEmitter> EndStruct<Data> {\n", 
"file_path": "src/parser/states.rs", "rank": 45, "score": 4.693230873546549 }, { "content": " // Переходим к полю в списке полей и читаем его\n\n parser.seek(self.index)?;\n\n let tag = parser.read_u32()?;\n\n let label = LabelIndex(parser.read_u32()?);\n\n\n\n let token = Token::Label(label);\n\n let state = ReadField { tag, state: self.state };\n\n\n\n Ok((token, State::ReadField(state)))\n\n }\n\n #[inline]\n\n fn skip(self) -> State { *self.state }\n\n}\n\n/// Состояние чтения значения поля. В зависимости от типа значения возвращает токен\n\n/// простого значения, начала списка или структуры\n\n#[derive(Debug, Clone)]\n\npub struct ReadField {\n\n /// Идентификатор типа поля, которое требуется прочитать\n\n tag: u32,\n\n /// Состояние, в которое нужно вернуться\n", "file_path": "src/parser/states.rs", "rank": 46, "score": 4.693230873546549 }, { "content": " return from_utf8(&self.0[0..i])\n\n }\n\n }\n\n return from_utf8(&self.0);\n\n }\n\n\n\n /// Пытается создать метку из указанного массива байт.\n\n ///\n\n /// # Ошибки\n\n /// В случае, если длина среза равна или превышает 16 байт, возвращается ошибка\n\n /// [`Error::TooLongLabel`](./error/enum.Error.html#variant.TooLongLabel)\n\n pub fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {\n\n if bytes.len() > 16 {\n\n return Err(Error::TooLongLabel(bytes.len()));\n\n }\n\n\n\n let mut storage: [u8; 16] = Default::default();\n\n let range = 0..bytes.len();\n\n storage[range.clone()].copy_from_slice(&bytes[range]);\n\n Ok(storage.into())\n", "file_path": "src/label.rs", "rank": 47, "score": 4.593535522080252 }, { "content": " /// Содержит смещение в байтах от начала файла области с расположением\n\n /// меток полей в структурах и их количество\n\n pub labels: Section,\n\n\n\n /// Содержит смещение в байтах от начала файла области с расположением\n\n /// сериализованных значений полей и суммарное число байт данных\n\n pub field_data: Section,\n\n\n\n /// Содержит смещение в байтах от начала файла области с 
расположением\n\n /// индексов полей и их количество\n\n pub field_indices: Section,\n\n\n\n /// Содержит смещение в байтах от начала файла области с расположением\n\n /// индексов списков и их количество\n\n pub list_indices: Section,\n\n}\n\n\n\nimpl Header {\n\n /// Создает заголовок для пустого файла с указанным типом\n\n #[inline]\n", "file_path": "src/header.rs", "rank": 48, "score": 4.575921925192423 }, { "content": "//! Реализация парсера файлов формата Bioware GFF, используемых в играх на движке Aurora\n\n//! (Neverwinter Nights, The Witcher) и в игре Neverwinter Nights 2.\n\n//!\n\n//! Формат имеет некоторые ограничения:\n\n//! - элементами верхнего уровня могут быть только структуры или перечисления Rust в unit или struct варианте\n\n//! - имена полей структур не должны быть длиннее 16 байт в UTF-8. При нарушении при сериализации будет ошибка\n\n//! - то же самое касается ключей карт. Кроме того, ключами могут быть только строки (`&str` или `String`)\n\n//!\n\n//! # Пример\n\n//! ```rust\n\n//! use std::f32::consts::PI;\n\n//! use std::f64::consts::E;\n\n//! use std::io::Cursor;\n\n//! use serde::{Serialize, Deserialize};\n\n//!\n\n//! use serde_gff::de::Deserializer;\n\n//! use serde_gff::ser::to_vec;\n\n//! use serde_gff::value::Value;\n\n//!\n\n//! 
#[derive(Debug, Serialize, Deserialize)]\n", "file_path": "src/lib.rs", "rank": 49, "score": 4.548805881519371 }, { "content": " state: state,\n\n data: self,\n\n })\n\n }\n\n}\n\n\n\n/// Структура-элемент списка\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Item {\n\n /// Порядковый номер элемента в списке\n\n index: u32,\n\n}\n\nimpl TokenEmitter for Item {\n\n fn begin(&self, tag: Tag, count: u32) -> Token {\n\n Token::ItemBegin { tag, count, index: self.index }\n\n }\n\n fn end(&self) -> Token { Token::ItemEnd }\n\n fn next(self, state: Box<State>) -> State {\n\n State::EndItem(EndStruct::<Self> {\n\n state: state,\n", "file_path": "src/parser/states.rs", "rank": 50, "score": 4.497987600654551 }, { "content": "//! Содержит реализацию типажа `Deserialize` для десериализации типа `Value`\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::marker::PhantomData;\n\nuse indexmap::IndexMap;\n\nuse serde::forward_to_deserialize_any;\n\nuse serde::de::{Deserialize, Deserializer, Error, IntoDeserializer, SeqAccess, MapAccess, Visitor};\n\n\n\nuse crate::Label;\n\nuse crate::string::{GffString, StringKey};\n\nuse crate::value::Value;\n\n\n\nmacro_rules! string_key {\n\n ($method:ident, $type:ty) => (\n\n #[inline]\n\n fn $method<E>(self, value: $type) -> Result<Key, E>\n\n where E: Error,\n\n {\n\n Ok(Key::String(StringKey(value as u32)))\n\n }\n\n );\n\n}\n\n/// Возможные представления ключа отображений в форматах данных\n", "file_path": "src/de/value.rs", "rank": 51, "score": 4.490353915465494 }, { "content": " ///\n\n /// [метку]: ../struct.Label.html\n\n pub fields: Vec<Field>,\n\n /// Список меток из всех полей GFF файла. 
В корректном GFF файле каждая метка должна быть\n\n /// уникальна, однако неуникальность не является фатальной ошибкой -- просто неэффективным\n\n /// расходованием места\n\n pub labels: Vec<Label>,\n\n /// Данные для значений полей, которые не влезают в 4 байта и не могут храниться в структуре\n\n /// [поля](struct.Field.html)\n\n pub field_data: Vec<u8>,\n\n /// Плоский массив, содержащий индексы полей, которые входят в каждую структуру, содержащую\n\n /// более одного поля. Например, при наличии двух структур, первая из которых ссылается на поля\n\n /// 0 и 1, а вторая на поля 2, 3 и 4, массив может содержать `[0, 1, 2, 3, 4]` или `[2, 3, 4, 0, 1]`,\n\n /// в зависимости от того, в каком порядке будут записаны структуры\n\n pub field_indices: Vec<u32>,\n\n /// Плоский массив индексов структуры, которые входят в списки. Каждый элемент массива описывает\n\n /// или индекс структуры, или количество следующих индексов в массиве, которые относятся к одному\n\n /// списку. Например, если файл содержит два поля-списка, первое из которых состоит из структур\n\n /// 1 и 3, а второй -- из структур 0, 2 и 4, то массив может содержать `[2, 1, 3, 3, 0, 2, 4]`\n\n /// или `[3, 0, 2, 4, 2, 1, 3]` в зависимости от порядка записи списков. 
Каждый подсписок начинается\n", "file_path": "src/raw.rs", "rank": 52, "score": 4.433807585414354 }, { "content": "pub struct StructSerializer<'a> {\n\n /// Хранилище записываемых данных\n\n ser: &'a mut Serializer,\n\n /// Номер структуры в массиве `ser.structs`, которую нужно обновить по завершении\n\n /// сериализации структуры\n\n struct_index: StructIndex,\n\n /// Номер списка полей в массиве `ser.field_indices`, в который необходимо помещать\n\n /// индексы полей по мере их сериализации\n\n fields_index: FieldListIndex,\n\n}\n\nimpl<'a> StructSerializer<'a> {\n\n /// Сериализует значение, обновляя поле с указанным индексом\n\n #[inline]\n\n fn serialize_value<T>(&mut self, label: LabelIndex, value: &T) -> Result<()>\n\n where T: ?Sized + Serialize,\n\n {\n\n use self::Struct::*;\n\n\n\n let index = self.ser.fields.len();\n\n value.serialize(FieldSerializer { ser: self.ser, label })?;\n", "file_path": "src/ser/mod.rs", "rank": 53, "score": 4.363434554641721 }, { "content": "//! Содержит описание структур-индексов различных данных в GFF файле\n\n\n\nuse std::ops::Add;\n\n\n\nuse crate::header::Header;\n\n\n\n/// Типаж, реализуемый специальными структурами, хранящими индексы на записи в GFF-файле,\n\n/// позволяющий преобразовать их в реальное смещение для чтения информации из файла.\n", "file_path": "src/index.rs", "rank": 54, "score": 4.348223274613036 }, { "content": " Ok(())\n\n }\n\n}\n\nimpl fmt::Debug for Struct {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Struct {{ tag: {:?}, offset: {:?}, fields: {:?} }}\", self.tag, self.offset, self.fields)\n\n }\n\n}\n\n\n\n/// Описание поля структуры, как оно хранится в GFF файле\n\npub struct Field {\n\n /// Идентификатор типа поля\n\n pub tag: u32,\n\n /// Индекс в массив меток, определяющий метку, привязанную к данному полю\n\n pub label: u32,\n\n /// Сами данные для простых данных или смещение в массиве с данными для комплексных\n\n /// типов. 
Также, если поле представляет собой структуру, то это индекс в массиве\n\n /// структур, а если список -- байтовое смещение в массиве списков (хотя сам массив списков\n\n /// состоит из элементов размером 4 байта).\n\n pub data: [u8; 4],\n", "file_path": "src/raw.rs", "rank": 55, "score": 4.278774123068702 }, { "content": " }\n\n fn end(&self) -> Token { Token::RootEnd }\n\n fn next(self, state: Box<State>) -> State {\n\n State::EndRoot(EndStruct::<Self> {\n\n state: state,\n\n data: self,\n\n })\n\n }\n\n}\n\n\n\n/// Структура-поле другой структуры, имеющая метку с названием поля\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Struct;\n\nimpl TokenEmitter for Struct {\n\n fn begin(&self, tag: Tag, count: u32) -> Token {\n\n Token::StructBegin { tag, count }\n\n }\n\n fn end(&self) -> Token { Token::StructEnd }\n\n fn next(self, state: Box<State>) -> State {\n\n State::EndStruct(EndStruct::<Self> {\n", "file_path": "src/parser/states.rs", "rank": 56, "score": 4.113001097931377 }, { "content": "\n\n #[inline]\n\n fn end(self) -> Result<Self::Ok> { Ok(()) }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::BTreeMap;\n\n use serde::Serialize;\n\n use super::to_vec as to_vec_;\n\n use serde_bytes::{Bytes, ByteBuf};\n\n\n\n /// Формирует байтовый массив, соответствующий сериализованной структуре с одним полем\n\n /// `value` заданного типа, который хранится в записи о самом поле.\n\n macro_rules! 
primitive_wrapped {\n\n ($type:expr; $b1:expr, $b2:expr, $b3:expr, $b4:expr) => (\n\n vec![\n\n // Заголовок\n\n b'G',b'F',b'F',b' ',// Тип файла\n\n b'V',b'3',b'.',b'2',// Версия\n", "file_path": "src/ser/mod.rs", "rank": 57, "score": 4.0425947891235605 }, { "content": " Version([b'V', major + b'0', b'.', minor + b'0'])\n\n }\n\n /// Старший номер версии формата файла, хранимый в байте 1 версии\n\n #[inline]\n\n pub fn major(&self) -> u8 { self.0[1] - b'0' }\n\n /// Младший номер версии формата файла, хранимый в байте 3 версии\n\n #[inline]\n\n pub fn minor(&self) -> u8 { self.0[3] - b'0' }\n\n\n\n /// Читает версию файла из потока\n\n #[inline]\n\n pub fn read<R: Read>(reader: &mut R) -> Result<Self> {\n\n let mut version = Version([0u8; 4]);\n\n reader.read(&mut version.0)?;\n\n Ok(version)\n\n }\n\n /// Записывает версию файла в поток\n\n #[inline]\n\n pub fn write<W: Write>(&self, writer: &mut W) -> Result<()> {\n\n writer.write_all(&self.0)\n", "file_path": "src/ver.rs", "rank": 58, "score": 4.036799937586779 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ReadList {\n\n /// Индекс в таблице индексов, содержащий структуру-элемент для чтения\n\n index: ListIndicesIndex,\n\n /// Состояние, в которое нужно перейти после завершения чтения списка\n\n state: Box<State>,\n\n}\n\nimpl ReadList {\n\n /// # Возвращаемое значение\n\n /// Возвращает генерируемый в процессе разбора токен и новое состояние парсера\n\n fn next<R: Read + Seek>(self, parser: &mut Parser<R>) -> Result<(Token, State)> {\n\n // Переходим к списку индексов структур-элементов списка и читаем его размер\n\n parser.seek(self.index)?;\n\n let count = parser.read_u32()?;\n\n\n\n // Сообщаем о начале списка и переходим в состояние чтения первого элемента\n\n let token = Token::ListBegin(count);\n\n let state = ReadItems {\n\n index: self.index + 1,\n\n count: count,\n\n state: self.state,\n", "file_path": "src/parser/states.rs", "rank": 59, "score": 4.032073045205185 }, { "content": "//! 
Содержит реализацию конвертирования типа GFF строки в десериализатор\n\n//! с помощью которого из него могут быть прочитаны другие совместимые типы.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse serde::forward_to_deserialize_any;\n\nuse serde::de::{Deserializer, Error, IntoDeserializer, Visitor};\n\n\n\nuse crate::string::{GffString, StringKey};\n\n\n\nimpl<'de, E> IntoDeserializer<'de, E> for StringKey\n\n where E: Error,\n\n{\n\n type Deserializer = StringKeyDeserializer<E>;\n\n\n\n #[inline]\n\n fn into_deserializer(self) -> Self::Deserializer {\n\n StringKeyDeserializer { value: self, marker: PhantomData }\n\n }\n\n}\n", "file_path": "src/de/string.rs", "rank": 60, "score": 3.9872656025711803 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct FieldListIndex(usize);\n\n\n\n/// Вспомогательная структура, описывающая индекс списка элементов GFF списка, для типобезопасности\n", "file_path": "src/ser/mod.rs", "rank": 61, "score": 3.9589639400689833 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct StructIndex(usize);\n\n\n\n/// Вспомогательная структура, описывающая индекс списка полей структуры, для типобезопасности.\n\n/// Любая GFF структура, имеющая более двух полей, ссылается по такому индексу на список с\n\n/// перечислением имеющихся у нее полей\n", "file_path": "src/ser/mod.rs", "rank": 62, "score": 3.9589639400689833 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct ListIndex(usize);\n\n\n\n/// Промежуточное представление сериализуемых структур. 
Содержит данные, которые после\n\n/// небольшого преобразования, возможного только после окончания сериализации, могут\n\n/// быть записаны в файл\n", "file_path": "src/ser/mod.rs", "rank": 63, "score": 3.9589639400689833 }, { "content": " pub offset: u32,\n\n /// Количество полей структуры\n\n pub fields: u32,\n\n}\n\nimpl Struct {\n\n /// Читает 12 байт значения структуры из потока\n\n #[inline]\n\n pub fn read<R: Read>(reader: &mut R) -> Result<Self> {\n\n Ok(Struct {\n\n tag: reader.read_u32::<LE>()?,\n\n offset: reader.read_u32::<LE>()?,\n\n fields: reader.read_u32::<LE>()?,\n\n })\n\n }\n\n /// Записывает 12 байт значения структуры в поток\n\n #[inline]\n\n pub fn write<W: Write>(&self, writer: &mut W) -> Result<()> {\n\n writer.write_u32::<LE>(self.tag)?;\n\n writer.write_u32::<LE>(self.offset)?;\n\n writer.write_u32::<LE>(self.fields)?;\n", "file_path": "src/raw.rs", "rank": 64, "score": 3.753755639611764 }, { "content": "/// 12. [`ItemEnd`]. Элемент списка прочитан\n\n/// 13. [`ListEnd`]. Весь список прочитан\n\n/// 14. [`RootEnd`]. Файл прочитан\n\n///\n\n/// # Пример\n\n/// В данном примере читается файл с диска, и в потоковом режиме выводится на экран, формируя\n\n/// что-то, напоминающее JSON.\n\n///\n\n/// ```rust\n\n/// use std::fs::File;\n\n/// use serde_gff::parser::Parser;\n\n/// use serde_gff::parser::Token::*;\n\n///\n\n/// // Читаем файл с диска и создаем парсер. 
При создании парсер сразу же читает небольшую\n\n/// // порцию данных -- заголовок, которая нужна ему для правильного разрешения ссылок\n\n/// let file = File::open(\"test-data/all.gff\").expect(\"test file not exist\");\n\n/// let mut parser = Parser::new(file).expect(\"reading GFF header failed\");\n\n/// let mut indent = 0;\n\n/// loop {\n\n/// // В данном случае мы используем методы типажа Iterator для итерирования по файлу, так\n", "file_path": "src/parser/mod.rs", "rank": 65, "score": 3.727615896250246 }, { "content": " })\n\n }\n\n}\n\n\n\n#[cfg(nightly)]\n\nimpl TryFrom<u32> for FieldType {\n\n type Error = NoneError;\n\n\n\n #[inline]\n\n fn try_from(value: u32) -> Result<Self, Self::Error> {\n\n Ok(self.from_u32(value)?)\n\n }\n\n}\n\n\n\n/// Описание структуры, как оно хранится в GFF файле\n\npub struct Struct {\n\n /// Идентификатор типа структуры. Игрой на самом деле почти никогда не используется.\n\n /// При записи сюда сериализатор всегда записывает сюда 0\n\n pub tag: u32,\n\n /// Или индекс в массив полей (если `self.fields == 1`), или в смещение в массиве индексов полей\n", "file_path": "src/raw.rs", "rank": 66, "score": 3.6450683646795716 }, { "content": "//! Содержит реализацию типажа `Serialize` для сериализации типа `Value`\n\n\n\nuse serde::ser::{Serialize, SerializeMap, Serializer};\n\n\n\nuse crate::Label;\n\nuse crate::value::Value;\n\n\n\nimpl Serialize for Label {\n\n #[inline]\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where S: Serializer,\n\n {\n\n serializer.serialize_bytes(self.as_ref())\n\n }\n\n}\n\n\n\nimpl Serialize for Value {\n\n #[inline]\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where S: Serializer,\n", "file_path": "src/ser/value.rs", "rank": 67, "score": 3.644709048937022 }, { "content": " enum_tests!(toplevel);\n\n }\n\n\n\n mod as_field {\n\n //! 
Тестирует сериализацию различных значений, когда они включены как поле в структуру\n\n use super::*;\n\n use crate::error::Result;\n\n\n\n /// Сериализует значение, оборачивая его в структуру, т.к. формат не поддерживает на\n\n /// верхнем уровне ничего, кроме структур\n\n #[inline]\n\n fn to_result<T>(value: T) -> Result<Vec<u8>>\n\n where T: Serialize,\n\n {\n\n #[derive(Serialize)]\n\n struct Storage<T: Serialize> {\n\n value: T\n\n }\n\n to_vec_((*b\"GFF \").into(), &Storage { value })\n\n }\n", "file_path": "src/ser/mod.rs", "rank": 68, "score": 3.5966705720947845 }, { "content": "//! };\n\n//!\n\n//! let mut vec = to_vec((*b\"GFF \").into(), &data).expect(\"can't write data\");\n\n//! // Важный нюанс - не забыть, что создание десериализатора читает заголовок и возвращает\n\n//! // Result, а не сам десериализатор, поэтому требуется распаковка результата\n\n//! let mut de = Deserializer::new(Cursor::new(vec)).expect(\"can't read GFF header\");\n\n//! let val = Value::deserialize(&mut de).expect(\"can't deserialize data\");\n\n//!\n\n//! println!(\"{:#?}\", val);\n\n//! }\n\n//! 
```\n\n#![warn(missing_docs)]\n\n\n\n// Модули описания заголовка\n\nmod sig;\n\nmod ver;\n\npub mod header;\n\n\n\npub mod parser;\n\npub mod index;\n", "file_path": "src/lib.rs", "rank": 69, "score": 3.5921202439004 }, { "content": " pub fn new(signature: Signature) -> Self {\n\n Self::with_version(signature, Version::V3_2)\n\n }\n\n /// Создает заголовок для пустого файла с указанным типом и версией\n\n #[inline]\n\n pub fn with_version(signature: Signature, version: Version) -> Self {\n\n Header {\n\n signature,\n\n version,\n\n structs: Section::default(),\n\n fields: Section::default(),\n\n labels: Section::default(),\n\n field_data: Section::default(),\n\n field_indices: Section::default(),\n\n list_indices: Section::default(),\n\n }\n\n }\n\n /// Читает значение GFF заголовка из потока\n\n pub fn read<R: Read>(reader: &mut R) -> Result<Self> {\n\n Ok(Header {\n", "file_path": "src/header.rs", "rank": 70, "score": 3.5135378183297448 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod empty_file {\n\n //! Тестирование разбора пустого файла - содержащего только заголовок и структуру верхнего уровня\n\n use std::fs::File;\n\n use serde::Deserialize;\n\n use super::Deserializer;\n\n\n\n fn run<'de, T: Deserialize<'de>>(type_name: &str) -> T {\n\n // Читаемый файл содержит только одну пустую структуру верхнего уровня\n\n let file = File::open(\"test-data/empty.gff\").expect(\"test file 'empty.gff' not exist\");\n\n let mut deserializer = Deserializer::new(file).expect(\"can't read GFF header\");\n\n\n\n Deserialize::deserialize(&mut deserializer).expect(&format!(\"can't deserialize to {}\", type_name))\n\n }\n\n\n\n #[test]\n\n fn to_unit() {\n", "file_path": "src/de/mod.rs", "rank": 71, "score": 3.50816362947089 }, { "content": "\n\n self.reader.seek(old)?;\n\n Ok(label.into())\n\n }\n\n /// Читает из файла значение поля по указанному индексу. 
Побочный эффект -- переход по указанному адресу\n\n pub fn read_u64(&mut self, index: U64Index) -> Result<u64> {\n\n self.seek(index)?;\n\n self.reader.read_u64::<LE>().map_err(Into::into)\n\n }\n\n /// Читает из файла значение поля по указанному индексу. Побочный эффект -- переход по указанному адресу\n\n pub fn read_i64(&mut self, index: I64Index) -> Result<i64> {\n\n self.seek(index)?;\n\n self.reader.read_i64::<LE>().map_err(Into::into)\n\n }\n\n /// Читает из файла значение поля по указанному индексу. Побочный эффект -- переход по указанному адресу\n\n pub fn read_f64(&mut self, index: F64Index) -> Result<f64> {\n\n self.seek(index)?;\n\n self.reader.read_f64::<LE>().map_err(Into::into)\n\n }\n\n /// Читает 4 байта длины и следующие за ними байты строки, интерпретирует их в соответствии с\n", "file_path": "src/parser/mod.rs", "rank": 72, "score": 3.456820889644073 }, { "content": " /// Тестирует запись списков с элементом-структурой. Только такие списки могут быть записаны\n\n #[test]\n\n fn test_list_with_struct_item() {\n\n #[derive(Serialize, Clone)]\n\n struct Item<T: Serialize + Clone> {\n\n value: T\n\n }\n\n\n\n let array = [\n\n Item { value: 41u8 },\n\n Item { value: 42u8 },\n\n Item { value: 43u8 },\n\n ];\n\n let owned = array.to_vec();\n\n\n\n assert!(is_err(owned));\n\n assert!(is_err(&array[..]));\n\n assert!(is_err(array));\n\n }\n\n map_tests!();\n", "file_path": "src/ser/mod.rs", "rank": 73, "score": 3.3983622817365355 }, { "content": "serde-gff\n\n=========\n\n[![Crates.io](https://img.shields.io/crates/v/serde_gff.svg)](https://crates.io/crates/serde_gff)\n\n[![Документация](https://docs.rs/serde-gff/badge.svg)](https://docs.rs/serde-gff)\n\n[![Лицензия MIT](https://img.shields.io/crates/l/serde_gff.svg)](https://github.com/Mingun/serde-gff/blob/master/LICENSE)\n\n\n\nGeneric File Format (GFF) -- формат файлов, используемый играми на движке Bioware Aurora:\n\nNewerwinter Nights, The Witcher и Newerwinter Nights 2.\n\n\n\nФормат имеет 
некоторые ограничения:\n\n- элементами верхнего уровня могут быть только структуры или перечисления Rust в unit или struct варианте\n\n- имена полей структур не должны быть длиннее 16 байт в UTF-8. При нарушении при сериализации будет ошибка\n\n- то же самое касается ключей карт. Кроме того, ключами могут быть только строки (`&str` или `String`)\n\n\n\nУстановка\n\n---------\n\nВыполните в корне проекта\n\n```sh\n\ncargo add serde_gff\n\n```\n\nили добавьте следующую строку в `Cargo.toml`:\n\n```toml\n\n[dependencies]\n\nserde_gff = \"0.2\"\n\n```\n\n\n\nПример\n\n------\n\n```rust\n\nuse std::f32::consts::PI;\n\nuse std::f64::consts::E;\n\nuse std::io::Cursor;\n\nuse serde::{Serialize, Deserialize};\n\n\n\nuse serde_gff::de::Deserializer;\n\nuse serde_gff::ser::to_vec;\n\nuse serde_gff::value::Value;\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\nstruct Item { u8: u8, i8: i8 }\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\nstruct Struct {\n\n f32: f32,\n\n f64: f64,\n\n\n\n #[serde(with = \"serde_bytes\")]\n\n bytes: Vec<u8>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct Test {\n\n u16: u16,\n\n i16: i16,\n\n u32: u32,\n\n i32: i32,\n\n u64: u64,\n\n i64: i64,\n\n\n\n string: String,\n\n\n\n Struct: Struct,\n\n list: Vec<Item>,\n", "file_path": "readme.md", "rank": 74, "score": 3.3820199937900175 }, { "content": " let t = $E::Tuple2($value, $value);\n\n let expected = ($value, $value);\n\n assert_eq!(to_vec(t), to_vec(expected));\n\n );\n\n }\n\n\n\n mod toplevel {\n\n //! 
Тестирует сериализацию различных значений, когда они не включены ни в какую структуру\n\n use super::*;\n\n use crate::error::Result;\n\n\n\n #[inline]\n\n fn to_result<T>(value: T) -> Result<Vec<u8>>\n\n where T: Serialize,\n\n {\n\n to_vec_((*b\"GFF \").into(), &value)\n\n }\n\n\n\n #[inline]\n\n fn is_err<T>(value: T) -> bool\n", "file_path": "src/ser/mod.rs", "rank": 75, "score": 3.3744802265383957 }, { "content": " ///\n\n /// Данный метод меняет внутреннюю позицию чтения парсера, однако это не несет за собой\n\n /// негативных последствий, если сразу после вызова данного метода выполнить переход к\n\n /// следующему токену при итерации по токенам парсера. См. пример в описании структуры\n\n /// [`Parser`].\n\n ///\n\n /// [индексы]: ../index/trait.Index.html\n\n /// [`Parser`]: struct.Parser.html\n\n pub fn read_value(&mut self, value: SimpleValueRef) -> Result<SimpleValue> {\n\n use self::SimpleValueRef::*;\n\n\n\n Ok(match value {\n\n Byte(val) => SimpleValue::Byte(val),\n\n Char(val) => SimpleValue::Char(val),\n\n Word(val) => SimpleValue::Word(val),\n\n Short(val) => SimpleValue::Short(val),\n\n Dword(val) => SimpleValue::Dword(val),\n\n Int(val) => SimpleValue::Int(val),\n\n Dword64(val) => SimpleValue::Dword64(self.read_u64(val)?),\n\n Int64(val) => SimpleValue::Int64(self.read_i64(val)?),\n", "file_path": "src/parser/mod.rs", "rank": 76, "score": 3.34987227422048 }, { "content": " #[inline]\n\n pub fn read<R: Read>(reader: &mut R) -> Result<Self> {\n\n Ok(Section {\n\n offset: reader.read_u32::<LE>()?,\n\n count: reader.read_u32::<LE>()?,\n\n })\n\n }\n\n /// Записывает описание области файла в поток\n\n #[inline]\n\n pub fn write<W: Write>(&self, writer: &mut W) -> Result<()> {\n\n writer.write_u32::<LE>(self.offset)?;\n\n writer.write_u32::<LE>(self.count)\n\n }\n\n}\n\n\n\n///////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// Заголовок GFF файла. 
Заголовок содержит вид файла, версию формата и информацию о\n\n/// 6 областях, файла, содержащих данные:\n\n/// - Список структур в файле\n", "file_path": "src/header.rs", "rank": 77, "score": 3.232415118680917 }, { "content": " let item = Item { value: 42 };\n\n\n\n assert!(is_err((42u32, 42f32)));\n\n assert!(is_err((item, 42f32)));\n\n assert!(is_err(Tuple1(42, 42.0)));\n\n assert!(is_err(Tuple2(item, 42.0)));\n\n }\n\n\n\n /// Тестирует запись кортежа из значений структур разных типов\n\n #[test]\n\n fn test_tuple_with_struct_item() {\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item1 { value: u32 }\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item2 { value: f32 }\n\n #[derive(Serialize)]\n\n struct Tuple(Item1, Item2);\n\n\n\n let item1 = Item1 { value: 42 };\n\n let item2 = Item2 { value: 42.0 };\n", "file_path": "src/ser/mod.rs", "rank": 78, "score": 3.152990890096509 }, { "content": "\n\n newtype_test!(() = ());\n\n\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item1 { payload: u32 };\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item2 { value: u64 };\n\n\n\n let item1 = Item1 { payload: 123456789 };\n\n let item2 = Item2 { value: 0xDEAD_BEAF_00FF_FF00 };\n\n\n\n newtype_test!((Item1, Item2) = (item1, item2));\n\n\n\n #[derive(Serialize)]\n\n struct Unit;\n\n newtype_test!(Unit = Unit);\n\n\n\n #[derive(Serialize)]\n\n struct Newtype(Item1);\n\n newtype_test!(Newtype = Newtype(item1));\n", "file_path": "src/ser/mod.rs", "rank": 79, "score": 3.152990890096509 }, { "content": " newtype_test!(i8);\n\n newtype_test!(i16);\n\n newtype_test!(i32);\n\n newtype_test!(i64);\n\n\n\n newtype_test!(f32);\n\n newtype_test!(f64);\n\n\n\n newtype_test!(bool);\n\n\n\n newtype_test!(String, \"some string\".into());\n\n newtype_test!(ByteBuf, ByteBuf::from(b\"some vector\".to_vec()));\n\n\n\n newtype_test!(() = ());\n\n\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item1 { payload: u32 };\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item2 { value: u64 
};\n\n\n", "file_path": "src/ser/mod.rs", "rank": 80, "score": 3.1303885969117506 }, { "content": " fn test_tuple_with_non_struct_item() {\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item { value: u32 }\n\n #[derive(Serialize)]\n\n struct Tuple1(u32, f32);\n\n #[derive(Serialize)]\n\n struct Tuple2(Item, f32);\n\n\n\n let item = Item { value: 42 };\n\n\n\n assert!(is_err((42u32, 42f32)));\n\n assert!(is_err((item, 42f32)));\n\n assert!(is_err(Tuple1(42, 42.0)));\n\n assert!(is_err(Tuple2(item, 42.0)));\n\n }\n\n\n\n /// Тестирует запись кортежа из значений структур разных типов\n\n #[test]\n\n fn test_tuple_with_struct_item() {\n\n #[derive(Serialize, Clone, Copy)]\n", "file_path": "src/ser/mod.rs", "rank": 81, "score": 3.1303885969117506 }, { "content": " b\"PTT \" => PTT,\n\n\n\n b\"BIC \" => BIC,\n\n\n\n _ => Other(arr),\n\n }\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for Signature {\n\n fn as_ref(&self) -> &[u8] {\n\n use self::Signature::*;\n\n\n\n match *self {\n\n IFO => b\"IFO \",\n\n\n\n ARE => b\"ARE \",\n\n GIT => b\"GIT \",\n\n GIC => b\"GIC \",\n\n\n", "file_path": "src/sig.rs", "rank": 82, "score": 3.0708500506118863 }, { "content": " use self::FieldType::*;\n\n\n\n Some(match value {\n\n 0 => Byte,\n\n 1 => Char,\n\n 2 => Word,\n\n 3 => Short,\n\n 4 => Dword,\n\n 5 => Int,\n\n 6 => Dword64,\n\n 7 => Int64,\n\n 8 => Float,\n\n 9 => Double,\n\n 10 => String,\n\n 11 => ResRef,\n\n 12 => LocString,\n\n 13 => Void,\n\n 14 => Struct,\n\n 15 => List,\n\n _ => return None,\n", "file_path": "src/raw.rs", "rank": 83, "score": 2.907401435166852 }, { "content": " self.field_indices.write(writer)?;\n\n self.list_indices.write(writer)\n\n }\n\n /// Возвращает нижнюю границу на количество токенов, которые может произвести\n\n /// данный файл\n\n #[inline]\n\n pub fn token_count(&self) -> usize {\n\n // Для каждой структуры - токен начала и окончания\n\n // Для каждого списка - токен начала и окончания\n\n let size = (self.structs.count + 
self.list_indices.count)*2;\n\n\n\n // Т.к. каждое поле может быть списком или структурой, то они уже подсчитываются\n\n // в списках и структурах. Поэтому минимальное количество вычисляем, как максимум\n\n // из того, что нам смогут дать поля или структуры со списками\n\n max(size, self.fields.count) as usize\n\n }\n\n}\n", "file_path": "src/header.rs", "rank": 84, "score": 2.887946650632913 }, { "content": "}\n\nimpl Field {\n\n /// Читает 12 байт значения поля из потока\n\n #[inline]\n\n pub fn read<R: Read>(reader: &mut R) -> Result<Self> {\n\n let tag = reader.read_u32::<LE>()?;\n\n let label = reader.read_u32::<LE>()?;\n\n let mut data = [0u8; 4];\n\n reader.read_exact(&mut data)?;\n\n\n\n Ok(Field { tag, label, data })\n\n }\n\n /// Записывает 12 байт значения поля в поток\n\n #[inline]\n\n pub fn write<W: Write>(&self, writer: &mut W) -> Result<()> {\n\n writer.write_u32::<LE>(self.tag as u32)?;\n\n writer.write_u32::<LE>(self.label)?;\n\n writer.write_all(&self.data)?;\n\n Ok(())\n\n }\n", "file_path": "src/raw.rs", "rank": 85, "score": 2.864846364313115 }, { "content": " use super::Label;\n\n\n\n #[test]\n\n fn label_constructs_from_str() {\n\n assert_eq!(Label::from(*b\"short\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\"), \"short\".parse().unwrap());\n\n assert_eq!(Label::from(*b\"exact_16_chars_\\0\"), \"exact_16_chars_\".parse().unwrap());\n\n assert!(\"more_then_16_char\".parse::<Label>().is_err());\n\n }\n\n}\n", "file_path": "src/label.rs", "rank": 86, "score": 2.7604729080929196 }, { "content": " // Данные\n\n 5,0,0,0, b'v',b'a',b'l',b'u',b'e',\n\n 13,0,0,0, b'a',b'n',b'o',b't',b'h',b'e',b'r',b' ',b'v',b'a',b'l',b'u',b'e',\n\n\n\n // Списки полей структур\n\n 0,0,0,0, 1,0,0,0,// Список 1, поля 1 и 2\n\n ];\n\n assert_eq!(to_vec(test), expected);\n\n }\n\n\n\n /// Тестирует запись кортежа из значений разных типов, не все из которых являются структурами\n\n #[test]\n\n fn test_tuple_with_non_struct_item() {\n\n #[derive(Serialize, Clone, Copy)]\n\n struct Item 
{ value: u32 }\n\n #[derive(Serialize)]\n\n struct Tuple1(u32, f32);\n\n #[derive(Serialize)]\n\n struct Tuple2(Item, f32);\n\n\n", "file_path": "src/ser/mod.rs", "rank": 87, "score": 2.758542009851908 }, { "content": "\n\nimpl<R: Read + Seek> Deserializer<R> {\n\n /// Создает десериализатор для чтения GFF файла из указанного источника данных с использованием\n\n /// кодировки `UTF-8` для декодирования строк и генерацией ошибки в случае, если декодировать\n\n /// набор байт, как строку в этой кодировке, не удалось.\n\n ///\n\n /// # Параметры\n\n /// - `reader`: Источник данных для чтения файла\n\n ///\n\n /// # Ошибки\n\n /// В случае, если не удалось прочитать заголовок GFF файла -- например, он слишком короткий\n\n pub fn new(reader: R) -> Result<Self> {\n\n Ok(Deserializer { parser: Parser::new(reader)?, peeked: None })\n\n }\n\n /// Создает десериализатор для чтения GFF файла из указанного источника данных с использованием\n\n /// указанной кодировки для декодирования строк.\n\n ///\n\n /// # Параметры\n\n /// - `reader`: Источник данных для чтения файла\n\n /// - `encoding`: Кодировка для декодирования символов в строках\n", "file_path": "src/de/mod.rs", "rank": 88, "score": 2.740133822752227 }, { "content": " let array = [\n\n 41u8,\n\n 42u8,\n\n 43u8,\n\n ];\n\n let owned = array.to_vec();\n\n\n\n assert!(is_err(owned));\n\n assert!(is_err(&array[..]));\n\n assert!(is_err(array));\n\n }\n\n\n\n /// Тестирует запись списков с элементом-структурой. 
Только такие списки могут быть записаны\n\n #[test]\n\n fn test_list_with_struct_item() {\n\n #[derive(Serialize, Copy, Clone)]\n\n struct Item { value: u8 }\n\n\n\n let array = [\n\n Item { value: 41 },\n", "file_path": "src/ser/mod.rs", "rank": 90, "score": 2.6578057942055864 }, { "content": " encoding: EncodingRef,\n\n /// Способ обработки ошибок декодирования строк\n\n trap: DecoderTrap,\n\n /// Текущее состояние разбора\n\n state: State,\n\n}\n\n\n\nimpl<R: Read + Seek> Parser<R> {\n\n /// Создает парсер для чтения GFF файла из указанного источника данных с использованием\n\n /// кодировки `UTF-8` для декодирования строк и генерацией ошибки в случае, если декодировать\n\n /// набор байт, как строку в этой кодировке, не удалось.\n\n ///\n\n /// # Параметры\n\n /// - `reader`: Источник данных для чтения файла\n\n pub fn new(reader: R) -> Result<Self> {\n\n Self::with_encoding(reader, UTF_8, DecoderTrap::Strict)\n\n }\n\n /// Создает парсер для чтения GFF файла из указанного источника данных с использованием\n\n /// указанной кодировки для декодирования строк.\n\n ///\n", "file_path": "src/parser/mod.rs", "rank": 91, "score": 2.6067151734886056 }, { "content": " {\n\n <Self as SerializeStruct>::serialize_field(self, key, value)\n\n }\n\n #[inline]\n\n fn end(self) -> Result<()> { <Self as SerializeStruct>::end(self) }\n\n}\n\n\n\n/// Сериализует все поля списка или кортежа, заполняя массив с индексами элементов списка\n\npub struct ListSerializer<'a> {\n\n /// Хранилище записываемых данных\n\n ser: &'a mut Serializer,\n\n /// Индекс в массиве `ser.list_indices`, определяющий заполняемый данным сериализатором\n\n /// список с индексами структур, составляющих элементы списка.\n\n list_index: ListIndex,\n\n}\n\n\n\nimpl<'a> SerializeSeq for ListSerializer<'a> {\n\n type Ok = ();\n\n type Error = Error;\n\n\n", "file_path": "src/ser/mod.rs", "rank": 92, "score": 2.5447622250693165 }, { "content": " Void(val) => { storage.write_u32::<LE>(val.0)?; FieldType::Void 
},\n\n }\n\n };\n\n Ok(raw::Field { tag: type_ as u32, label, data })\n\n }\n\n}\n\n\n\n/// Структура для сериализации значения Rust в Bioware GFF.\n\n///\n\n/// Формат поддерживает непосредственную сериализацию только структур, перечислений и отображений.\n\n/// Остальные значения необходимо обернуть в одну из этих структур данных для возможности их\n\n/// сериализации.\n\n#[derive(Default, Debug)]\n\npub struct Serializer {\n\n /// Массив, содержащий описания структур в файле\n\n structs: Vec<Struct>,\n\n /// Массив, содержащий описания полей структур в файле\n\n fields: Vec<Field>,\n\n /// Множество, содержащие названия всех полей всех структур файла в порядке их добавления\n\n labels: IndexSet<Label>,\n", "file_path": "src/ser/mod.rs", "rank": 93, "score": 2.514877093688124 }, { "content": " where D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_any(LabelVisitor)\n\n }\n\n}\n\n\n\n/// Десериализатор, в котором источником данных является метка\n\n#[derive(Debug)]\n\npub struct LabelDeserializer<E> {\n\n /// Источник данных, из которого достаются данные для десериализации других структур\n\n value: Label,\n\n /// Фиктивный элемент, для связывания типа ошибки `E`\n\n marker: PhantomData<E>,\n\n}\n\nimpl<'de, E> IntoDeserializer<'de, E> for Label\n\n where E: Error,\n\n{\n\n type Deserializer = LabelDeserializer<E>;\n\n\n\n #[inline]\n", "file_path": "src/de/value.rs", "rank": 94, "score": 2.514877093688124 }, { "content": "\n\n let t = E::Tuple2(value, value);\n\n let expected = to_vec(Tuple2 { Tuple2: [value, value] });\n\n assert_eq!(to_vec(t), expected);\n\n\n\n let s = E::Struct { value: 42 };\n\n let expected = to_vec(Struct { Struct: value });\n\n assert_eq!(to_vec(s), expected);\n\n }\n\n\n\n /// Тестирует запись перечислений со значениями разных видов при записи тега,\n\n /// определяющего вариант перечисления, в качестве одного из поля данных, на\n\n /// одном уровне с другими полями\n\n #[test]\n\n fn test_enum_internally_tagged() {\n\n 
#[derive(Serialize)]\n\n #[serde(tag = \"tag\")]\n\n enum E {\n\n Unit,\n\n Newtype1(u32),\n", "file_path": "src/ser/mod.rs", "rank": 95, "score": 2.4579053831442925 }, { "content": " let token = self.next_token()?;\n\n unimplemented!(\"`deserialize_tuple_struct(name: {}, len: {})` not yet supported. Token: {:?}\", name, len, token)\n\n }\n\n fn deserialize_struct<V>(self, _name: &'static str, _fields: &'static [&'static str], visitor: V) -> Result<V::Value>\n\n where V: Visitor<'de>,\n\n {\n\n self.deserialize_map(visitor)\n\n }\n\n fn deserialize_enum<V>(self, name: &'static str, variants: &'static [&'static str], _visitor: V) -> Result<V::Value>\n\n where V: Visitor<'de>,\n\n {\n\n let token = self.next_token()?;\n\n unimplemented!(\"`deserialize_enum(name: {}, variants: {})` not yet supported. Token: {:?}\", name, variants.len(), token)\n\n }\n\n}\n\n\n\nimpl<'de, 'a, R: Read + Seek> de::MapAccess<'de> for &'a mut Deserializer<R> {\n\n type Error = Error;\n\n\n\n fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>>\n", "file_path": "src/de/mod.rs", "rank": 96, "score": 2.4286525142237236 }, { "content": " #[derive(Serialize)]\n\n #[serde(untagged)]\n\n enum E {\n\n Unit,\n\n Newtype1(u32),\n\n Newtype2(Value),\n\n Tuple1(u32, u32),\n\n Tuple2(Value, Value),\n\n Struct { value: u32 },\n\n }\n\n /// Значения Newtype2 и Struct перечисления должны сериализоваться также, как эта структура\n\n #[derive(Serialize, Copy, Clone)]\n\n struct Value { value: u32 };\n\n\n\n let value = Value { value: 42 };\n\n\n\n let u = E::Unit;\n\n let expected = ();\n\n assert_eq!(to_vec(u), to_vec(expected));\n\n\n", "file_path": "src/ser/mod.rs", "rank": 97, "score": 2.42195327366854 }, { "content": "{\n\n type Deserializer = GffStringDeserializer<E>;\n\n\n\n #[inline]\n\n fn into_deserializer(self) -> Self::Deserializer {\n\n GffStringDeserializer { value: self, marker: PhantomData }\n\n }\n\n}\n\n\n\n/// Десериализатор, использующий в качестве источника данных тип 
[`GffString`].\n\n///\n\n/// В зависимости от типа хранимой строки позволяет прочитать из значения либо `u32`,\n\n/// являющемся StrRef индексом, либо отображение из `u32` (содержащего комбинированное\n\n/// значение языка и пола строки) на `String` с текстом строки для данного языка и пола.\n\n///\n\n/// [`GffString`]: ../../enum.GffString.html\n\n#[derive(Debug)]\n\npub struct GffStringDeserializer<E> {\n\n /// Источник данных, из которого достаются данные для десериализации других структур\n\n value: GffString,\n", "file_path": "src/de/string.rs", "rank": 98, "score": 2.4020409194422903 }, { "content": " self.peeked = Some(self.next_token()?);\n\n }\n\n match self.peeked {\n\n Some(ref value) => Ok(value),\n\n _ => unreachable!(),\n\n }\n\n }\n\n /// Десериализует все примитивные типы GFF файла (все типы, кроме структур и списков)\n\n fn deserialize_value<'de, V>(&mut self, value: SimpleValueRef, visitor: V) -> Result<V::Value>\n\n where V: Visitor<'de>,\n\n {\n\n use serde::Deserializer;\n\n\n\n let value: Value = self.parser.read_value(value)?.into();\n\n value.into_deserializer().deserialize_any(visitor)\n\n }\n\n}\n\n\n\n/// Реализует разбор простых типов данных.\n\n///\n", "file_path": "src/de/mod.rs", "rank": 99, "score": 2.3710471486735796 } ]
Rust
src/lib.rs
wangkang/hiredis
10bf22dc35c26e3846a025866993a2f361441d13
extern crate hiredis_sys as ffi; extern crate libc; use libc::{c_char, c_int, size_t}; use std::convert::{From, Into}; use std::ffi::{CStr, CString}; use std::marker::PhantomData; use std::{error, fmt, mem, slice}; macro_rules! raise( ($message:expr) => (return Err(Error::from($message))); ); macro_rules! success( ($context:expr) => (unsafe { if (*$context.raw).err != ffi::REDIS_OK { return Err(Error { kind: ErrorKind::from((*$context.raw).err as isize), message: c_str_to_string!((*$context.raw).errstr.as_ptr() as *const _), }); } }); ); macro_rules! str_to_cstr( ($string:expr) => (match CString::new($string) { Ok(string) => string, _ => raise!("failed to process a string"), }); ); macro_rules! c_str_to_string( ($string:expr, $size:expr) => ({ let slice: &CStr = mem::transmute(slice::from_raw_parts($string as *const c_char, $size as usize + 1)); String::from_utf8_lossy(slice.to_bytes()).into_owned() }); ($string:expr) => ({ String::from_utf8_lossy(CStr::from_ptr($string).to_bytes()).into_owned() }); ); macro_rules! 
c_str_to_vec_u8( ($string:expr, $size:expr) => ({ let slice: &[u8] = mem::transmute(slice::from_raw_parts($string as *const c_char, $size as usize)); Vec::from(slice) }); ); pub trait AsBytes { fn as_bytes(&self) -> &[u8]; } pub struct Context { raw: *mut ffi::redisContext, phantom: PhantomData<ffi::redisContext>, } #[derive(Debug)] pub struct Error { pub kind: ErrorKind, pub message: String, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ErrorKind { InputOutput = ffi::REDIS_ERR_IO as isize, EndOfFile = ffi::REDIS_ERR_EOF as isize, Protocol = ffi::REDIS_ERR_PROTOCOL as isize, OutOfMemory = ffi::REDIS_ERR_OOM as isize, Other = ffi::REDIS_ERR_OTHER as isize, } #[derive(Debug)] pub enum Reply { Status(String), Integer(i64), Bulk(Vec<u8>), Array(Vec<Reply>), Nil, } pub type Result<T> = std::result::Result<T, Error>; impl<'l> AsBytes for &'l str { #[inline] fn as_bytes(&self) -> &[u8] { (*self).as_bytes() } } impl<'l> AsBytes for &'l [u8] { #[inline] fn as_bytes(&self) -> &[u8] { self } } impl Context { pub fn new(host: &str, port: usize) -> Result<Context> { let context = Context { raw: unsafe { let raw = ffi::redisConnect(str_to_cstr!(host).as_ptr(), port as c_int); if raw.is_null() { raise!("failed to create a context"); } raw }, phantom: PhantomData, }; success!(context); Ok(context) } pub fn command<T: AsBytes>(&mut self, arguments: &[T]) -> Result<Reply> { let argc = arguments.len(); let mut argv: Vec<*const c_char> = Vec::with_capacity(argc); let mut argvlen = Vec::with_capacity(argc); for argument in arguments.iter() { let data = argument.as_bytes(); argv.push(data.as_ptr() as *const _); argvlen.push(data.len() as size_t); } let raw = unsafe { ffi::redisCommandArgv(self.raw, argc as c_int, argv.as_ptr() as *mut *const _, argvlen.as_ptr()) as *mut ffi::redisReply }; success!(self); debug_assert!(!raw.is_null()); unsafe { let reply = process_reply(raw); ffi::freeReplyObject(raw as *mut _); reply } } #[inline] pub fn reconnect(&mut self) -> Result<()> { if 
unsafe { ffi::redisReconnect(self.raw) } != ffi::REDIS_OK { raise!("failed to reconnect"); } Ok(()) } } impl Drop for Context { #[inline] fn drop(&mut self) { unsafe { ffi::redisFree(self.raw) }; } } impl<T> From<T> for Error where T: Into<String> { #[inline] fn from(message: T) -> Error { Error { kind: ErrorKind::Other, message: message.into() } } } impl fmt::Display for Error { #[inline] fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { self.message.fmt(formatter) } } impl error::Error for Error { #[inline] fn description(&self) -> &str { &self.message } } impl From<isize> for ErrorKind { #[inline] fn from(code: isize) -> ErrorKind { use ErrorKind::*; match code as c_int { ffi::REDIS_ERR_IO => InputOutput, ffi::REDIS_ERR_EOF => EndOfFile, ffi::REDIS_ERR_PROTOCOL => Protocol, ffi::REDIS_ERR_OOM => OutOfMemory, _ => Other, } } } impl fmt::Display for ErrorKind { #[inline] fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "Hiredis error code {}", *self as isize) } } #[inline] pub fn connect(host: &str, port: usize) -> Result<Context> { Context::new(host, port) } unsafe fn process_reply(raw: *mut ffi::redisReply) -> Result<Reply> { Ok(match (*raw).kind { ffi::REDIS_REPLY_STATUS => { Reply::Status(c_str_to_string!((*raw).string, (*raw).len)) }, ffi::REDIS_REPLY_INTEGER => { Reply::Integer((*raw).integer as i64) }, ffi::REDIS_REPLY_NIL => { Reply::Nil } ffi::REDIS_REPLY_STRING => { Reply::Bulk(c_str_to_vec_u8!((*raw).string, (*raw).len)) }, ffi::REDIS_REPLY_ARRAY => { let count = (*raw).elements as usize; let mut elements = Vec::with_capacity(count); for i in 0..count { elements.push(try!(process_reply(*(*raw).element.offset(i as isize)))); } Reply::Array(elements) }, ffi::REDIS_REPLY_ERROR => { raise!(c_str_to_string!((*raw).string, (*raw).len)); }, _ => { raise!("failed to identify a reply"); }, }) }
extern crate hiredis_sys as ffi; extern crate libc; use libc::{c_char, c_int, size_t}; use std::convert::{From, Into}; use std::ffi::{CStr, CString}; use std::marker::PhantomData; use std::{error, fmt, mem, slice}; macro_rules! raise( ($message:expr) => (return Err(Error::from($message))); ); macro_rules! success( ($context:expr) => (unsafe { if (*$context.raw).err != ffi::REDIS_OK { return Err(Error { kind: ErrorKind::from((*$context.raw).err as isize), message: c_str_to_string!((*$context.raw).errstr.as_ptr() as *const _), }); } }); ); macro_rules! str_to_cstr( ($string:expr) => (match CString::new($string) { Ok(string) => string, _ => raise!("failed to process a string"), }); ); macro_rules! c_str_to_string( ($string:expr, $size:expr) => ({ let slice: &CStr = mem::transmute(slice::from_raw_parts($string as *const c_char, $size as usize + 1)); String::from_utf8_lossy(slice.to_bytes()).into_owned() }); ($string:expr) => ({ String::from_utf8_lossy(CStr::from_ptr($string).to_bytes()).into_owned() }); ); macro_rules! 
c_str_to_vec_u8( ($string:expr, $size:expr) => ({ let slice: &[u8] = mem::transmute(slice::from_raw_parts($string as *const c_char, $size as usize)); Vec::from(slice) }); ); pub trait AsBytes { fn as_bytes(&self) -> &[u8]; } pub struct Context { raw: *mut ffi::redisContext, phantom: PhantomData<ffi::redisContext>, } #[derive(Debug)] pub struct Error { pub kind: ErrorKind, pub message: String, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ErrorKind { InputOutput = ffi::REDIS_ERR_IO as isize, EndOfFile = ffi::REDIS_ERR_EOF as isize, Protocol = ffi::REDIS_ERR_PROTOCOL as isize, OutOfMemory = ffi::REDIS_ERR_OOM as isize, Other = ffi::REDIS_ERR_OTHER as isize, } #[derive(Debug)] pub enum Reply { Status(String), Integer(i64), Bulk(Vec<u8>), Array(Vec<Reply>), Nil, } pub type Result<T> = std::result::Result<T, Error>; impl<'l> AsBytes for &'l str { #[inline] fn as_bytes(&self) -> &[u8] { (*self).as_bytes() } } impl<'l> AsBytes for &'l [u8] { #[inline] fn as_bytes(&self) -> &[u8] { self } } impl Context { pub fn new(host: &str, port: usize) -> Result<Context> { let context = Context { raw: unsafe { let raw = ffi::redisConnect(str_to_cstr!(host).as_ptr(), port as c_int); if raw.is_null() { raise!("failed to create a context"); } raw }, phantom: PhantomData, }; success!(context); Ok(context) } pub fn command<T: AsBytes>(&mut self, arguments: &[T]) -> Result<Reply> { let argc = arguments.len(); let mut argv: Vec<*const c_char> = Vec::with_capacity(argc); le
#[inline] pub fn reconnect(&mut self) -> Result<()> { if unsafe { ffi::redisReconnect(self.raw) } != ffi::REDIS_OK { raise!("failed to reconnect"); } Ok(()) } } impl Drop for Context { #[inline] fn drop(&mut self) { unsafe { ffi::redisFree(self.raw) }; } } impl<T> From<T> for Error where T: Into<String> { #[inline] fn from(message: T) -> Error { Error { kind: ErrorKind::Other, message: message.into() } } } impl fmt::Display for Error { #[inline] fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { self.message.fmt(formatter) } } impl error::Error for Error { #[inline] fn description(&self) -> &str { &self.message } } impl From<isize> for ErrorKind { #[inline] fn from(code: isize) -> ErrorKind { use ErrorKind::*; match code as c_int { ffi::REDIS_ERR_IO => InputOutput, ffi::REDIS_ERR_EOF => EndOfFile, ffi::REDIS_ERR_PROTOCOL => Protocol, ffi::REDIS_ERR_OOM => OutOfMemory, _ => Other, } } } impl fmt::Display for ErrorKind { #[inline] fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "Hiredis error code {}", *self as isize) } } #[inline] pub fn connect(host: &str, port: usize) -> Result<Context> { Context::new(host, port) } unsafe fn process_reply(raw: *mut ffi::redisReply) -> Result<Reply> { Ok(match (*raw).kind { ffi::REDIS_REPLY_STATUS => { Reply::Status(c_str_to_string!((*raw).string, (*raw).len)) }, ffi::REDIS_REPLY_INTEGER => { Reply::Integer((*raw).integer as i64) }, ffi::REDIS_REPLY_NIL => { Reply::Nil } ffi::REDIS_REPLY_STRING => { Reply::Bulk(c_str_to_vec_u8!((*raw).string, (*raw).len)) }, ffi::REDIS_REPLY_ARRAY => { let count = (*raw).elements as usize; let mut elements = Vec::with_capacity(count); for i in 0..count { elements.push(try!(process_reply(*(*raw).element.offset(i as isize)))); } Reply::Array(elements) }, ffi::REDIS_REPLY_ERROR => { raise!(c_str_to_string!((*raw).string, (*raw).len)); }, _ => { raise!("failed to identify a reply"); }, }) }
t mut argvlen = Vec::with_capacity(argc); for argument in arguments.iter() { let data = argument.as_bytes(); argv.push(data.as_ptr() as *const _); argvlen.push(data.len() as size_t); } let raw = unsafe { ffi::redisCommandArgv(self.raw, argc as c_int, argv.as_ptr() as *mut *const _, argvlen.as_ptr()) as *mut ffi::redisReply }; success!(self); debug_assert!(!raw.is_null()); unsafe { let reply = process_reply(raw); ffi::freeReplyObject(raw as *mut _); reply } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn push_pop_strings() {\n\n let mut context = ok!(hiredis::connect(\"127.0.0.1\", 6379));\n\n match ok!(context.command(&[\"RPUSH\", \"hiredis-baz\", \"Good news, everyone!\"])) {\n\n Reply::Integer(integer) => assert!(integer > 0),\n\n _ => assert!(false),\n\n }\n\n match ok!(context.command(&[\"BLPOP\", \"hiredis-baz\", \"10\"])) {\n\n Reply::Array(mut elements) => {\n\n println!(\"Array!\");\n\n assert_eq!(elements.len(), 2);\n\n elements.reverse();\n\n match elements.pop().unwrap() {\n\n Reply::Bulk(bytes) => {\n\n assert_eq!(&ok!(String::from_utf8(bytes))[..], \"hiredis-baz\");\n\n },\n\n _ => assert!(false),\n\n }\n\n match elements.pop().unwrap() {\n\n Reply::Bulk(bytes) => {\n\n assert_eq!(&ok!(String::from_utf8(bytes))[..], \"Good news, everyone!\");\n\n },\n\n _ => assert!(false),\n\n }\n\n },\n\n _ => assert!(false),\n\n }\n\n}\n", "file_path": "tests/lib.rs", "rank": 2, "score": 33313.80189101869 }, { "content": "#[test]\n\nfn set_get_strings() {\n\n let mut context = ok!(hiredis::connect(\"127.0.0.1\", 6379));\n\n match ok!(context.command(&[\"SET\", \"hiredis-foo\", \"Hi, there!\"])) {\n\n Reply::Status(ref string) => assert_eq!(&string[..], \"OK\"),\n\n _ => assert!(false),\n\n }\n\n match ok!(context.command(&[\"GET\", \"hiredis-foo\"])) {\n\n Reply::Bulk(bytes) => assert_eq!(&ok!(String::from_utf8(bytes))[..], \"Hi, there!\"),\n\n _ => assert!(false),\n\n }\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 3, "score": 33313.80189101869 }, { "content": "#[test]\n\nfn set_get_bytes() {\n\n let mut context = ok!(hiredis::connect(\"127.0.0.1\", 6379));\n\n match ok!(context.command(&[&b\"SET\"[..], &b\"hiredis-bar\"[..], &[42u8]])) {\n\n Reply::Status(ref string) => assert_eq!(&string[..], \"OK\"),\n\n _ => assert!(false),\n\n }\n\n match ok!(context.command(&[\"GET\", \"hiredis-bar\"])) {\n\n Reply::Bulk(ref bytes) => assert_eq!(&bytes[..], &[42u8]),\n\n _ => assert!(false),\n\n }\n\n}\n\n\n", "file_path": "tests/lib.rs", 
"rank": 4, "score": 19576.17280952584 }, { "content": "# Hiredis [![Version][version-img]][version-url] [![Status][status-img]][status-url]\n\n\n\nThe package provides an interface to [Hiredis][1].\n\n\n\n## [Documentation][documentation]\n\n\n\n## Example\n\n\n\n```rust\n\nuse hiredis::Reply;\n\n\n\nlet mut context = hiredis::connect(\"127.0.0.1\", 6379).unwrap();\n\n\n\nmatch context.command(&[\"SET\", \"greeting\", \"Hi, there!\"]).unwrap() {\n\n Reply::Status(_) => {},\n\n _ => assert!(false),\n\n}\n\n\n\nmatch context.command(&[\"GET\", \"greeting\"]).unwrap() {\n\n Reply::Bulk(bytes) => println!(\"{}\", String::from_utf8(bytes).unwrap()),\n\n _ => assert!(false),\n\n};\n\n```\n\n\n\n## Contribution\n\n\n\nYour contribution is highly appreciated. Do not hesitate to open an issue or a\n\npull request. Note that any contribution submitted for inclusion in the project\n\nwill be licensed according to the terms given in [LICENSE.md](LICENSE.md).\n\n\n\n[1]: https://github.com/redis/hiredis\n\n\n\n[documentation]: https://docs.rs/hiredis\n\n[status-img]: https://travis-ci.org/stainless-steel/hiredis.svg?branch=master\n\n[status-url]: https://travis-ci.org/stainless-steel/hiredis\n\n[version-img]: https://img.shields.io/crates/v/hiredis.svg\n\n[version-url]: https://crates.io/crates/hiredis\n", "file_path": "README.md", "rank": 5, "score": 9603.383589075078 }, { "content": "# License\n\n\n\nThe project is dual licensed under the terms of the Apache License, Version 2.0,\n\nand the MIT License. You may obtain copies of the two licenses at\n\n\n\n* https://www.apache.org/licenses/LICENSE-2.0 and\n\n* https://opensource.org/licenses/MIT, respectively.\n\n\n\nThe following two notices apply to every file of the project.\n\n\n\n## The Apache License\n\n\n\n```\n\nCopyright 2015, 2016 The hiredis Developers\n\n\n\nLicensed under the Apache License, Version 2.0 (the “License”); you may not use\n\nthis file except in compliance with the License. 
You may obtain a copy of the\n\nLicense at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software distributed\n\nunder the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR\n\nCONDITIONS OF ANY KIND, either express or implied. See the License for the\n\nspecific language governing permissions and limitations under the License.\n\n```\n\n\n\n## The MIT License\n\n\n\n```\n\nCopyright 2015, 2016 The hiredis Developers\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\n\nthis software and associated documentation files (the “Software”), to deal in\n\nthe Software without restriction, including without limitation the rights to\n\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software is furnished to do so,\n\nsubject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n```\n", "file_path": "LICENSE.md", "rank": 6, "score": 9600.230836157156 }, { "content": "extern crate hiredis;\n\n\n\nuse hiredis::Reply;\n\n\n\nmacro_rules! ok(\n\n ($result:expr) => ($result.unwrap());\n\n);\n\n\n\n#[test]\n", "file_path": "tests/lib.rs", "rank": 19, "score": 6.4460198127149235 } ]
Rust
src/docker_run/http_extra.rs
glotcode/docker-run
d2ee4c820a19ef063e12b115f0d4463c51dca430
use http::{Request, Response}; use http::header; use http::status; use http::header::CONTENT_LENGTH; use http::header::TRANSFER_ENCODING; use http::response; use std::io::{Read, Write}; use serde::Deserialize; use serde::de::DeserializeOwned; use std::io; use std::io::BufReader; use std::io::BufRead; use std::str::FromStr; use std::fmt; const CARRIAGE_RETURN: u8 = 0xD; const LINE_FEED: u8 = 0xA; pub enum Body { Empty(), Bytes(Vec<u8>), } #[derive(Debug)] pub enum Error { WriteRequest(io::Error), ReadResponse(io::Error), ParseResponseHead(ParseError), BadStatus(status::StatusCode, Vec<u8>), ReadChunkedBody(ReadChunkError), ReadBody(io::Error), DeserializeBody(serde_json::Error), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::WriteRequest(err) => { write!(f, "Failed to send request: {}", err) } Error::ReadResponse(err) => { write!(f, "Failed read response: {}", err) } Error::ParseResponseHead(err) => { write!(f, "Failed parse response head: {}", err) } Error::ReadChunkedBody(err) => { write!(f, "Failed read to chunked response body: {}", err) } Error::ReadBody(err) => { write!(f, "Failed read to response body: {}", err) } Error::BadStatus(status_code, body) => { let msg = String::from_utf8(body.to_vec()) .unwrap_or(format!("{:?}", body)); write!(f, "Unexpected status code {}: {}", status_code, msg) } Error::DeserializeBody(err) => { write!(f, "Failed deserialize response body: {}", err) } } } } pub fn send_request<Stream, ResponseBody>(mut stream: Stream, req: Request<Body>) -> Result<Response<ResponseBody>, Error> where Stream: Read + Write, ResponseBody: DeserializeOwned, { write_request_head(&mut stream, &req) .map_err(Error::WriteRequest)?; write_request_body(&mut stream, &req) .map_err(Error::WriteRequest)?; let mut reader = BufReader::new(stream); let response_head = read_response_head(&mut reader) .map_err(Error::ReadResponse)?; let response_parts = parse_response_head(response_head) 
.map_err(Error::ParseResponseHead)?; let raw_body = match get_transfer_encoding(&response_parts.headers) { TransferEncoding::Chunked() => { read_chunked_response_body(reader) .map_err(Error::ReadChunkedBody)? } _ => { let content_length = get_content_length(&response_parts.headers); read_response_body(content_length, reader) .map_err(Error::ReadBody)? } }; err_if_false(response_parts.status.is_success(), Error::BadStatus( response_parts.status, raw_body.clone() ))?; let body = serde_json::from_slice(&raw_body) .map_err(Error::DeserializeBody)?; Ok(Response::from_parts(response_parts, body)) } fn read_response_body<R: BufRead>(content_length: usize, mut reader: R) -> Result<Vec<u8>, io::Error> { if content_length > 0 { let mut buffer = vec![0u8; content_length]; reader.read_exact(&mut buffer)?; Ok(buffer) } else { Ok(vec![]) } } #[derive(Debug)] pub enum ReadChunkError { ReadChunkLength(io::Error), ParseChunkLength(std::num::ParseIntError), ReadChunk(io::Error), SkipLineFeed(io::Error), } impl fmt::Display for ReadChunkError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ReadChunkError::ReadChunkLength(err) => { write!(f, "Failed to read chunk length: {}", err) } ReadChunkError::ParseChunkLength(err) => { write!(f, "Failed parse chunk length: {}", err) } ReadChunkError::ReadChunk(err) => { write!(f, "Failed read chunk: {}", err) } ReadChunkError::SkipLineFeed(err) => { write!(f, "Failed read line feed at end of chunk: {}", err) } } } } fn read_chunked_response_body<R: BufRead>(mut reader: R) -> Result<Vec<u8>, ReadChunkError> { let mut body = vec![]; loop { let mut chunk = read_response_chunk(&mut reader)?; if chunk.is_empty() { break } body.append(&mut chunk) } Ok(body) } fn read_response_chunk<R: BufRead>(mut reader: R) -> Result<Vec<u8>, ReadChunkError> { let mut buffer = String::new(); reader.read_line(&mut buffer) .map_err(ReadChunkError::ReadChunkLength)?; let chunk_length = usize::from_str_radix(buffer.trim_end(), 16) 
.map_err(ReadChunkError::ParseChunkLength)?; let chunk = read_response_body(chunk_length, &mut reader) .map_err(ReadChunkError::ReadChunk)?; let mut void = String::new(); reader.read_line(&mut void) .map_err(ReadChunkError::SkipLineFeed)?; Ok(chunk) } fn get_content_length(headers: &header::HeaderMap<header::HeaderValue>) -> usize { headers.get(CONTENT_LENGTH) .map(|value| { value .to_str() .unwrap_or("") .parse() .unwrap_or(0) }) .unwrap_or(0) } enum TransferEncoding { NoEncoding(), Chunked(), Other(String), } impl TransferEncoding { pub fn from_str(value: &str) -> TransferEncoding { match value { "chunked" => { TransferEncoding::Chunked() } "" => { TransferEncoding::NoEncoding() } other => { TransferEncoding::Other(other.to_string()) } } } } fn get_transfer_encoding(headers: &header::HeaderMap<header::HeaderValue>) -> TransferEncoding { let value = headers.get(TRANSFER_ENCODING) .map(|value| value.to_str().unwrap_or("").to_string()) .unwrap_or_else(|| "".to_string()); TransferEncoding::from_str(&value) } #[derive(Debug)] pub struct EmptyResponse {} impl<'de> Deserialize<'de> for EmptyResponse { fn deserialize<D>(_: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { Ok(EmptyResponse{}) } } pub fn format_request_line<T>(req: &Request<T>) -> String { let path = req.uri() .path_and_query() .map(|x| x.as_str()) .unwrap_or(""); format!("{} {} {:?}", req.method(), path, req.version()) } pub fn format_request_headers<T>(req: &Request<T>) -> String { req.headers() .iter() .map(|(key, value)| format!("{}: {}", key, value.to_str().unwrap_or(""))) .collect::<Vec<String>>() .join("\r\n") } fn write_request_head<T, W: Write>(mut writer: W, req: &Request<T>) -> Result<(), io::Error> { let request_line = format_request_line(&req); write!(writer, "{}\r\n", request_line)?; let headers = format_request_headers(&req); write!(writer, "{}\r\n\r\n", headers) } fn write_request_body<W: Write>(mut writer: W, req: &Request<Body>) -> Result<(), io::Error>{ match req.body() { 
Body::Empty() => { Ok(()) } Body::Bytes(body) => { writer.write_all(body) } } } fn read_response_head<R: BufRead>(mut reader: R) -> Result<Vec<u8>, io::Error> { let mut response_headers = Vec::new(); for _ in 0..20 { if response_headers.ends_with(&[CARRIAGE_RETURN, LINE_FEED, CARRIAGE_RETURN, LINE_FEED]) { break; } reader.read_until(LINE_FEED, &mut response_headers)?; } Ok(response_headers) } #[derive(Debug)] pub enum ParseError { Parse(httparse::Error), Empty(), Partial(), Response(ResponseError), } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ParseError::Parse(err) => { write!(f, "{}", err) } ParseError::Empty() => { write!(f, "Received empty response") } ParseError::Partial() => { write!(f, "Received partial response") } ParseError::Response(err) => { write!(f, "Invalid response: {}", err) } } } } pub fn parse_response_head(bytes: Vec<u8>) -> Result<response::Parts, ParseError> { let mut headers = [httparse::EMPTY_HEADER; 30]; let mut resp = httparse::Response::new(&mut headers); match resp.parse(&bytes) { Ok(httparse::Status::Complete(_)) => { let parts = to_http_parts(resp) .map_err(ParseError::Response)?; Ok(parts) } Ok(httparse::Status::Partial) => { if bytes.is_empty() { Err(ParseError::Empty()) } else { Err(ParseError::Partial()) } } Err(err) => { Err(ParseError::Parse(err)) } } } #[derive(Debug)] pub enum ResponseError { InvalidBuilder(), HeaderName(header::InvalidHeaderName), HeaderValue(header::InvalidHeaderValue), StatusCode(), Builder(http::Error), } impl fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ResponseError::InvalidBuilder() => { write!(f, "Invalid response builder") } ResponseError::HeaderName(err) => { write!(f, "Invalid header name: {}", err) } ResponseError::HeaderValue(err) => { write!(f, "Invalid header value: {}", err) } ResponseError::StatusCode() => { write!(f, "Failed to parse status code") } ResponseError::Builder(err) => 
{ write!(f, "Response builder error: {}", err) } } } } fn to_http_parts(parsed: httparse::Response) -> Result<response::Parts, ResponseError> { let mut builder = Response::builder(); let headers = builder.headers_mut() .ok_or(ResponseError::InvalidBuilder())?; for hdr in parsed.headers.iter() { let name = header::HeaderName::from_str(hdr.name) .map_err(ResponseError::HeaderName)?; let value = header::HeaderValue::from_bytes(hdr.value) .map_err(ResponseError::HeaderValue)?; headers.insert(name, value); } let code = parsed.code .ok_or(ResponseError::StatusCode())?; let response = builder.status(code).body(()) .map_err(ResponseError::Builder)?; Ok(response.into_parts().0) } fn err_if_false<E>(value: bool, err: E) -> Result<(), E> { if value { Ok(()) } else { Err(err) } }
use http::{Request, Response}; use http::header; use http::status; use http::header::CONTENT_LENGTH; use http::header::TRANSFER_ENCODING; use http::response; use std::io::{Read, Write}; use serde::Deserialize; use serde::de::DeserializeOwned; use std::io; use std::io::BufReader; use std::io::BufRead; use std::str::FromStr; use std::fmt; const CARRIAGE_RETURN: u8 = 0xD; const LINE_FEED: u8 = 0xA; pub enum Body { Empty(), Bytes(Vec<u8>), } #[derive(Debug)] pub enum Error { WriteRequest(io::Error), ReadResponse(io::Error), ParseResponseHead(ParseError), BadStatus(status::StatusCode, Vec<u8>), ReadChunkedBody(ReadChunkError), ReadBody(io::Error), DeserializeBody(serde_json::Error), } impl fmt::Display for Error {
} pub fn send_request<Stream, ResponseBody>(mut stream: Stream, req: Request<Body>) -> Result<Response<ResponseBody>, Error> where Stream: Read + Write, ResponseBody: DeserializeOwned, { write_request_head(&mut stream, &req) .map_err(Error::WriteRequest)?; write_request_body(&mut stream, &req) .map_err(Error::WriteRequest)?; let mut reader = BufReader::new(stream); let response_head = read_response_head(&mut reader) .map_err(Error::ReadResponse)?; let response_parts = parse_response_head(response_head) .map_err(Error::ParseResponseHead)?; let raw_body = match get_transfer_encoding(&response_parts.headers) { TransferEncoding::Chunked() => { read_chunked_response_body(reader) .map_err(Error::ReadChunkedBody)? } _ => { let content_length = get_content_length(&response_parts.headers); read_response_body(content_length, reader) .map_err(Error::ReadBody)? } }; err_if_false(response_parts.status.is_success(), Error::BadStatus( response_parts.status, raw_body.clone() ))?; let body = serde_json::from_slice(&raw_body) .map_err(Error::DeserializeBody)?; Ok(Response::from_parts(response_parts, body)) } fn read_response_body<R: BufRead>(content_length: usize, mut reader: R) -> Result<Vec<u8>, io::Error> { if content_length > 0 { let mut buffer = vec![0u8; content_length]; reader.read_exact(&mut buffer)?; Ok(buffer) } else { Ok(vec![]) } } #[derive(Debug)] pub enum ReadChunkError { ReadChunkLength(io::Error), ParseChunkLength(std::num::ParseIntError), ReadChunk(io::Error), SkipLineFeed(io::Error), } impl fmt::Display for ReadChunkError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ReadChunkError::ReadChunkLength(err) => { write!(f, "Failed to read chunk length: {}", err) } ReadChunkError::ParseChunkLength(err) => { write!(f, "Failed parse chunk length: {}", err) } ReadChunkError::ReadChunk(err) => { write!(f, "Failed read chunk: {}", err) } ReadChunkError::SkipLineFeed(err) => { write!(f, "Failed read line feed at end of chunk: {}", err) } } } } fn 
read_chunked_response_body<R: BufRead>(mut reader: R) -> Result<Vec<u8>, ReadChunkError> { let mut body = vec![]; loop { let mut chunk = read_response_chunk(&mut reader)?; if chunk.is_empty() { break } body.append(&mut chunk) } Ok(body) } fn read_response_chunk<R: BufRead>(mut reader: R) -> Result<Vec<u8>, ReadChunkError> { let mut buffer = String::new(); reader.read_line(&mut buffer) .map_err(ReadChunkError::ReadChunkLength)?; let chunk_length = usize::from_str_radix(buffer.trim_end(), 16) .map_err(ReadChunkError::ParseChunkLength)?; let chunk = read_response_body(chunk_length, &mut reader) .map_err(ReadChunkError::ReadChunk)?; let mut void = String::new(); reader.read_line(&mut void) .map_err(ReadChunkError::SkipLineFeed)?; Ok(chunk) } fn get_content_length(headers: &header::HeaderMap<header::HeaderValue>) -> usize { headers.get(CONTENT_LENGTH) .map(|value| { value .to_str() .unwrap_or("") .parse() .unwrap_or(0) }) .unwrap_or(0) } enum TransferEncoding { NoEncoding(), Chunked(), Other(String), } impl TransferEncoding { pub fn from_str(value: &str) -> TransferEncoding { match value { "chunked" => { TransferEncoding::Chunked() } "" => { TransferEncoding::NoEncoding() } other => { TransferEncoding::Other(other.to_string()) } } } } fn get_transfer_encoding(headers: &header::HeaderMap<header::HeaderValue>) -> TransferEncoding { let value = headers.get(TRANSFER_ENCODING) .map(|value| value.to_str().unwrap_or("").to_string()) .unwrap_or_else(|| "".to_string()); TransferEncoding::from_str(&value) } #[derive(Debug)] pub struct EmptyResponse {} impl<'de> Deserialize<'de> for EmptyResponse { fn deserialize<D>(_: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { Ok(EmptyResponse{}) } } pub fn format_request_line<T>(req: &Request<T>) -> String { let path = req.uri() .path_and_query() .map(|x| x.as_str()) .unwrap_or(""); format!("{} {} {:?}", req.method(), path, req.version()) } pub fn format_request_headers<T>(req: &Request<T>) -> String { req.headers() 
.iter() .map(|(key, value)| format!("{}: {}", key, value.to_str().unwrap_or(""))) .collect::<Vec<String>>() .join("\r\n") } fn write_request_head<T, W: Write>(mut writer: W, req: &Request<T>) -> Result<(), io::Error> { let request_line = format_request_line(&req); write!(writer, "{}\r\n", request_line)?; let headers = format_request_headers(&req); write!(writer, "{}\r\n\r\n", headers) } fn write_request_body<W: Write>(mut writer: W, req: &Request<Body>) -> Result<(), io::Error>{ match req.body() { Body::Empty() => { Ok(()) } Body::Bytes(body) => { writer.write_all(body) } } } fn read_response_head<R: BufRead>(mut reader: R) -> Result<Vec<u8>, io::Error> { let mut response_headers = Vec::new(); for _ in 0..20 { if response_headers.ends_with(&[CARRIAGE_RETURN, LINE_FEED, CARRIAGE_RETURN, LINE_FEED]) { break; } reader.read_until(LINE_FEED, &mut response_headers)?; } Ok(response_headers) } #[derive(Debug)] pub enum ParseError { Parse(httparse::Error), Empty(), Partial(), Response(ResponseError), } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ParseError::Parse(err) => { write!(f, "{}", err) } ParseError::Empty() => { write!(f, "Received empty response") } ParseError::Partial() => { write!(f, "Received partial response") } ParseError::Response(err) => { write!(f, "Invalid response: {}", err) } } } } pub fn parse_response_head(bytes: Vec<u8>) -> Result<response::Parts, ParseError> { let mut headers = [httparse::EMPTY_HEADER; 30]; let mut resp = httparse::Response::new(&mut headers); match resp.parse(&bytes) { Ok(httparse::Status::Complete(_)) => { let parts = to_http_parts(resp) .map_err(ParseError::Response)?; Ok(parts) } Ok(httparse::Status::Partial) => { if bytes.is_empty() { Err(ParseError::Empty()) } else { Err(ParseError::Partial()) } } Err(err) => { Err(ParseError::Parse(err)) } } } #[derive(Debug)] pub enum ResponseError { InvalidBuilder(), HeaderName(header::InvalidHeaderName), 
HeaderValue(header::InvalidHeaderValue), StatusCode(), Builder(http::Error), } impl fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ResponseError::InvalidBuilder() => { write!(f, "Invalid response builder") } ResponseError::HeaderName(err) => { write!(f, "Invalid header name: {}", err) } ResponseError::HeaderValue(err) => { write!(f, "Invalid header value: {}", err) } ResponseError::StatusCode() => { write!(f, "Failed to parse status code") } ResponseError::Builder(err) => { write!(f, "Response builder error: {}", err) } } } } fn to_http_parts(parsed: httparse::Response) -> Result<response::Parts, ResponseError> { let mut builder = Response::builder(); let headers = builder.headers_mut() .ok_or(ResponseError::InvalidBuilder())?; for hdr in parsed.headers.iter() { let name = header::HeaderName::from_str(hdr.name) .map_err(ResponseError::HeaderName)?; let value = header::HeaderValue::from_bytes(hdr.value) .map_err(ResponseError::HeaderValue)?; headers.insert(name, value); } let code = parsed.code .ok_or(ResponseError::StatusCode())?; let response = builder.status(code).body(()) .map_err(ResponseError::Builder)?; Ok(response.into_parts().0) } fn err_if_false<E>(value: bool, err: E) -> Result<(), E> { if value { Ok(()) } else { Err(err) } }
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::WriteRequest(err) => { write!(f, "Failed to send request: {}", err) } Error::ReadResponse(err) => { write!(f, "Failed read response: {}", err) } Error::ParseResponseHead(err) => { write!(f, "Failed parse response head: {}", err) } Error::ReadChunkedBody(err) => { write!(f, "Failed read to chunked response body: {}", err) } Error::ReadBody(err) => { write!(f, "Failed read to response body: {}", err) } Error::BadStatus(status_code, body) => { let msg = String::from_utf8(body.to_vec()) .unwrap_or(format!("{:?}", body)); write!(f, "Unexpected status code {}: {}", status_code, msg) } Error::DeserializeBody(err) => { write!(f, "Failed deserialize response body: {}", err) } } }
function_block-full_function
[ { "content": "pub fn remove_container<Stream: Read + Write>(stream: Stream, container_id: &str) -> Result<http::Response<http_extra::EmptyResponse>, Error> {\n\n let req = remove_container_request(container_id)\n\n .map_err(|x| Error::PrepareRequest(PrepareRequestError::Request(x)))?;\n\n\n\n http_extra::send_request(stream, req)\n\n .map_err(Error::SendRequest)\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 0, "score": 115573.3729428931 }, { "content": "pub fn start_container<Stream: Read + Write>(stream: Stream, container_id: &str) -> Result<http::Response<http_extra::EmptyResponse>, Error> {\n\n let req = start_container_request(container_id)\n\n .map_err(|x| Error::PrepareRequest(PrepareRequestError::Request(x)))?;\n\n\n\n http_extra::send_request(stream, req)\n\n .map_err(Error::SendRequest)\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 1, "score": 115573.3729428931 }, { "content": "pub fn attach_container<Stream: Read + Write>(stream: Stream, container_id: &str) -> Result<http::Response<http_extra::EmptyResponse>, Error> {\n\n let req = attach_container_request(container_id)\n\n .map_err(|x| Error::PrepareRequest(PrepareRequestError::Request(x)))?;\n\n\n\n http_extra::send_request(stream, req)\n\n .map_err(Error::SendRequest)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum StreamError {\n\n Read(io::Error),\n\n ReadStreamType(io::Error),\n\n UnknownStreamType(u8),\n\n ReadStreamLength(io::Error),\n\n InvalidStreamLength(<usize as std::convert::TryFrom<u32>>::Error),\n\n MaxExecutionTime(),\n\n MaxReadSize(usize),\n\n}\n\n\n\nimpl fmt::Display for StreamError {\n", "file_path": "src/docker_run/docker.rs", "rank": 2, "score": 115573.3729428931 }, { "content": "enum Error {\n\n BuildConfig(environment::Error),\n\n CreateServer(io::Error),\n\n StartServer(api::Error),\n\n Signal(io::Error),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::BuildConfig(err) => 
{\n\n write!(f, \"Failed to build config: {}\", err)\n\n }\n\n\n\n Error::CreateServer(err) => {\n\n write!(f, \"Failed to create api server: {}\", err)\n\n }\n\n\n\n Error::StartServer(err) => {\n\n write!(f, \"Failed to start api server: {}\", err)\n\n }\n\n\n\n Error::Signal(err) => {\n\n write!(f, \"Failed to register signal handler: {}\", err)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 112664.3745971364 }, { "content": "pub fn version<Stream: Read + Write>(stream: Stream) -> Result<http::Response<VersionResponse>, Error> {\n\n let req = version_request()\n\n .map_err(|x| Error::PrepareRequest(PrepareRequestError::Request(x)))?;\n\n\n\n http_extra::send_request(stream, req)\n\n .map_err(Error::SendRequest)\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Clone, Debug)]\n\n#[serde(rename_all(deserialize = \"PascalCase\"))]\n\n#[serde(rename_all(serialize = \"camelCase\"))]\n\npub struct ContainerCreatedResponse {\n\n pub id: String,\n\n pub warnings: Vec<String>,\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 4, "score": 108940.62187368335 }, { "content": "pub fn create_container<Stream: Read + Write>(stream: Stream, config: &ContainerConfig) -> Result<http::Response<ContainerCreatedResponse>, Error> {\n\n let req = create_container_request(config)\n\n .map_err(Error::PrepareRequest)?;\n\n\n\n http_extra::send_request(stream, req)\n\n .map_err(Error::SendRequest)\n\n}\n\n\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 6, "score": 99643.26417187846 }, { "content": "pub fn version_request() -> Result<http::Request<http_extra::Body>, http::Error> {\n\n http::Request::get(\"/version\")\n\n .header(\"Accept\", \"application/json\")\n\n .header(\"Host\", \"127.0.0.1\")\n\n .header(\"Connection\", \"close\")\n\n .body(http_extra::Body::Empty())\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 8, "score": 97132.88232182027 }, { "content": "pub fn error_response(request: tiny_http::Request, error: 
ErrorResponse) -> Result<(), io::Error> {\n\n let error_body = serde_json::to_vec_pretty(&error.body)\n\n .unwrap_or_else(|_| b\"Failed to serialize error body\".to_vec());\n\n\n\n let response = tiny_http::Response::new(\n\n tiny_http::StatusCode(error.status_code),\n\n vec![\n\n tiny_http::Header::from_bytes(&b\"Content-Type\"[..], &b\"application/json\"[..]).unwrap()\n\n ],\n\n error_body.as_slice(),\n\n Some(error_body.len()),\n\n None,\n\n );\n\n\n\n request.respond(response)\n\n}\n\n\n\n\n\npub enum Error {\n\n CloneServer(io::Error, u16),\n", "file_path": "src/docker_run/api/mod.rs", "rank": 10, "score": 91568.38719981781 }, { "content": "pub fn attach_container_request(container_id: &str) -> Result<http::Request<http_extra::Body>, http::Error> {\n\n let url = format!(\"/containers/{}/attach?stream=1&stdout=1&stdin=1&stderr=1\", container_id);\n\n\n\n http::Request::post(url)\n\n .header(\"Host\", \"127.0.0.1\")\n\n .body(http_extra::Body::Empty())\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 11, "score": 89189.06352166322 }, { "content": "pub fn remove_container_request(container_id: &str) -> Result<http::Request<http_extra::Body>, http::Error> {\n\n let url = format!(\"/containers/{}?v=1&force=1\", container_id);\n\n\n\n http::Request::delete(url)\n\n .header(\"Accept\", \"application/json\")\n\n .header(\"Host\", \"127.0.0.1\")\n\n .header(\"Connection\", \"close\")\n\n .body(http_extra::Body::Empty())\n\n}\n\n\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 12, "score": 89189.06352166319 }, { "content": "pub fn start_container_request(container_id: &str) -> Result<http::Request<http_extra::Body>, http::Error> {\n\n let url = format!(\"/containers/{}/start\", container_id);\n\n\n\n http::Request::post(url)\n\n .header(\"Accept\", \"application/json\")\n\n .header(\"Host\", \"127.0.0.1\")\n\n .header(\"Connection\", \"close\")\n\n .body(http_extra::Body::Empty())\n\n}\n\n\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 
13, "score": 89189.06352166322 }, { "content": "fn decode_dict(data: &[u8]) -> Result<Map<String, Value>, serde_json::Error> {\n\n serde_json::from_slice(data)\n\n}\n\n\n\n\n", "file_path": "src/docker_run/run.rs", "rank": 15, "score": 78972.18500348306 }, { "content": "pub fn authorization_error() -> ErrorResponse {\n\n ErrorResponse{\n\n status_code: 401,\n\n body: ErrorBody{\n\n error: \"access_token\".to_string(),\n\n message: \"Missing or wrong access token\".to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 17, "score": 76097.5000648863 }, { "content": "pub fn lookup<T>(environment: &Environment, key: &'static str) -> Result<T, Error>\n\n where T: FromStr,\n\n T::Err: fmt::Display {\n\n\n\n environment.get(key)\n\n .ok_or(Error::KeyNotFound(key))\n\n .and_then(|string_value| {\n\n string_value\n\n .parse::<T>()\n\n .map_err(|err| Error::Parse{\n\n key,\n\n details: err.to_string(),\n\n })\n\n })\n\n}\n\n\n\n\n", "file_path": "src/docker_run/environment.rs", "rank": 19, "score": 72990.6821915149 }, { "content": "pub fn create_container_request(config: &ContainerConfig) -> Result<http::Request<http_extra::Body>, PrepareRequestError> {\n\n let body = serde_json::to_vec(config)\n\n .map_err(PrepareRequestError::SerializeBody)?;\n\n\n\n http::Request::post(\"/containers/create\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"Accept\", \"application/json\")\n\n .header(\"Host\", \"127.0.0.1\")\n\n .header(\"Content-Length\", body.len())\n\n .header(\"Connection\", \"close\")\n\n .body(http_extra::Body::Bytes(body))\n\n .map_err(PrepareRequestError::Request)\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 21, "score": 72509.6015766715 }, { "content": "pub fn success_response(request: tiny_http::Request, data: &SuccessResponse) -> Result<(), io::Error> {\n\n let body = data.body.as_slice();\n\n\n\n let response = tiny_http::Response::new(\n\n tiny_http::StatusCode(data.status_code),\n\n vec![\n\n 
tiny_http::Header::from_bytes(&b\"Content-Type\"[..], &b\"application/json\"[..]).unwrap()\n\n ],\n\n body,\n\n Some(body.len()),\n\n None,\n\n );\n\n\n\n request.respond(response)\n\n}\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 22, "score": 71442.19914027448 }, { "content": "pub fn lookup_optional<T>(environment: &Environment, key: &'static str) -> Result<Option<T>, Error>\n\n where T: FromStr,\n\n T::Err: fmt::Display {\n\n\n\n match environment.get(key) {\n\n None => {\n\n Ok(None)\n\n }\n\n\n\n Some(string_value) => {\n\n string_value\n\n .parse::<T>()\n\n .map(Some)\n\n .map_err(|err| Error::Parse{\n\n key,\n\n details: err.to_string(),\n\n })\n\n }\n\n }\n\n}\n", "file_path": "src/docker_run/environment.rs", "rank": 23, "score": 70018.5609314231 }, { "content": "pub fn prepare_json_response<T: serde::Serialize>(body: &T, format: JsonFormat) -> Result<SuccessResponse, ErrorResponse> {\n\n let json_to_vec = match format {\n\n JsonFormat::Minimal => {\n\n serde_json::to_vec\n\n }\n\n\n\n JsonFormat::Pretty => {\n\n serde_json::to_vec_pretty\n\n }\n\n };\n\n\n\n match json_to_vec(body) {\n\n Ok(data) => {\n\n Ok(SuccessResponse{\n\n status_code: 200,\n\n body: data,\n\n })\n\n }\n\n\n\n Err(err) => {\n", "file_path": "src/docker_run/api/mod.rs", "rank": 24, "score": 69054.31450939178 }, { "content": "pub fn read_json_body<T: serde::de::DeserializeOwned>(request: &mut tiny_http::Request) -> Result<T, ErrorResponse> {\n\n serde_json::from_reader(request.as_reader())\n\n .map_err(|err| ErrorResponse{\n\n status_code: 400,\n\n body: ErrorBody{\n\n error: \"request.parse\".to_string(),\n\n message: format!(\"Failed to parse json from request: {}\", err),\n\n }\n\n })\n\n}\n\n\n\npub struct SuccessResponse {\n\n status_code: u16,\n\n body: Vec<u8>,\n\n}\n\n\n\npub enum JsonFormat {\n\n Minimal,\n\n Pretty,\n\n}\n\n\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 25, "score": 67453.68144531331 }, { "content": "pub fn with_stream<F, T, E, 
ErrorTagger>(config: &Config, to_error: ErrorTagger, f: F) -> Result<T, E>\n\n where\n\n F: FnOnce(&mut UnixStream) -> Result<T, E>,\n\n ErrorTagger: Copy,\n\n ErrorTagger: FnOnce(Error) -> E {\n\n\n\n let mut stream = UnixStream::connect(&config.path)\n\n .map_err(Error::Connect)\n\n .map_err(to_error)?;\n\n\n\n stream.set_read_timeout(Some(config.read_timeout))\n\n .map_err(Error::SetStreamTimeout)\n\n .map_err(to_error)?;\n\n\n\n stream.set_write_timeout(Some(config.write_timeout))\n\n .map_err(Error::SetStreamTimeout)\n\n .map_err(to_error)?;\n\n\n\n let result = f(&mut stream)?;\n\n\n", "file_path": "src/docker_run/unix_stream.rs", "rank": 26, "score": 61333.92540400839 }, { "content": "pub fn run_code<Payload>(mut stream: &UnixStream, container_id: &str, run_request: &RunRequest<Payload>) -> Result<Map<String, Value>, Error>\n\n where\n\n Payload: Serialize,\n\n {\n\n\n\n docker::attach_container(&mut stream, container_id)\n\n .map_err(Error::AttachContainer)?;\n\n\n\n // Send payload\n\n serde_json::to_writer(&mut stream, &run_request.payload)\n\n .map_err(Error::SerializePayload)?;\n\n\n\n // Shutdown write stream which will trigger an EOF on the reader\n\n let _ = stream.shutdown(net::Shutdown::Write);\n\n\n\n // Read response\n\n let output = docker::read_stream(stream, run_request.limits.max_output_size)\n\n .map_err(Error::ReadStream)?;\n\n\n\n // Return error if we recieved stdin or stderr data from the stream\n", "file_path": "src/docker_run/run.rs", "rank": 27, "score": 60245.818835475424 }, { "content": "pub fn run_with_container<T: Serialize>(stream_config: &unix_stream::Config, run_request: RunRequest<T>, container_id: &str) -> Result<Map<String, Value>, Error> {\n\n\n\n unix_stream::with_stream(&stream_config, Error::UnixStream, |stream| {\n\n docker::start_container(stream, &container_id)\n\n .map_err(Error::StartContainer)\n\n })?;\n\n\n\n let run_config = unix_stream::Config{\n\n read_timeout: run_request.limits.max_execution_time,\n\n 
..stream_config.clone()\n\n };\n\n\n\n unix_stream::with_stream(&run_config, Error::UnixStream, |stream| {\n\n run_code(stream, &container_id, &run_request)\n\n })\n\n}\n\n\n", "file_path": "src/docker_run/run.rs", "rank": 28, "score": 58220.94691214888 }, { "content": "pub fn run<T: Serialize>(stream_config: unix_stream::Config, run_request: RunRequest<T>, debug: debug::Config) -> Result<Map<String, Value>, Error> {\n\n let container_response = unix_stream::with_stream(&stream_config, Error::UnixStream, |stream| {\n\n docker::create_container(stream, &run_request.container_config)\n\n .map_err(Error::CreateContainer)\n\n })?;\n\n\n\n let container_id = &container_response.body().id;\n\n\n\n let result = run_with_container(&stream_config, run_request, &container_id);\n\n\n\n if !debug.keep_container {\n\n let _ = unix_stream::with_stream(&stream_config, Error::UnixStream, |stream| {\n\n match docker::remove_container(stream, &container_id) {\n\n Ok(_) => {}\n\n\n\n Err(err) => {\n\n log::error!(\"Failed to remove container: {}\", err);\n\n }\n\n }\n\n\n\n Ok(())\n\n });\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/docker_run/run.rs", "rank": 29, "score": 58060.56541706532 }, { "content": "#[derive(Debug)]\n\nenum StreamType {\n\n Stdin(),\n\n Stdout(),\n\n Stderr(),\n\n}\n\n\n\nimpl StreamType {\n\n fn from_byte(n: u8) -> Option<StreamType> {\n\n match n {\n\n 0 => Some(StreamType::Stdin()),\n\n 1 => Some(StreamType::Stdout()),\n\n 2 => Some(StreamType::Stderr()),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 30, "score": 57385.27624369765 }, { "content": "fn io_read_error_to_stream_error(err: io::Error) -> StreamError {\n\n if err.kind() == io::ErrorKind::WouldBlock {\n\n StreamError::MaxExecutionTime()\n\n } else {\n\n StreamError::Read(err)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 33, "score": 53956.71736620058 }, { "content": "pub fn read_stream<R: Read>(r: R, max_read_size: usize) 
-> Result<StreamOutput, StreamError> {\n\n let mut reader = iowrap::Eof::new(r);\n\n let mut read_size = 0;\n\n let mut stdin = Vec::new();\n\n let mut stdout = Vec::new();\n\n let mut stderr = Vec::new();\n\n\n\n while !reader.eof().map_err(io_read_error_to_stream_error)? {\n\n let stream_type = read_stream_type(&mut reader)?;\n\n let stream_length = read_stream_length(&mut reader)?;\n\n\n\n let mut buffer = vec![0u8; stream_length];\n\n reader.read_exact(&mut buffer)\n\n .map_err(io_read_error_to_stream_error)?;\n\n\n\n match stream_type {\n\n StreamType::Stdin() => {\n\n stdin.append(&mut buffer);\n\n }\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 34, "score": 53364.697190503655 }, { "content": "fn handle_error(err: Error) -> api::ErrorResponse {\n\n match err {\n\n Error::UnixStream(_) => {\n\n api::ErrorResponse{\n\n status_code: 500,\n\n body: api::ErrorBody{\n\n error: \"docker.unixsocket\".to_string(),\n\n message: err.to_string(),\n\n }\n\n }\n\n }\n\n\n\n Error::Version(_) => {\n\n api::ErrorResponse{\n\n status_code: 500,\n\n body: api::ErrorBody{\n\n error: \"docker.version\".to_string(),\n\n message: err.to_string(),\n\n }\n\n }\n", "file_path": "src/docker_run/api/version.rs", "rank": 35, "score": 52446.85185918637 }, { "content": "fn handle_error(err: run::Error) -> api::ErrorResponse {\n\n match &err {\n\n run::Error::UnixStream(_) => {\n\n error_response(&err, 500, \"docker.unixsocket\")\n\n }\n\n\n\n run::Error::CreateContainer(_) => {\n\n error_response(&err, 400, \"docker.container.create\")\n\n }\n\n\n\n run::Error::StartContainer(_) => {\n\n error_response(&err, 500, \"docker.container.start\")\n\n }\n\n\n\n run::Error::AttachContainer(_) => {\n\n error_response(&err, 500, \"docker.container.attach\")\n\n }\n\n\n\n run::Error::SerializePayload(_) => {\n\n error_response(&err, 400, \"docker.container.stream.payload.serialize\")\n", "file_path": "src/docker_run/api/run.rs", "rank": 36, "score": 51160.48845822489 }, { "content": "pub 
fn handle(_: &config::Config, _: &mut tiny_http::Request) -> Result<api::SuccessResponse, api::ErrorResponse> {\n\n\n\n Err(api::ErrorResponse{\n\n status_code: 404,\n\n body: api::ErrorBody{\n\n error: \"route.not_found\".to_string(),\n\n message: \"Route not found\".to_string(),\n\n }\n\n })\n\n}\n", "file_path": "src/docker_run/api/not_found.rs", "rank": 37, "score": 50993.458977861716 }, { "content": "pub fn handle(_: &config::Config, _: &mut tiny_http::Request) -> Result<api::SuccessResponse, api::ErrorResponse> {\n\n\n\n api::prepare_json_response(&ServiceInfo{\n\n name: \"docker-run\".to_string(),\n\n version: VERSION.unwrap_or(\"unknown\").to_string(),\n\n description: \"Api for running code in transient docker containers\".to_string(),\n\n }, api::JsonFormat::Pretty)\n\n}\n", "file_path": "src/docker_run/api/root.rs", "rank": 38, "score": 50993.458977861716 }, { "content": "pub fn handle(config: &config::Config, request: &mut tiny_http::Request) -> Result<api::SuccessResponse, api::ErrorResponse> {\n\n api::check_access_token(&config.api, request)?;\n\n\n\n let data = get_version_info(&config.unix_socket)\n\n .map_err(handle_error)?;\n\n\n\n api::prepare_json_response(&data, api::JsonFormat::Pretty)\n\n}\n\n\n\n\n", "file_path": "src/docker_run/api/version.rs", "rank": 39, "score": 49501.29879465448 }, { "content": "pub fn handle(config: &config::Config, request: &mut tiny_http::Request) -> Result<api::SuccessResponse, api::ErrorResponse> {\n\n api::check_access_token(&config.api, request)?;\n\n\n\n let req_body: RequestBody = api::read_json_body(request)?;\n\n let container_config = run::prepare_container_config(req_body.image, config.container.clone());\n\n\n\n let run_result = run::run(config.unix_socket.clone(), run::RunRequest{\n\n container_config,\n\n payload: req_body.payload,\n\n limits: config.run.clone(),\n\n }, config.debug.clone()).map_err(handle_error)?;\n\n\n\n api::prepare_json_response(&run_result, api::JsonFormat::Minimal)\n\n}\n\n\n", 
"file_path": "src/docker_run/api/run.rs", "rank": 40, "score": 49501.29879465448 }, { "content": "fn error_response(err: &run::Error, status_code: u16, error_code: &str) -> api::ErrorResponse {\n\n api::ErrorResponse{\n\n status_code,\n\n body: api::ErrorBody{\n\n error: error_code.to_string(),\n\n message: err.to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/docker_run/api/run.rs", "rank": 41, "score": 48592.477039572026 }, { "content": "fn start() -> Result<(), Error> {\n\n let env = environment::get_environment();\n\n let config = build_config(&env)\n\n .map_err(Error::BuildConfig)?;\n\n\n\n log::info!(\"Listening on {} with {} worker threads\", config.server.listen_addr_with_port(), config.server.worker_threads);\n\n\n\n let server = api::Server::new(config.server.listen_addr_with_port())\n\n .map_err(Error::CreateServer)?;\n\n\n\n let workers = server.start(api::ServerConfig{\n\n listen_addr: config.server.listen_addr_with_port(),\n\n worker_threads: config.server.worker_threads,\n\n handler_config: config,\n\n handler: handle_request,\n\n }).map_err(Error::StartServer)?;\n\n\n\n // Handle OS signals\n\n handle_signals(server)\n\n .map_err(Error::Signal)?;\n\n\n\n // Wait for workers\n\n workers.wait();\n\n\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 48213.096111047984 }, { "content": "pub fn get_environment() -> Environment {\n\n env::vars().collect()\n\n}\n\n\n", "file_path": "src/docker_run/environment.rs", "rank": 43, "score": 48028.621017234706 }, { "content": "pub fn space_separated_string(s: String) -> Vec<String> {\n\n s.split(' ')\n\n .map(|s| s.trim().to_string())\n\n .filter(|s| !s.is_empty())\n\n .collect()\n\n}\n", "file_path": "src/docker_run/environment.rs", "rank": 44, "score": 43074.5803503319 }, { "content": "fn handle_signals(server: api::Server) -> Result<(), io::Error> {\n\n let signals = Signals::new(&[\n\n signal_hook::SIGTERM,\n\n signal_hook::SIGINT,\n\n ])?;\n\n\n\n thread::spawn(move || {\n\n for 
signal in signals.forever() {\n\n match signal {\n\n signal_hook::SIGTERM => {\n\n log::info!(\"Caught SIGTERM signal\");\n\n break\n\n }\n\n\n\n signal_hook::SIGINT => {\n\n log::info!(\"Caught SIGINT signal\");\n\n break\n\n }\n\n\n\n _ => {\n", "file_path": "src/main.rs", "rank": 45, "score": 40995.705543927616 }, { "content": "fn build_config(env: &environment::Environment) -> Result<config::Config, environment::Error> {\n\n let server = build_server_config(env)?;\n\n let api = build_api_config(env)?;\n\n let unix_socket = build_unix_socket_config(env)?;\n\n let container = build_container_config(env)?;\n\n let run = build_run_config(env)?;\n\n let debug = build_debug_config(env)?;\n\n\n\n Ok(config::Config{\n\n server,\n\n api,\n\n unix_socket,\n\n container,\n\n run,\n\n debug,\n\n })\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 38418.10192903296 }, { "content": "pub fn prepare_container_config(image_name: String, config: ContainerConfig) -> docker::ContainerConfig {\n\n let tmpfs = config.tmpfs_mounts();\n\n\n\n docker::ContainerConfig{\n\n hostname: config.hostname,\n\n user: config.user,\n\n attach_stdin: true,\n\n attach_stdout: true,\n\n attach_stderr: true,\n\n tty: false,\n\n open_stdin: true,\n\n stdin_once: true,\n\n image: image_name,\n\n network_disabled: config.network_disabled,\n\n host_config: docker::HostConfig{\n\n memory: config.memory,\n\n privileged: false,\n\n cap_add: config.cap_add,\n\n cap_drop: config.cap_drop,\n\n ulimits: vec![\n", "file_path": "src/docker_run/run.rs", "rank": 49, "score": 38309.09048245891 }, { "content": "fn build_debug_config(env: &environment::Environment) -> Result<debug::Config, environment::Error> {\n\n let keep_container = environment::lookup(env, \"DEBUG_KEEP_CONTAINER\").unwrap_or(false);\n\n\n\n Ok(debug::Config{\n\n keep_container,\n\n })\n\n}\n", "file_path": "src/main.rs", "rank": 50, "score": 37880.40664464485 }, { "content": "fn build_run_config(env: &environment::Environment) -> 
Result<run::Limits, environment::Error> {\n\n let max_execution_time = environment::lookup(env, \"RUN_MAX_EXECUTION_TIME\")?;\n\n let max_output_size = environment::lookup(env, \"RUN_MAX_OUTPUT_SIZE\")?;\n\n\n\n Ok(run::Limits{\n\n max_execution_time: Duration::from_secs(max_execution_time),\n\n max_output_size,\n\n })\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 37880.40664464485 }, { "content": "fn get_version_info(stream_config: &unix_stream::Config) -> Result<VersionInfo, Error> {\n\n let docker_response = unix_stream::with_stream(&stream_config, Error::UnixStream, |stream| {\n\n docker::version(stream)\n\n .map_err(Error::Version)\n\n })?;\n\n\n\n Ok(VersionInfo{\n\n docker: docker_response.body().clone(),\n\n })\n\n}\n\n\n\n\n", "file_path": "src/docker_run/api/version.rs", "rank": 52, "score": 37391.24252733805 }, { "content": "fn build_container_config(env: &environment::Environment) -> Result<run::ContainerConfig, environment::Error> {\n\n let hostname = environment::lookup(env, \"DOCKER_CONTAINER_HOSTNAME\")?;\n\n let user = environment::lookup(env, \"DOCKER_CONTAINER_USER\")?;\n\n let memory = environment::lookup(env, \"DOCKER_CONTAINER_MEMORY\")?;\n\n let network_disabled = environment::lookup(env, \"DOCKER_CONTAINER_NETWORK_DISABLED\")?;\n\n let ulimit_nofile_soft = environment::lookup(env, \"DOCKER_CONTAINER_ULIMIT_NOFILE_SOFT\")?;\n\n let ulimit_nofile_hard = environment::lookup(env, \"DOCKER_CONTAINER_ULIMIT_NOFILE_HARD\")?;\n\n let ulimit_nproc_soft = environment::lookup(env, \"DOCKER_CONTAINER_ULIMIT_NPROC_SOFT\")?;\n\n let ulimit_nproc_hard = environment::lookup(env, \"DOCKER_CONTAINER_ULIMIT_NPROC_HARD\")?;\n\n let cap_add = environment::lookup(env, \"DOCKER_CONTAINER_CAP_ADD\").unwrap_or_default();\n\n let cap_drop = environment::lookup(env, \"DOCKER_CONTAINER_CAP_DROP\").unwrap_or_default();\n\n let readonly_rootfs = environment::lookup(env, \"DOCKER_CONTAINER_READONLY_ROOTFS\").unwrap_or(false);\n\n let tmp_dir_path: 
Option<String> = environment::lookup_optional(env, \"DOCKER_CONTAINER_TMP_DIR_PATH\")?;\n\n let tmp_dir_options = environment::lookup(env, \"DOCKER_CONTAINER_TMP_DIR_OPTIONS\").unwrap_or_else(|_| \"rw,noexec,nosuid,size=65536k\".to_string());\n\n let work_dir_path: Option<String> = environment::lookup_optional(env, \"DOCKER_CONTAINER_WORK_DIR_PATH\")?;\n\n let work_dir_options = environment::lookup(env, \"DOCKER_CONTAINER_WORK_DIR_OPTIONS\").unwrap_or_else(|_| \"rw,exec,nosuid,size=131072k\".to_string());\n\n\n\n Ok(run::ContainerConfig{\n\n hostname,\n\n user,\n", "file_path": "src/main.rs", "rank": 53, "score": 37369.38010537869 }, { "content": "fn build_server_config(env: &environment::Environment) -> Result<config::ServerConfig, environment::Error> {\n\n let listen_addr = environment::lookup(env, \"SERVER_LISTEN_ADDR\")?;\n\n let listen_port = environment::lookup(env, \"SERVER_LISTEN_PORT\")?;\n\n let worker_threads = environment::lookup(env, \"SERVER_WORKER_THREADS\")?;\n\n\n\n Ok(config::ServerConfig{\n\n listen_addr,\n\n listen_port,\n\n worker_threads,\n\n })\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 37369.38010537869 }, { "content": "fn build_api_config(env: &environment::Environment) -> Result<api::ApiConfig, environment::Error> {\n\n let access_token = environment::lookup(env, \"API_ACCESS_TOKEN\")?;\n\n\n\n Ok(api::ApiConfig{\n\n access_token,\n\n })\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 55, "score": 37369.38010537869 }, { "content": "fn build_unix_socket_config(env: &environment::Environment) -> Result<unix_stream::Config, environment::Error> {\n\n let path = environment::lookup(env, \"DOCKER_UNIX_SOCKET_PATH\")?;\n\n let read_timeout = environment::lookup(env, \"DOCKER_UNIX_SOCKET_READ_TIMEOUT\")?;\n\n let write_timeout = environment::lookup(env, \"DOCKER_UNIX_SOCKET_WRITE_TIMEOUT\")?;\n\n\n\n Ok(unix_stream::Config{\n\n path,\n\n read_timeout: Duration::from_secs(read_timeout),\n\n write_timeout: 
Duration::from_secs(write_timeout),\n\n })\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 56, "score": 36883.08623948929 }, { "content": "#[derive(Debug, serde::Deserialize)]\n\nstruct RequestBody {\n\n image: String,\n\n payload: Map<String, Value>,\n\n}\n\n\n\n\n", "file_path": "src/docker_run/api/run.rs", "rank": 57, "score": 26882.694890127517 }, { "content": "fn read_stream_length<R: Read>(mut reader: R) -> Result<usize, StreamError> {\n\n let mut buffer = [0; 4];\n\n reader.read_exact(&mut buffer)\n\n .map_err(StreamError::ReadStreamLength)?;\n\n\n\n u32::from_be_bytes(buffer)\n\n .try_into()\n\n .map_err(StreamError::InvalidStreamLength)\n\n}\n", "file_path": "src/docker_run/docker.rs", "rank": 59, "score": 19146.001209957656 }, { "content": "fn read_stream_type<R: Read>(mut reader: R) -> Result<StreamType, StreamError> {\n\n let mut buffer = [0; 4];\n\n reader.read_exact(&mut buffer)\n\n .map_err(StreamError::ReadStreamType)?;\n\n\n\n StreamType::from_byte(buffer[0])\n\n .ok_or(StreamError::UnknownStreamType(buffer[0]))\n\n}\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 60, "score": 18704.07526335455 }, { "content": "fn send_response(request: tiny_http::Request, response: Result<SuccessResponse, ErrorResponse>) {\n\n let result = match response {\n\n Ok(data) => {\n\n success_response(request, &data)\n\n }\n\n\n\n Err(data) => {\n\n error_response(request, data)\n\n }\n\n };\n\n\n\n match result {\n\n Ok(()) => {},\n\n\n\n Err(err) => {\n\n log::error!(\"Failure while sending response: {}\", err)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 61, "score": 18704.07526335455 }, { "content": "fn check_access_token(config: &ApiConfig, request: &tiny_http::Request) -> Result<(), ErrorResponse> {\n\n let is_allowed = request.headers().iter()\n\n .filter(|header| header.field.equiv(\"X-Access-Token\"))\n\n .map(|header| header.value.clone())\n\n .any(|value| value == config.access_token);\n\n\n\n if is_allowed {\n\n 
Ok(())\n\n } else {\n\n Err(authorization_error())\n\n }\n\n}\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 62, "score": 18282.09002213799 }, { "content": "fn handle_request(config: &config::Config, request: &mut tiny_http::Request) -> Result<api::SuccessResponse, api::ErrorResponse> {\n\n match (request.url(), request.method()) {\n\n (\"/\", tiny_http::Method::Get) => {\n\n api::root::handle(config, request)\n\n }\n\n\n\n (\"/run\", tiny_http::Method::Post) => {\n\n api::run::handle(config, request)\n\n }\n\n\n\n (\"/version\", tiny_http::Method::Get) => {\n\n api::version::handle(config, request)\n\n }\n\n\n\n _ => {\n\n api::not_found::handle(config, request)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 63, "score": 17878.725609163816 }, { "content": "}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::CloneServer(err, n) => {\n\n write!(f, \"Failed to clone server (n = {}): {}\", n, err)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ErrorResponse {\n\n pub status_code: u16,\n\n pub body: ErrorBody,\n\n}\n\n\n\n#[derive(Debug, serde::Serialize, serde::Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ErrorBody {\n\n pub error: String,\n\n pub message: String,\n\n}\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 65, "score": 11.212334198745605 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub enum PrepareRequestError {\n\n SerializeBody(serde_json::Error),\n\n Request(http::Error),\n\n}\n\n\n\nimpl fmt::Display for PrepareRequestError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n PrepareRequestError::SerializeBody(err) => {\n\n write!(f, \"Failed to serialize request body: {}\", err)\n\n }\n\n\n\n PrepareRequestError::Request(err) => {\n", "file_path": "src/docker_run/docker.rs", "rank": 66, "score": 10.294677471192683 }, { "content": " }\n\n\n\n 
StreamError::MaxExecutionTime() => {\n\n write!(f, \"Max execution time exceeded\")\n\n }\n\n\n\n StreamError::MaxReadSize(max_size) => {\n\n write!(f, \"Max output size exceeded ({} bytes)\", max_size)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub struct StreamOutput {\n\n pub stdin: Vec<u8>,\n\n pub stdout: Vec<u8>,\n\n pub stderr: Vec<u8>,\n\n}\n\n\n\n\n", "file_path": "src/docker_run/docker.rs", "rank": 67, "score": 9.848690339856617 }, { "content": " pub soft: i64,\n\n pub hard: i64,\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n PrepareRequest(PrepareRequestError),\n\n SendRequest(http_extra::Error),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::PrepareRequest(err) => {\n\n write!(f, \"Failed to prepare request: {}\", err)\n\n }\n\n\n\n Error::SendRequest(err) => {\n\n write!(f, \"Failed while sending request: {}\", err)\n", "file_path": "src/docker_run/docker.rs", "rank": 69, "score": 8.862375326942708 }, { "content": " }\n\n }\n\n}\n\n\n\n\n\npub enum Error {\n\n UnixStream(unix_stream::Error),\n\n Version(docker::Error),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::UnixStream(err) => {\n\n write!(f, \"Unix socket failure: {}\", err)\n\n }\n\n\n\n Error::Version(err) => {\n\n write!(f, \"Failed to get docker version: {}\", err)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/docker_run/api/version.rs", "rank": 71, "score": 8.476444914592978 }, { "content": " UnixStream(unix_stream::Error),\n\n CreateContainer(docker::Error),\n\n StartContainer(docker::Error),\n\n AttachContainer(docker::Error),\n\n SerializePayload(serde_json::Error),\n\n ReadStream(docker::StreamError),\n\n StreamStdinUnexpected(Vec<u8>),\n\n StreamStderr(Vec<u8>),\n\n StreamStdoutDecode(serde_json::Error),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result 
{\n\n match self {\n\n Error::UnixStream(err) => {\n\n write!(f, \"Unix socket failure: {}\", err)\n\n }\n\n\n\n Error::CreateContainer(err) => {\n\n write!(f, \"Failed to create container: {}\", err)\n", "file_path": "src/docker_run/run.rs", "rank": 73, "score": 8.00935988027194 }, { "content": " let _ = stream.shutdown(Shutdown::Both);\n\n\n\n Ok(result)\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Connect(io::Error),\n\n SetStreamTimeout(io::Error),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::Connect(err) => {\n\n write!(f, \"Failed to connect to docker unix socket: {}\", err)\n\n }\n\n\n\n Error::SetStreamTimeout(err) => {\n\n write!(f, \"Failed set timeout on unix socket: {}\", err)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/docker_run/unix_stream.rs", "rank": 74, "score": 7.837740000769351 }, { "content": "\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n KeyNotFound(&'static str),\n\n Parse {\n\n key: &'static str,\n\n details: String,\n\n },\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::KeyNotFound(key) =>\n\n write!(f, \"Environment key not found: «{0}»\", key),\n\n\n\n Error::Parse { key, details } =>\n\n write!(f, \"Failed to parse value for environment key: «{0}», details: {1}\", key, details),\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/docker_run/environment.rs", "rank": 75, "score": 7.746103330409554 }, { "content": "use std::os::unix::net::UnixStream;\n\nuse std::io;\n\nuse std::time::Duration;\n\nuse std::fmt;\n\nuse std::net::Shutdown;\n\nuse std::path::PathBuf;\n\n\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Config {\n\n pub path: PathBuf,\n\n pub read_timeout: Duration,\n\n pub write_timeout: Duration,\n\n}\n\n\n", "file_path": "src/docker_run/unix_stream.rs", "rank": 76, "score": 7.407793929355184 }, { "content": "use crate::docker_run::http_extra;\n\nuse 
serde::{Serialize, Deserialize};\n\nuse std::io::{Read, Write};\n\nuse std::io;\n\nuse std::fmt;\n\nuse std::convert::TryInto;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct ContainerConfig {\n\n pub hostname: String,\n\n pub user: String,\n\n pub attach_stdin: bool,\n\n pub attach_stdout: bool,\n\n pub attach_stderr: bool,\n\n pub tty: bool,\n\n pub open_stdin: bool,\n\n pub stdin_once: bool,\n\n pub image: String,\n", "file_path": "src/docker_run/docker.rs", "rank": 77, "score": 7.19620709997549 }, { "content": " err_if_false(output.stdin.is_empty(), Error::StreamStdinUnexpected(output.stdin))?;\n\n err_if_false(output.stderr.is_empty(), Error::StreamStderr(output.stderr))?;\n\n\n\n // Decode stdout data to dict\n\n decode_dict(&output.stdout)\n\n .map_err(Error::StreamStdoutDecode)\n\n}\n\n\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ContainerConfig {\n\n pub hostname: String,\n\n pub user: String,\n\n pub memory: i64,\n\n pub network_disabled: bool,\n\n pub ulimit_nofile_soft: i64,\n\n pub ulimit_nofile_hard: i64,\n\n pub ulimit_nproc_soft: i64,\n\n pub ulimit_nproc_hard: i64,\n\n pub cap_add: Vec<String>,\n", "file_path": "src/docker_run/run.rs", "rank": 79, "score": 6.890174520831154 }, { "content": "use crate::docker_run::unix_stream;\n\nuse crate::docker_run::run;\n\nuse crate::docker_run::api;\n\nuse crate::docker_run::debug;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Config {\n\n pub server: ServerConfig,\n\n pub api: api::ApiConfig,\n\n pub unix_socket: unix_stream::Config,\n\n pub container: run::ContainerConfig,\n\n pub run: run::Limits,\n\n pub debug: debug::Config,\n\n}\n\n\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ServerConfig {\n\n pub listen_addr: String,\n\n pub listen_port: u16,\n\n pub worker_threads: u16,\n\n}\n\n\n\nimpl ServerConfig {\n\n pub fn listen_addr_with_port(&self) -> String {\n\n format!(\"{}:{}\", self.listen_addr, self.listen_port)\n\n }\n\n}\n", 
"file_path": "src/docker_run/config.rs", "rank": 81, "score": 6.575911957912432 }, { "content": "pub mod root;\n\npub mod run;\n\npub mod version;\n\npub mod not_found;\n\n\n\nuse std::io;\n\nuse std::fmt;\n\nuse std::thread;\n\n\n\n\n\n\n\npub struct ServerConfig<C, H> {\n\n pub listen_addr: String,\n\n pub worker_threads: u16,\n\n pub handler_config: C,\n\n pub handler: H,\n\n}\n\n\n\npub struct Server {\n\n server: tiny_http::Server,\n", "file_path": "src/docker_run/api/mod.rs", "rank": 82, "score": 5.764306926654477 }, { "content": " Err(ErrorResponse{\n\n status_code: 500,\n\n body: ErrorBody{\n\n error: \"response.serialize\".to_string(),\n\n message: format!(\"Failed to serialize response: {}\", err),\n\n }\n\n })\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 83, "score": 5.761276925751666 }, { "content": "use std::time::Duration;\n\nuse std::str;\n\nuse std::fmt;\n\nuse serde::Serialize;\n\nuse serde_json::{Value, Map};\n\nuse std::os::unix::net::UnixStream;\n\nuse std::net;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::docker_run::docker;\n\nuse crate::docker_run::unix_stream;\n\nuse crate::docker_run::debug;\n\n\n\n\n\n#[derive(Debug)]\n\npub struct RunRequest<Payload: Serialize> {\n\n pub container_config: docker::ContainerConfig,\n\n pub payload: Payload,\n\n pub limits: Limits,\n\n}\n", "file_path": "src/docker_run/run.rs", "rank": 84, "score": 5.699583749002352 }, { "content": "}\n\n\n\nimpl Server {\n\n pub fn new(listen_addr: String) -> Result<Server, io::Error> {\n\n let server = tiny_http::Server::new(listen_addr)?;\n\n\n\n Ok(Server{\n\n server,\n\n })\n\n }\n\n\n\n pub fn start<C, H>(&self, config: ServerConfig<C, H>) -> Result<Workers, Error>\n\n where\n\n C: Send + Clone + 'static,\n\n H: Send + Copy + 'static,\n\n H: FnOnce(&C, &mut tiny_http::Request) -> Result<SuccessResponse, ErrorResponse> {\n\n\n\n let mut handles = Vec::new();\n\n let request_handler = config.handler;\n\n\n", "file_path": 
"src/docker_run/api/mod.rs", "rank": 87, "score": 5.476269246758927 }, { "content": " }\n\n\n\n Error::StartContainer(err) => {\n\n write!(f, \"Failed to start container: {}\", err)\n\n }\n\n\n\n Error::AttachContainer(err) => {\n\n write!(f, \"Failed to attach to container: {}\", err)\n\n }\n\n\n\n Error::SerializePayload(err) => {\n\n write!(f, \"Failed to send payload to stream: {}\", err)\n\n }\n\n\n\n Error::ReadStream(err) => {\n\n write!(f, \"Failed while reading stream: {}\", err)\n\n }\n\n\n\n Error::StreamStdinUnexpected(bytes) => {\n\n let msg = String::from_utf8(bytes.to_vec())\n", "file_path": "src/docker_run/run.rs", "rank": 88, "score": 5.416980418304735 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n StreamError::Read(err) => {\n\n write!(f, \"{}\", err)\n\n }\n\n\n\n StreamError::ReadStreamType(err) => {\n\n write!(f, \"Failed to read stream type: {}\", err)\n\n }\n\n\n\n StreamError::UnknownStreamType(stream_type) => {\n\n write!(f, \"Unknown stream type: (type: {})\", stream_type)\n\n }\n\n\n\n StreamError::ReadStreamLength(err) => {\n\n write!(f, \"Failed to read stream length: {}\", err)\n\n }\n\n\n\n StreamError::InvalidStreamLength(err) => {\n\n write!(f, \"Failed to parse stream length: {}\", err)\n", "file_path": "src/docker_run/docker.rs", "rank": 89, "score": 5.303985879882996 }, { "content": "use std::env;\n\nuse std::collections::HashMap;\n\nuse std::str::FromStr;\n\nuse std::fmt;\n\n\n\n\n\npub type Environment = HashMap<String, String>;\n\n\n\n\n", "file_path": "src/docker_run/environment.rs", "rank": 90, "score": 5.201649301638369 }, { "content": " .unwrap_or(format!(\"{:?}\", bytes));\n\n\n\n write!(f, \"Code runner returned unexpected stdin data: {}\", msg)\n\n }\n\n\n\n Error::StreamStderr(bytes) => {\n\n let msg = String::from_utf8(bytes.to_vec())\n\n .unwrap_or(format!(\"{:?}\", bytes));\n\n\n\n write!(f, \"Code runner failed with the following message: {}\", msg)\n\n }\n\n\n\n 
Error::StreamStdoutDecode(err) => {\n\n write!(f, \"Failed to decode json returned from code runner: {}\", err)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/docker_run/run.rs", "rank": 92, "score": 4.617574603769999 }, { "content": " write!(f, \"{}\", err)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n#[derive(Deserialize, Serialize, Clone, Debug)]\n\n#[serde(rename_all(deserialize = \"PascalCase\"))]\n\n#[serde(rename_all(serialize = \"camelCase\"))]\n\npub struct VersionResponse {\n\n pub version: String,\n\n pub api_version: String,\n\n pub git_commit: String,\n\n pub go_version: String,\n\n pub os: String,\n\n pub arch: String,\n\n pub kernel_version: String,\n\n pub build_time: String,\n\n pub platform: VersionPlatformResponse,\n", "file_path": "src/docker_run/docker.rs", "rank": 93, "score": 4.513148937090055 }, { "content": "\n\n\n\npub struct Workers {\n\n handles: Vec<thread::JoinHandle<()>>\n\n}\n\n\n\nimpl Workers {\n\n pub fn wait(self) {\n\n // Wait for threads to complete, in practice this will block forever unless:\n\n // - The server is shutdown\n\n // - One of the threads panics\n\n for handle in self.handles {\n\n handle.join().unwrap();\n\n }\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ApiConfig {\n\n pub access_token: ascii::AsciiString,\n\n}\n\n\n", "file_path": "src/docker_run/api/mod.rs", "rank": 94, "score": 4.413480165352071 }, { "content": " pub cap_drop: Vec<String>,\n\n pub readonly_rootfs: bool,\n\n pub tmp_dir: Option<Tmpfs>,\n\n pub work_dir: Option<Tmpfs>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Tmpfs {\n\n pub path: String,\n\n pub options: String,\n\n}\n\n\n\n\n\nimpl ContainerConfig {\n\n pub fn tmpfs_mounts(&self) -> HashMap<String, String> {\n\n [&self.tmp_dir, &self.work_dir]\n\n .iter()\n\n .filter_map(|tmpfs| tmpfs.as_ref())\n\n .map(|tmpfs| (tmpfs.path.clone(), tmpfs.options.clone()))\n\n .collect()\n\n }\n\n}\n\n\n\n\n", "file_path": "src/docker_run/run.rs", "rank": 95, "score": 4.385328877175155 }, { 
"content": "mod docker_run;\n\n\n\nuse std::io;\n\nuse std::fmt;\n\nuse std::thread;\n\nuse std::process;\n\nuse std::time::Duration;\n\n\n\nuse signal_hook::iterator::Signals;\n\n\n\nuse docker_run::config;\n\nuse docker_run::environment;\n\nuse docker_run::unix_stream;\n\nuse docker_run::run;\n\nuse docker_run::api;\n\nuse docker_run::debug;\n\n\n\n\n", "file_path": "src/main.rs", "rank": 96, "score": 3.43471731621643 }, { "content": "# docker-run\n\n\n\n## Overview\n\ndocker-run provides a http api for running untrusted code inside transient docker containers.\n\nFor every run request a new container is started and deleted.\n\nThe payload is passed to the container by attaching to it and writing it to stdin. The result is read from stdout.\n\nThe communication with the docker daemon happens via it's api over the unix socket.\n\nThis is used to run code on [glot.io](https://glot.io).\n\n\n\n\n\n## Api\n\n| Action | Method | Route | Requires token |\n\n|:-----------------------------|:-------|:-----------|:---------------|\n\n| Get service info | GET | / | No |\n\n| Get docker info | GET | /version | Yes |\n\n| [Run code](api_docs/run.md) | POST | /run | Yes |\n\n\n\n\n\n## Docker images\n\nWhen a run request is posted to docker-run it will create a new temporary container.\n\nThe container is required to listen for a json payload on stdin and must write the\n\nrun result to stdout as a json object containing the properties: stdout, stderr and error.\n\nThe docker images used by [glot.io](https://glot.io) can be found [here](https://github.com/glotcode/glot-images).\n\n\n\n\n\n## Performance\n\nThe following numbers were obtained using [glot-images](https://github.com/glotcode/glot-images)\n\non a 5$ linode vm running 'Hello World' with [httpstat](https://github.com/reorx/httpstat)\n\nmultiple times locally on the same host and reading the numbers manually.\n\nNot scientific numbers, but it will give an indication of the overhead involved.\n\n\n\n| Language | Min 
| Max |\n\n|:-----------------|:-------------|:-------------|\n\n| Python | 250 ms | 350 ms |\n\n| C | 330 ms | 430 ms |\n\n| Haskell | 500 ms | 700 ms |\n\n| Java | 2000 ms | 2200 ms |\n\n\n", "file_path": "README.md", "rank": 97, "score": 3.284593096420721 }, { "content": "pub mod docker;\n\npub mod http_extra;\n\npub mod run;\n\npub mod config;\n\npub mod environment;\n\npub mod api;\n\npub mod unix_stream;\n\npub mod debug;\n", "file_path": "src/docker_run/mod.rs", "rank": 98, "score": 3.2515001599637174 }, { "content": "use std::fmt;\n\n\n\nuse crate::docker_run::config;\n\nuse crate::docker_run::api;\n\nuse crate::docker_run::docker;\n\nuse crate::docker_run::unix_stream;\n\n\n\n\n\n#[derive(Debug, serde::Serialize)]\n", "file_path": "src/docker_run/api/version.rs", "rank": 99, "score": 3.2258618404071804 } ]
Rust
src/compute/nullif.rs
abreis/arrow2
982454620d1db7964a4e272b3f74745563b6ba5a
use crate::array::PrimitiveArray; use crate::compute::comparison::primitive_compare_values_op; use crate::datatypes::DataType; use crate::error::{ArrowError, Result}; use crate::{array::Array, types::NativeType}; use super::utils::combine_validities; pub fn nullif_primitive<T: NativeType>( lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>, ) -> Result<PrimitiveArray<T>> { if lhs.data_type() != rhs.data_type() { return Err(ArrowError::InvalidArgumentError( "Arrays must have the same logical type".to_string(), )); } let equal = primitive_compare_values_op(lhs.values(), rhs.values(), |lhs, rhs| lhs != rhs); let equal = equal.into(); let validity = combine_validities(lhs.validity(), &equal); Ok(PrimitiveArray::<T>::from_data( lhs.data_type().clone(), lhs.values_buffer().clone(), validity, )) } pub fn can_nullif(lhs: &DataType, rhs: &DataType) -> bool { if lhs != rhs { return false; }; use DataType::*; matches!( lhs, UInt8 | UInt16 | UInt32 | UInt64 | Int8 | Int16 | Int32 | Int64 | Float32 | Float64 | Time32(_) | Time64(_) | Date32 | Date64 | Timestamp(_, _) | Duration(_) ) } pub fn nullif(lhs: &dyn Array, rhs: &dyn Array) -> Result<Box<dyn Array>> { if lhs.data_type() != rhs.data_type() { return Err(ArrowError::InvalidArgumentError( "Nullif expects arrays of the the same logical type".to_string(), )); } if lhs.len() != rhs.len() { return Err(ArrowError::InvalidArgumentError( "Nullif expects arrays of the the same length".to_string(), )); } use crate::datatypes::DataType::*; match lhs.data_type() { UInt8 => nullif_primitive::<u8>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), UInt16 => nullif_primitive::<u16>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), UInt32 => nullif_primitive::<u32>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), UInt64 => 
nullif_primitive::<u64>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int8 => nullif_primitive::<i8>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int16 => nullif_primitive::<i16>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int32 | Time32(_) | Date32 => nullif_primitive::<i32>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int64 | Time64(_) | Date64 | Timestamp(_, _) | Duration(_) => nullif_primitive::<i64>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Float32 => nullif_primitive::<f32>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Float64 => nullif_primitive::<f64>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), other => Err(ArrowError::NotYetImplemented(format!( "Nullif is not implemented for logical datatype {}", other ))), } }
use crate::array::PrimitiveArray; use crate::compute::comparison::primitive_compare_values_op; use crate::datatypes::DataType; use crate::error::{ArrowError, Result}; use crate::{array::Array, types::NativeType}; use super::utils::combine_validities; pub fn nullif_primitive<T: NativeType>( lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>, ) -> Result<PrimitiveArray<T>> { if lhs.data_type() != rhs.data_type() { return Err(ArrowError::InvalidArgumentError( "Arrays must have the same logical type".to_string(), )); } let equal = primitive_compare_values_op(lhs.values(), rhs.values(), |lhs, rhs| lhs != rhs); let equal = equal.into(); let validity = combine_validities(lhs.validity(), &equal); Ok(PrimitiveArray::<T>::from_data( lhs.data_type().clone(), lhs.values_buffer().clone(), validity, )) } pub fn can_nullif(lhs: &DataType, rhs: &DataType) -> bool { if lhs != rhs { return false; }; use DataType::*; matches!( lhs, UInt8 | UInt16 | UInt32 | UInt64 | Int8 | Int16 | Int32 | Int64 | Float32 | Float64 | Time32(_) | Time64(_) | Date32 | Date64 | Timestamp(_, _) | Duration(_) ) }
pub fn nullif(lhs: &dyn Array, rhs: &dyn Array) -> Result<Box<dyn Array>> { if lhs.data_type() != rhs.data_type() { return Err(ArrowError::InvalidArgumentError( "Nullif expects arrays of the the same logical type".to_string(), )); } if lhs.len() != rhs.len() { return Err(ArrowError::InvalidArgumentError( "Nullif expects arrays of the the same length".to_string(), )); } use crate::datatypes::DataType::*; match lhs.data_type() { UInt8 => nullif_primitive::<u8>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), UInt16 => nullif_primitive::<u16>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), UInt32 => nullif_primitive::<u32>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), UInt64 => nullif_primitive::<u64>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int8 => nullif_primitive::<i8>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int16 => nullif_primitive::<i16>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int32 | Time32(_) | Date32 => nullif_primitive::<i32>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Int64 | Time64(_) | Date64 | Timestamp(_, _) | Duration(_) => nullif_primitive::<i64>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Float32 => nullif_primitive::<f32>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) .map(|x| Box::new(x) as Box<dyn Array>), Float64 => nullif_primitive::<f64>( lhs.as_any().downcast_ref().unwrap(), rhs.as_any().downcast_ref().unwrap(), ) 
.map(|x| Box::new(x) as Box<dyn Array>), other => Err(ArrowError::NotYetImplemented(format!( "Nullif is not implemented for logical datatype {}", other ))), } }
function_block-full_function
[ { "content": "/// Logically compares two [ArrayData].\n\n/// Two arrays are logically equal if and only if:\n\n/// * their data types are equal\n\n/// * their lengths are equal\n\n/// * their null counts are equal\n\n/// * their null bitmaps are equal\n\n/// * each of their items are equal\n\n/// two items are equal when their in-memory representation is physically equal (i.e. same bit content).\n\n/// The physical comparison depend on the data type.\n\n/// # Panics\n\n/// This function may panic whenever any of the [ArrayData] does not follow the Arrow specification.\n\n/// (e.g. wrong number of buffers, buffer `len` does not correspond to the declared `len`)\n\npub fn equal(lhs: &dyn Array, rhs: &dyn Array) -> bool {\n\n let lhs_validity = lhs.validity();\n\n let rhs_validity = rhs.validity();\n\n utils::base_equal(lhs, rhs)\n\n && lhs.null_count() == rhs.null_count()\n\n && utils::equal_validity(lhs_validity, rhs_validity, 0, 0, lhs.len())\n\n && equal_values(lhs, rhs, lhs_validity, rhs_validity, 0, 0, lhs.len())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use crate::array::{BooleanArray, Int16Array, Int32Array, Offset};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_primitive() {\n\n let cases = vec![\n", "file_path": "src/array/equal/mod.rs", "rank": 0, "score": 408542.6996415361 }, { "content": "/// Sort elements from `ArrayRef` into an unsigned integer (`UInt32Array`) of indices.\n\n/// For floating point arrays any NaN values are considered to be greater than any other non-null value\n\npub fn sort_to_indices(values: &dyn Array, options: &SortOptions) -> Result<Int32Array> {\n\n match values.data_type() {\n\n DataType::Boolean => {\n\n let (v, n) = partition_validity(values);\n\n Ok(sort_boolean(values, v, n, &options))\n\n }\n\n DataType::Int8 => dyn_sort_indices!(i8, values, ord::total_cmp, options),\n\n DataType::Int16 => dyn_sort_indices!(i16, values, ord::total_cmp, options),\n\n DataType::Int32\n\n | 
DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => {\n\n dyn_sort_indices!(i32, values, ord::total_cmp, options)\n\n }\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, None)\n\n | DataType::Duration(_) => dyn_sort_indices!(i64, values, ord::total_cmp, options),\n\n DataType::UInt8 => dyn_sort_indices!(u8, values, ord::total_cmp, options),\n", "file_path": "src/compute/sort/mod.rs", "rank": 1, "score": 332099.3483140011 }, { "content": "/// Returns an array of integers with the number of bytes on each string of the array.\n\npub fn length(array: &dyn Array) -> Result<Box<dyn Array>> {\n\n match array.data_type() {\n\n DataType::Utf8 => {\n\n let array = array.as_any().downcast_ref::<Utf8Array<i32>>().unwrap();\n\n Ok(Box::new(unary_offsets_string::<i32, _>(array, |x| x)))\n\n }\n\n DataType::LargeUtf8 => {\n\n let array = array.as_any().downcast_ref::<Utf8Array<i64>>().unwrap();\n\n Ok(Box::new(unary_offsets_string::<i64, _>(array, |x| x)))\n\n }\n\n _ => Err(ArrowError::InvalidArgumentError(format!(\n\n \"length not supported for {:?}\",\n\n array.data_type()\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/compute/length.rs", "rank": 2, "score": 328689.13851105806 }, { "content": "/// Returns the element-wise hash of an [`Array`]. 
Validity is preserved.\n\n/// Supported DataTypes:\n\n/// * Boolean types\n\n/// * All primitive types except `Float32` and `Float64`\n\n/// * `[Large]Utf8`;\n\n/// * `[Large]Binary`.\n\n/// # Errors\n\n/// This function errors whenever it does not support the specific `DataType`.\n\npub fn hash(array: &dyn Array) -> Result<PrimitiveArray<u64>> {\n\n Ok(match array.data_type() {\n\n DataType::Boolean => hash_boolean(array.as_any().downcast_ref().unwrap()),\n\n DataType::Int8 => hash_dyn!(i8, array),\n\n DataType::Int16 => hash_dyn!(i16, array),\n\n DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => hash_dyn!(i32, array),\n\n DataType::Interval(IntervalUnit::DayTime) => hash_dyn!(days_ms, array),\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, _)\n\n | DataType::Duration(_) => hash_dyn!(i64, array),\n\n DataType::Decimal(_, _) => hash_dyn!(i128, array),\n\n DataType::UInt8 => hash_dyn!(u8, array),\n\n DataType::UInt16 => hash_dyn!(u16, array),\n\n DataType::UInt32 => hash_dyn!(u32, array),\n\n DataType::UInt64 => hash_dyn!(u64, array),\n", "file_path": "src/compute/hash.rs", "rank": 3, "score": 327579.0959533909 }, { "content": "/// Extracts the hours of a given temporal array as an array of integers\n\npub fn year(array: &dyn Array) -> Result<PrimitiveArray<i32>> {\n\n let final_data_type = DataType::Int32;\n\n match array.data_type() {\n\n DataType::Date32 => {\n\n let array = array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<i32>>()\n\n .unwrap();\n\n Ok(unary(\n\n array,\n\n |x| date32_to_datetime(x).year(),\n\n final_data_type,\n\n ))\n\n }\n\n DataType::Date64 => {\n\n let array = array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<i64>>()\n\n .unwrap();\n\n Ok(unary(\n", "file_path": "src/compute/temporal.rs", "rank": 4, "score": 327561.412334897 }, { "content": "/// Extracts the hours of a given temporal array as an array of 
integers\n\npub fn hour(array: &dyn Array) -> Result<PrimitiveArray<u32>> {\n\n let final_data_type = DataType::UInt32;\n\n match array.data_type() {\n\n DataType::Time32(TimeUnit::Second) => {\n\n let array = array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<i32>>()\n\n .unwrap();\n\n Ok(unary(array, |x| time32s_to_time(x).hour(), final_data_type))\n\n }\n\n DataType::Time32(TimeUnit::Microsecond) => {\n\n let array = array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<i32>>()\n\n .unwrap();\n\n Ok(unary(\n\n array,\n\n |x| time32ms_to_time(x).hour(),\n\n final_data_type,\n\n ))\n", "file_path": "src/compute/temporal.rs", "rank": 5, "score": 327561.412334897 }, { "content": "/// Exports an `Array` to the C data interface.\n\npub fn export_to_c(array: Arc<dyn Array>) -> Result<ArrowArray> {\n\n let field = Field::new(\"\", array.data_type().clone(), array.null_count() != 0);\n\n\n\n Ok(ArrowArray {\n\n array: Arc::new(Ffi_ArrowArray::new(array)),\n\n schema: Arc::new(Ffi_ArrowSchema::try_new(field)?),\n\n })\n\n}\n\n\n", "file_path": "src/ffi/ffi.rs", "rank": 6, "score": 327560.645760698 }, { "content": "/// Concatenate multiple [Array] of the same type into a single [ArrayRef].\n\npub fn concatenate(arrays: &[&dyn Array]) -> Result<Box<dyn Array>> {\n\n if arrays.is_empty() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"concat requires input of at least one array\".to_string(),\n\n ));\n\n }\n\n\n\n if arrays\n\n .iter()\n\n .any(|array| array.data_type() != arrays[0].data_type())\n\n {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"It is not possible to concatenate arrays of different data types.\".to_string(),\n\n ));\n\n }\n\n\n\n let lengths = arrays.iter().map(|array| array.len()).collect::<Vec<_>>();\n\n let capacity = lengths.iter().sum();\n\n\n\n let mut mutable = make_growable(arrays, false, capacity);\n", "file_path": "src/compute/concat.rs", "rank": 7, "score": 322862.7607275428 }, { "content": "/// Reads a valid `ffi` 
interface into a `Box<dyn Array>`\n\n/// # Errors\n\n/// If and only if:\n\n/// * the data type is not supported\n\n/// * the interface is not valid (e.g. a null pointer)\n\npub fn try_from<A: ArrowArrayRef>(array: A) -> Result<Box<dyn Array>> {\n\n let data_type = array.data_type()?;\n\n let array: Box<dyn Array> = match data_type {\n\n DataType::Boolean => Box::new(BooleanArray::try_from_ffi(array)?),\n\n DataType::Int8 => Box::new(PrimitiveArray::<i8>::try_from_ffi(array)?),\n\n DataType::Int16 => Box::new(PrimitiveArray::<i16>::try_from_ffi(array)?),\n\n DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => {\n\n Box::new(PrimitiveArray::<i32>::try_from_ffi(array)?)\n\n }\n\n DataType::Interval(IntervalUnit::DayTime) => {\n\n Box::new(PrimitiveArray::<days_ms>::try_from_ffi(array)?)\n\n }\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, _)\n\n | DataType::Duration(_) => Box::new(PrimitiveArray::<i64>::try_from_ffi(array)?),\n", "file_path": "src/ffi/array.rs", "rank": 8, "score": 318848.14539923565 }, { "content": "/// Returns the minimum value in the boolean array.\n\n///\n\n/// ```\n\n/// use arrow2::{\n\n/// array::BooleanArray,\n\n/// compute::aggregate::min_boolean,\n\n/// };\n\n///\n\n/// let a = BooleanArray::from(vec![Some(true), None, Some(false)]);\n\n/// assert_eq!(min_boolean(&a), Some(false))\n\n/// ```\n\npub fn min_boolean(array: &BooleanArray) -> Option<bool> {\n\n // short circuit if all nulls / zero length array\n\n if array.null_count() == array.len() {\n\n return None;\n\n }\n\n\n\n // Note the min bool is false (0), so short circuit as soon as we see it\n\n array\n\n .iter()\n\n .find(|&b| b == Some(false))\n\n .flatten()\n\n .or(Some(true))\n\n}\n\n\n", "file_path": "src/compute/aggregate/min_max.rs", "rank": 9, "score": 318444.3525851526 }, { "content": "/// Returns the maximum value in the boolean array\n\n///\n\n/// 
```\n\n/// use arrow2::{\n\n/// array::BooleanArray,\n\n/// compute::aggregate::max_boolean,\n\n/// };\n\n///\n\n/// let a = BooleanArray::from(vec![Some(true), None, Some(false)]);\n\n/// assert_eq!(max_boolean(&a), Some(true))\n\n/// ```\n\npub fn max_boolean(array: &BooleanArray) -> Option<bool> {\n\n // short circuit if all nulls / zero length array\n\n if array.null_count() == array.len() {\n\n return None;\n\n }\n\n\n\n // Note the max bool is true (1), so short circuit as soon as we see it\n\n array\n\n .iter()\n\n .find(|&b| b == Some(true))\n\n .flatten()\n\n .or(Some(false))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::array::*;\n\n\n\n #[test]\n", "file_path": "src/compute/aggregate/min_max.rs", "rank": 10, "score": 318444.3525851526 }, { "content": "pub fn get_display<'a>(array: &'a dyn Array) -> Result<Box<dyn Fn(usize) -> String + 'a>> {\n\n let value_display = get_value_display(array)?;\n\n Ok(Box::new(move |row| {\n\n if array.is_null(row) {\n\n \"\".to_string()\n\n } else {\n\n value_display(row)\n\n }\n\n }))\n\n}\n", "file_path": "src/array/display.rs", "rank": 11, "score": 317058.90595098044 }, { "content": "/// Filters an [Array], returning elements matching the filter (i.e. 
where the values are true).\n\n/// WARNING: the nulls of `filter` are ignored and the value on its slot is considered.\n\n/// Therefore, it is considered undefined behavior to pass `filter` with null values.\n\n/// # Example\n\n/// ```rust\n\n/// # use arrow2::array::{Int32Array, Primitive, BooleanArray};\n\n/// # use arrow2::datatypes::DataType;\n\n/// # use arrow2::error::Result;\n\n/// # use arrow2::compute::filter::filter;\n\n/// # fn main() -> Result<()> {\n\n/// let array = Primitive::from_slice(&vec![5, 6, 7, 8, 9]).to(DataType::Int32);\n\n/// let filter_array = BooleanArray::from_slice(&vec![true, false, false, true, false]);\n\n/// let c = filter(&array, &filter_array)?;\n\n/// let c = c.as_any().downcast_ref::<Int32Array>().unwrap();\n\n/// assert_eq!(c, &Primitive::from_slice(vec![5, 8]).to(DataType::Int32));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn filter(array: &dyn Array, filter: &BooleanArray) -> Result<Box<dyn Array>> {\n\n match array.data_type() {\n\n DataType::UInt8 => {\n\n let array = array.as_any().downcast_ref().unwrap();\n\n Ok(Box::new(filter_primitive::<u8>(array, filter)))\n\n }\n\n DataType::UInt16 => {\n\n let array = array.as_any().downcast_ref().unwrap();\n\n Ok(Box::new(filter_primitive::<u16>(array, filter)))\n\n }\n\n DataType::UInt32 => {\n\n let array = array.as_any().downcast_ref().unwrap();\n\n Ok(Box::new(filter_primitive::<u32>(array, filter)))\n\n }\n\n DataType::UInt64 => {\n\n let array = array.as_any().downcast_ref().unwrap();\n\n Ok(Box::new(filter_primitive::<u64>(array, filter)))\n\n }\n\n DataType::Int8 => {\n\n let array = array.as_any().downcast_ref().unwrap();\n", "file_path": "src/compute/filter.rs", "rank": 12, "score": 314745.8271451858 }, { "content": "/// Returns a function of index returning the string representation of the array.\n\n/// # Errors\n\n/// This function errors iff the datatype is not yet supported for printing.\n\npub fn get_value_display<'a>(array: &'a dyn Array) -> Result<Box<dyn 
Fn(usize) -> String + 'a>> {\n\n use DataType::*;\n\n Ok(match array.data_type() {\n\n Null => Box::new(|_: usize| \"\".to_string()),\n\n Boolean => {\n\n let a = array.as_any().downcast_ref::<BooleanArray>().unwrap();\n\n Box::new(move |row: usize| format!(\"{}\", a.value(row)))\n\n }\n\n Int8 => dyn_primitive!(array, i8, |x| x),\n\n Int16 => dyn_primitive!(array, i16, |x| x),\n\n Int32 => dyn_primitive!(array, i32, |x| x),\n\n Int64 => dyn_primitive!(array, i64, |x| x),\n\n UInt8 => dyn_primitive!(array, u8, |x| x),\n\n UInt16 => dyn_primitive!(array, u16, |x| x),\n\n UInt32 => dyn_primitive!(array, u32, |x| x),\n\n UInt64 => dyn_primitive!(array, u64, |x| x),\n\n Float16 => unreachable!(),\n\n Float32 => dyn_primitive!(array, f32, |x| x),\n\n Float64 => dyn_primitive!(array, f64, |x| x),\n\n Date32 => dyn_primitive!(array, i32, temporal_conversions::date32_to_date),\n", "file_path": "src/array/display.rs", "rank": 13, "score": 314170.7447370685 }, { "content": "/// Shifts array by defined number of items (to left or right)\n\n/// A positive value for `offset` shifts the array to the right\n\n/// a negative value shifts the array to the left.\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::array::Int32Array;\n\n/// use arrow2::compute::window::shift;\n\n///\n\n/// let array = Int32Array::from(&[Some(1), None, Some(3)]);\n\n/// let result = shift(&array, -1).unwrap();\n\n/// let expected = Int32Array::from(&[None, Some(3), None]);\n\n/// assert_eq!(expected, result.as_ref());\n\n/// ```\n\npub fn shift(array: &dyn Array, offset: i64) -> Result<Box<dyn Array>> {\n\n if abs(offset) as usize > array.len() {\n\n return Err(ArrowError::InvalidArgumentError(format!(\n\n \"Shift's absolute offset must be smaller or equal to the arrays length. 
Offset is {}, length is {}\",\n\n abs(offset), array.len()\n\n )));\n\n }\n\n\n\n // Compute slice\n\n let slice_offset = clamp(-offset, 0, array.len() as i64) as usize;\n\n let length = array.len() - abs(offset) as usize;\n\n let slice = array.slice(slice_offset, length);\n\n\n\n // Generate array with remaining `null` items\n\n let nulls = abs(offset as i64) as usize;\n\n\n\n let null_array = new_null_array(array.data_type().clone(), nulls);\n\n\n\n // Concatenate both arrays, add nulls after if shift > 0 else before\n\n if offset > 0 {\n", "file_path": "src/compute/window.rs", "rank": 14, "score": 313941.47244502226 }, { "content": "pub fn contains(list: &dyn Array, values: &dyn Array) -> Result<BooleanArray> {\n\n let list_data_type = list.data_type();\n\n let values_data_type = values.data_type();\n\n\n\n match (list_data_type, values_data_type) {\n\n (DataType::List(_), DataType::Utf8) => {\n\n let list = list.as_any().downcast_ref::<ListArray<i32>>().unwrap();\n\n let values = values.as_any().downcast_ref::<Utf8Array<i32>>().unwrap();\n\n contains_utf8(list, values)\n\n }\n\n (DataType::List(_), DataType::LargeUtf8) => {\n\n let list = list.as_any().downcast_ref::<ListArray<i32>>().unwrap();\n\n let values = values.as_any().downcast_ref::<Utf8Array<i64>>().unwrap();\n\n contains_utf8(list, values)\n\n }\n\n (DataType::LargeList(_), DataType::LargeUtf8) => {\n\n let list = list.as_any().downcast_ref::<ListArray<i64>>().unwrap();\n\n let values = values.as_any().downcast_ref::<Utf8Array<i64>>().unwrap();\n\n contains_utf8(list, values)\n\n }\n", "file_path": "src/compute/contains.rs", "rank": 15, "score": 311905.4149480816 }, { "content": "/// Cast `array` to the provided data type and return a new [`Array`] with\n\n/// type `to_type`, if possible.\n\n///\n\n/// Behavior:\n\n/// * Boolean to Utf8: `true` => '1', `false` => `0`\n\n/// * Utf8 to numeric: strings that can't be parsed to numbers return null, float strings\n\n/// in integer casts return null\n\n/// 
* Numeric to boolean: 0 returns `false`, any other value returns `true`\n\n/// * List to List: the underlying data type is cast\n\n/// * Primitive to List: a list array with 1 value per slot is created\n\n/// * Date32 and Date64: precision lost when going to higher interval\n\n/// * Time32 and Time64: precision lost when going to higher interval\n\n/// * Timestamp and Date{32|64}: precision lost when going to higher interval\n\n/// * Temporal to/from backing primitive: zero-copy with data type change\n\n///\n\n/// Unsupported Casts\n\n/// * To or from `StructArray`\n\n/// * List to primitive\n\n/// * Utf8 to boolean\n\n/// * Interval and duration\n\npub fn cast(array: &dyn Array, to_type: &DataType) -> Result<Box<dyn Array>> {\n\n use DataType::*;\n\n let from_type = array.data_type();\n\n\n\n // clone array if types are the same\n\n if from_type == to_type {\n\n return Ok(clone(array));\n\n }\n\n match (from_type, to_type) {\n\n (Struct(_), _) => Err(ArrowError::NotYetImplemented(\n\n \"Cannot cast from struct to other types\".to_string(),\n\n )),\n\n (_, Struct(_)) => Err(ArrowError::NotYetImplemented(\n\n \"Cannot cast to struct from other types\".to_string(),\n\n )),\n\n (List(_), List(to)) => {\n\n let array = array.as_any().downcast_ref::<ListArray<i32>>().unwrap();\n\n\n\n let values = array.values();\n\n let new_values = cast(values.as_ref(), to.data_type())?.into();\n", "file_path": "src/compute/cast/mod.rs", "rank": 16, "score": 307836.3519651755 }, { "content": "pub fn regex_match<O: Offset>(values: &Utf8Array<O>, regex: &Utf8Array<O>) -> Result<BooleanArray> {\n\n if values.len() != regex.len() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Cannot perform comparison operation on arrays of different length\".to_string(),\n\n ));\n\n }\n\n\n\n let mut map = HashMap::new();\n\n let validity = combine_validities(values.validity(), regex.validity());\n\n\n\n let iterator = values.iter().zip(regex.iter()).map(|(haystack, regex)| {\n\n if 
haystack.is_none() | regex.is_none() {\n\n // regex is expensive => short-circuit if null\n\n return Result::Ok(false);\n\n };\n\n let haystack = haystack.unwrap();\n\n let regex = regex.unwrap();\n\n\n\n let regex = if let Some(regex) = map.get(regex) {\n\n regex\n", "file_path": "src/compute/regex_match.rs", "rank": 18, "score": 297103.0318402211 }, { "content": "/// Regex matches\n\n/// # Example\n\n/// ```\n\n/// use arrow2::array::{Utf8Array, BooleanArray};\n\n/// use arrow2::compute::regex_match::regex_match_scalar;\n\n///\n\n/// let strings = Utf8Array::<i32>::from_slice(&vec![\"ArAow\", \"A_B\", \"AAA\"]);\n\n///\n\n/// let result = regex_match_scalar(&strings, \"^A.A\").unwrap();\n\n/// assert_eq!(result, BooleanArray::from_slice(&vec![true, false, true]));\n\n/// ```\n\npub fn regex_match_scalar<O: Offset>(values: &Utf8Array<O>, regex: &str) -> Result<BooleanArray> {\n\n let regex = Regex::new(regex)\n\n .map_err(|e| ArrowError::InvalidArgumentError(format!(\"Unable to compile regex: {}\", e)))?;\n\n Ok(unary_utf8_boolean(values, |x| regex.is_match(x)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn test_generic<O: Offset, F: Fn(&Utf8Array<O>, &Utf8Array<O>) -> Result<BooleanArray>>(\n\n lhs: Vec<&str>,\n\n pattern: Vec<&str>,\n\n op: F,\n\n expected: Vec<bool>,\n\n ) {\n\n let lhs = Utf8Array::<O>::from_slice(lhs);\n\n let pattern = Utf8Array::<O>::from_slice(pattern);\n\n let expected = BooleanArray::from_slice(expected);\n\n let result = op(&lhs, &pattern).unwrap();\n", "file_path": "src/compute/regex_match.rs", "rank": 19, "score": 296340.5131771032 }, { "content": "/// Sort the `ArrayRef` using `SortOptions`.\n\n///\n\n/// Performs a stable sort on values and indices. 
Nulls are ordered according to the `nulls_first` flag in `options`.\n\n/// Floats are sorted using IEEE 754 totalOrder\n\n///\n\n/// Returns an `ArrowError::ComputeError(String)` if the array type is either unsupported by `sort_to_indices` or `take`.\n\n///\n\npub fn sort(values: &dyn Array, options: &SortOptions) -> Result<Box<dyn Array>> {\n\n match values.data_type() {\n\n DataType::Int8 => dyn_sort!(i8, values, ord::total_cmp, options),\n\n DataType::Int16 => dyn_sort!(i16, values, ord::total_cmp, options),\n\n DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => {\n\n dyn_sort!(i32, values, ord::total_cmp, options)\n\n }\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, None)\n\n | DataType::Duration(_) => dyn_sort!(i64, values, ord::total_cmp, options),\n\n DataType::UInt8 => dyn_sort!(u8, values, ord::total_cmp, options),\n\n DataType::UInt16 => dyn_sort!(u16, values, ord::total_cmp, options),\n\n DataType::UInt32 => dyn_sort!(u32, values, ord::total_cmp, options),\n\n DataType::UInt64 => dyn_sort!(u64, values, ord::total_cmp, options),\n\n DataType::Float32 => dyn_sort!(f32, values, ord::total_cmp_f32, options),\n", "file_path": "src/compute/sort/mod.rs", "rank": 20, "score": 295118.87861938076 }, { "content": "pub fn compare(lhs: &dyn Array, rhs: &dyn Array, operator: Operator) -> Result<BooleanArray> {\n\n let data_type = lhs.data_type();\n\n if data_type != rhs.data_type() {\n\n return Err(ArrowError::NotYetImplemented(\n\n \"Comparison is only supported for arrays of the same logical type\".to_string(),\n\n ));\n\n }\n\n match data_type {\n\n DataType::Int8 => {\n\n let lhs = lhs.as_any().downcast_ref::<Int8Array>().unwrap();\n\n let rhs = rhs.as_any().downcast_ref::<Int8Array>().unwrap();\n\n primitive::compare(lhs, rhs, operator)\n\n }\n\n DataType::Int16 => {\n\n let lhs = lhs.as_any().downcast_ref::<Int16Array>().unwrap();\n\n let rhs = 
rhs.as_any().downcast_ref::<Int16Array>().unwrap();\n\n primitive::compare(lhs, rhs, operator)\n\n }\n\n DataType::Int32\n\n | DataType::Date32\n", "file_path": "src/compute/comparison/mod.rs", "rank": 21, "score": 294988.0191833225 }, { "content": "/// Returns an ArrayRef with a substring starting from `start` and with optional length `length` of each of the elements in `array`.\n\n/// `start` can be negative, in which case the start counts from the end of the string.\n\n/// this function errors when the passed array is not a \\[Large\\]String array.\n\npub fn substring(array: &dyn Array, start: i64, length: &Option<u64>) -> Result<Box<dyn Array>> {\n\n match array.data_type() {\n\n DataType::LargeUtf8 => Ok(Box::new(utf8_substring(\n\n array\n\n .as_any()\n\n .downcast_ref::<Utf8Array<i64>>()\n\n .expect(\"A large string is expected\"),\n\n start,\n\n &length.map(|e| e as i64),\n\n ))),\n\n DataType::Utf8 => Ok(Box::new(utf8_substring(\n\n array\n\n .as_any()\n\n .downcast_ref::<Utf8Array<i32>>()\n\n .expect(\"A string is expected\"),\n\n start as i32,\n\n &length.map(|e| e as i32),\n\n ))),\n\n _ => Err(ArrowError::InvalidArgumentError(format!(\n\n \"substring does not support type {:?}\",\n", "file_path": "src/compute/substring.rs", "rank": 22, "score": 294409.0201542848 }, { "content": "pub fn take<I: Offset>(array: &StructArray, indices: &PrimitiveArray<I>) -> Result<StructArray> {\n\n let values: Vec<Arc<dyn Array>> = array\n\n .values()\n\n .iter()\n\n .map(|a| super::take(a.as_ref(), indices).map(|x| x.into()))\n\n .collect::<Result<_>>()?;\n\n let validity = take_validity(array.validity(), indices)?;\n\n Ok(StructArray::from_data(\n\n array.fields().to_vec(),\n\n values,\n\n validity,\n\n ))\n\n}\n", "file_path": "src/compute/take/structure.rs", "rank": 23, "score": 294164.74333797116 }, { "content": "/// returns a comparison function that compares values at two different slots\n\n/// between two [`Array`].\n\n/// # Example\n\n/// ```\n\n/// use 
arrow2::array::{ord::build_compare, Primitive};\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// # fn main() -> arrow2::error::Result<()> {\n\n/// let array1 = Primitive::from_slice(&[1, 2]).to(DataType::Int32);\n\n/// let array2 = Primitive::from_slice(&[3, 4]).to(DataType::Int32);\n\n///\n\n/// let cmp = build_compare(&array1, &array2)?;\n\n///\n\n/// // 1 (index 0 of array1) is smaller than 4 (index 1 of array2)\n\n/// assert_eq!(std::cmp::Ordering::Less, (cmp)(0, 1));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n/// # Error\n\n/// The arrays' [`DataType`] must be equal and the types must have a natural order.\n\n// This is a factory of comparisons.\n\n// The lifetime 'a enforces that we cannot use the closure beyond any of the array's lifetime.\n\npub fn build_compare<'a>(left: &'a dyn Array, right: &'a dyn Array) -> Result<DynComparator<'a>> {\n\n use DataType::*;\n\n use IntervalUnit::*;\n\n use TimeUnit::*;\n\n Ok(match (left.data_type(), right.data_type()) {\n\n (a, b) if a != b => {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Can't compare arrays of different types\".to_string(),\n\n ));\n\n }\n\n (Boolean, Boolean) => compare_boolean(left, right),\n\n (UInt8, UInt8) => compare_primitives::<u8>(left, right),\n\n (UInt16, UInt16) => compare_primitives::<u16>(left, right),\n\n (UInt32, UInt32) => compare_primitives::<u32>(left, right),\n\n (UInt64, UInt64) => compare_primitives::<u64>(left, right),\n\n (Int8, Int8) => compare_primitives::<i8>(left, right),\n\n (Int16, Int16) => compare_primitives::<i16>(left, right),\n\n (Int32, Int32)\n\n | (Date32, Date32)\n\n | (Time32(Second), Time32(Second))\n", "file_path": "src/array/ord.rs", "rank": 24, "score": 293907.6643815643 }, { "content": "/// Execute an arithmetic operation with two arrays. 
It uses the enum Operator\n\n/// to select the type of operation that is going to be performed with the two\n\n/// arrays\n\npub fn arithmetic(lhs: &dyn Array, op: Operator, rhs: &dyn Array) -> Result<Box<dyn Array>> {\n\n use DataType::*;\n\n use Operator::*;\n\n match (lhs.data_type(), op, rhs.data_type()) {\n\n (Int8, _, Int8) => primitive!(lhs, rhs, op, i8),\n\n (Int16, _, Int16) => primitive!(lhs, rhs, op, i16),\n\n (Int32, _, Int32) => primitive!(lhs, rhs, op, i32),\n\n (Int64, _, Int64) | (Duration(_), _, Duration(_)) => {\n\n primitive!(lhs, rhs, op, i64)\n\n }\n\n (UInt8, _, UInt8) => primitive!(lhs, rhs, op, u8),\n\n (UInt16, _, UInt16) => primitive!(lhs, rhs, op, u16),\n\n (UInt32, _, UInt32) => primitive!(lhs, rhs, op, u32),\n\n (UInt64, _, UInt64) => primitive!(lhs, rhs, op, u64),\n\n (Float32, _, Float32) => primitive!(lhs, rhs, op, f32),\n\n (Float64, _, Float64) => primitive!(lhs, rhs, op, f64),\n\n (Decimal(_, _), _, Decimal(_, _)) => {\n\n let lhs = lhs.as_any().downcast_ref().unwrap();\n\n let rhs = rhs.as_any().downcast_ref().unwrap();\n\n\n", "file_path": "src/compute/arithmetics/mod.rs", "rank": 25, "score": 291623.18289012846 }, { "content": "pub fn date32_to_date64(from: &PrimitiveArray<i32>) -> PrimitiveArray<i64> {\n\n unary(from, |x| x as i64 * MILLISECONDS_IN_DAY, DataType::Date64)\n\n}\n\n\n", "file_path": "src/compute/cast/primitive_to.rs", "rank": 26, "score": 285099.3902629885 }, { "content": "pub fn date64_to_date32(from: &PrimitiveArray<i64>) -> PrimitiveArray<i32> {\n\n unary(from, |x| (x / MILLISECONDS_IN_DAY) as i32, DataType::Date32)\n\n}\n\n\n", "file_path": "src/compute/cast/primitive_to.rs", "rank": 27, "score": 285099.3902629885 }, { "content": "pub fn take<O: Offset>(values: &dyn Array, indices: &PrimitiveArray<O>) -> Result<Box<dyn Array>> {\n\n if indices.len() == 0 {\n\n return Ok(new_empty_array(values.data_type().clone()));\n\n }\n\n\n\n match values.data_type() {\n\n DataType::Null => 
Ok(Box::new(NullArray::from_data(indices.len()))),\n\n DataType::Boolean => {\n\n let values = values.as_any().downcast_ref::<BooleanArray>().unwrap();\n\n Ok(Box::new(boolean::take::<O>(values, indices)?))\n\n }\n\n DataType::Int8 => downcast_take!(i8, values, indices),\n\n DataType::Int16 => downcast_take!(i16, values, indices),\n\n DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => downcast_take!(i32, values, indices),\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n", "file_path": "src/compute/take/mod.rs", "rank": 28, "score": 282509.96017985884 }, { "content": "/// Sort a list of [`Array`] using [`SortOptions`] provided for each array.\n\n/// # Implementaqtion\n\n/// The sort is stable and lexicographical on values.\n\n///\n\n/// Returns an [`ArrowError`] if any of the array type is either unsupported by\n\n/// `lexsort_to_indices` or `take`.\n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// use std::convert::From;\n\n/// use arrow2::array::{Utf8Array, Int64Array, Array};\n\n/// use arrow2::compute::sort::{SortColumn, SortOptions, lexsort};\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let int64 = Int64Array::from(&[None, Some(-2), Some(89), Some(-64), Some(101)]);\n\n/// let utf8 = Utf8Array::<i32>::from(&vec![Some(\"hello\"), Some(\"world\"), Some(\",\"), Some(\"foobar\"), Some(\"!\")]);\n\n///\n\n/// let sorted_columns = lexsort(&vec![\n\n/// SortColumn {\n\n/// values: &int64,\n\n/// options: None,\n\n/// },\n\n/// SortColumn {\n\n/// values: &utf8,\n\n/// options: Some(SortOptions {\n\n/// descending: true,\n\n/// nulls_first: false,\n\n/// }),\n\n/// },\n\n/// ]).unwrap();\n\n///\n\n/// let sorted = sorted_columns[0].as_any().downcast_ref::<Int64Array>().unwrap();\n\n/// assert_eq!(sorted.value(1), -64);\n\n/// assert!(sorted.is_null(0));\n\n/// ```\n\npub fn lexsort(columns: &[SortColumn]) -> Result<Vec<Box<dyn Array>>> {\n\n let indices = 
lexsort_to_indices(columns)?;\n\n columns\n\n .iter()\n\n .map(|c| take::take(c.values, &indices))\n\n .collect()\n\n}\n\n\n", "file_path": "src/compute/sort/lex_sort.rs", "rank": 29, "score": 282283.7873052262 }, { "content": "/// Performs `AND` operation on two arrays. If either left or right value is null then the\n\n/// result is also null.\n\n/// # Error\n\n/// This function errors when the arrays have different lengths.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::error::Result;\n\n/// use arrow2::compute::boolean::and;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(&[Some(false), Some(true), None]);\n\n/// let b = BooleanArray::from(&[Some(true), Some(true), Some(false)]);\n\n/// let and_ab = and(&a, &b)?;\n\n/// assert_eq!(and_ab, BooleanArray::from(&[Some(false), Some(true), None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result<BooleanArray> {\n\n binary_boolean_kernel(&lhs, &rhs, |lhs, rhs| lhs & rhs)\n\n}\n\n\n", "file_path": "src/compute/boolean.rs", "rank": 30, "score": 279452.1218389705 }, { "content": "/// Performs `OR` operation on two arrays. 
If either left or right value is null then the\n\n/// result is also null.\n\n/// # Error\n\n/// This function errors when the arrays have different lengths.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::error::Result;\n\n/// use arrow2::compute::boolean::or;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let b = BooleanArray::from(vec![Some(true), Some(true), Some(false)]);\n\n/// let or_ab = or(&a, &b)?;\n\n/// assert_eq!(or_ab, BooleanArray::from(vec![Some(true), Some(true), None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn or(lhs: &BooleanArray, rhs: &BooleanArray) -> Result<BooleanArray> {\n\n binary_boolean_kernel(&lhs, &rhs, |lhs, rhs| lhs | rhs)\n\n}\n\n\n", "file_path": "src/compute/boolean.rs", "rank": 31, "score": 279451.8717893441 }, { "content": "/// Returns a vector of slices from different sorted arrays that can be used to create sorted arrays.\n\n/// `pairs` is an array representing multiple sorted array sets. The expected format is\n\n///\n\n/// pairs: [([a00, a01], o1), ([a10, a11], o2), ...]\n\n/// where aj0.len() == aj0.len()\n\n/// aj1.len() == aj1.len()\n\n/// ...\n\n/// In other words, `pairs.i.0[j]` must be an array coming from a batch of equal len arrays.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow2::array::Int32Array;\n\n/// use arrow2::compute::merge_sort::{slices, SortOptions};\n\n/// # use arrow2::error::Result;\n\n/// # fn main() -> Result<()> {\n\n/// let a = Int32Array::from_slice(&[2, 4, 6]);\n\n/// let b = Int32Array::from_slice(&[0, 1, 3]);\n\n/// let slices = slices(&[(&[&a, &b], &SortOptions::default())])?;\n\n/// assert_eq!(slices, vec![(1, 0, 2), (0, 0, 1), (1, 2, 1), (0, 1, 2)]);\n\n///\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n/// # Error\n\n/// This function errors if the arrays `a0i` are not pairwise sortable. 
This happens when either\n\n/// they have not the same [DataType] or when their [DataType] does not correspond to a sortable type.\n\n/// # Panic\n\n/// This function panics if:\n\n/// * `pairs` has no elements\n\n/// * the length condition above is not fulfilled\n\npub fn slices(pairs: &[(&[&dyn Array], &SortOptions)]) -> Result<Vec<MergeSlice>> {\n\n assert!(!pairs.is_empty());\n\n let comparator = build_comparator(pairs)?;\n\n\n\n // pairs: [([a00, a01], o1), ([a10, a11], o2), ...]\n\n // slices: [(0, 0, len), (1, 0, len)]\n\n\n\n let slices = pairs[0]\n\n .0\n\n .iter()\n\n .enumerate()\n\n .map(|(index, array)| vec![(index, 0, array.len())])\n\n .collect::<Vec<_>>();\n\n\n\n let slices = slices\n\n .iter()\n\n .map(|slice| slice.as_ref())\n\n .collect::<Vec<_>>();\n\n Ok(recursive_merge_sort(&slices, &comparator))\n\n}\n\n\n", "file_path": "src/compute/merge_sort/mod.rs", "rank": 32, "score": 279106.78977753053 }, { "content": "/// Logical 'and' with [Kleene logic](https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics)\n\n/// # Errors\n\n/// This function errors if the operands have different lengths.\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use arrow2::error::Result;\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::compute::boolean_kleene::and;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(&[Some(true), Some(false), None]);\n\n/// let b = BooleanArray::from(&[None, None, None]);\n\n/// let and_ab = and(&a, &b)?;\n\n/// assert_eq!(and_ab, BooleanArray::from(&[None, Some(false), None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result<BooleanArray> {\n\n if lhs.len() != rhs.len() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Cannot perform bitwise operation on arrays of different length\".to_string(),\n\n ));\n\n }\n\n\n\n let lhs_values = lhs.values();\n\n let rhs_values = rhs.values();\n\n\n\n let lhs_validity = lhs.validity();\n\n let 
rhs_validity = rhs.validity();\n\n\n\n let validity = match (lhs_validity, rhs_validity) {\n\n (Some(lhs_validity), Some(rhs_validity)) => {\n\n Some(quaternary(\n\n &lhs_values,\n\n &rhs_values,\n\n lhs_validity,\n\n rhs_validity,\n", "file_path": "src/compute/boolean_kleene.rs", "rank": 33, "score": 276235.8002035601 }, { "content": "/// Logical 'or' with [Kleene logic](https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics)\n\n/// # Errors\n\n/// This function errors if the operands have different lengths.\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use arrow2::error::Result;\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::compute::boolean_kleene::or;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(&[Some(true), Some(false), None]);\n\n/// let b = BooleanArray::from(&[None, None, None]);\n\n/// let or_ab = or(&a, &b)?;\n\n/// assert_eq!(or_ab, BooleanArray::from(&[Some(true), None, None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn or(lhs: &BooleanArray, rhs: &BooleanArray) -> Result<BooleanArray> {\n\n if lhs.len() != rhs.len() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Cannot perform bitwise operation on arrays of different length\".to_string(),\n\n ));\n\n }\n\n\n\n let lhs_values = lhs.values();\n\n let rhs_values = rhs.values();\n\n\n\n let lhs_validity = lhs.validity();\n\n let rhs_validity = rhs.validity();\n\n\n\n let validity = match (lhs_validity, rhs_validity) {\n\n (Some(lhs_validity), Some(rhs_validity)) => {\n\n Some(quaternary(\n\n &lhs_values,\n\n &rhs_values,\n\n lhs_validity,\n\n rhs_validity,\n", "file_path": "src/compute/boolean_kleene.rs", "rank": 34, "score": 276235.80020356004 }, { "content": "/// Returns a prepared function optimized to filter multiple arrays.\n\n/// Creating this function requires time, but using it is faster than [filter] when the\n\n/// same filter needs to be applied to multiple arrays (e.g. 
a multi-column `RecordBatch`).\n\n/// WARNING: the nulls of `filter` are ignored and the value on its slot is considered.\n\n/// Therefore, it is considered undefined behavior to pass `filter` with null values.\n\npub fn build_filter(filter: &BooleanArray) -> Result<Filter> {\n\n let iter = SlicesIterator::new(filter.values());\n\n let filter_count = iter.slots();\n\n let chunks = iter.collect::<Vec<_>>();\n\n\n\n Ok(Box::new(move |array: &dyn Array| match array.data_type() {\n\n DataType::UInt8 => {\n\n dyn_build_filter!(u8, array, filter_count, chunks)\n\n }\n\n DataType::UInt16 => {\n\n dyn_build_filter!(u16, array, filter_count, chunks)\n\n }\n\n DataType::UInt32 => {\n\n dyn_build_filter!(u32, array, filter_count, chunks)\n\n }\n\n DataType::UInt64 => {\n\n dyn_build_filter!(u64, array, filter_count, chunks)\n\n }\n\n DataType::Int8 => {\n\n dyn_build_filter!(i8, array, filter_count, chunks)\n", "file_path": "src/compute/filter.rs", "rank": 35, "score": 275736.75217628176 }, { "content": "fn write(array: &dyn Array) -> Result<()> {\n\n let field = Field::new(\"c1\", array.data_type().clone(), true);\n\n\n\n let compression = CompressionCodec::Uncompressed;\n\n\n\n let parquet_type = to_parquet_type(&field)?;\n\n\n\n let row_groups = std::iter::once(Result::Ok(std::iter::once(Ok(std::iter::once(\n\n array_to_page(array, &parquet_type, compression),\n\n )))));\n\n\n\n let schema = Schema::new(vec![field]);\n\n let mut writer = Cursor::new(vec![]);\n\n write_file(&mut writer, row_groups, &schema, compression, None)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "benches/write_parquet.rs", "rank": 36, "score": 270835.8554476272 }, { "content": "/// Clones a dynamic [`Array`].\n\n/// # Implementation\n\n/// This operation is `O(1)` over `len`, as it amounts to increase two ref counts\n\n/// and moving the concrete struct under a `Box`.\n\npub fn clone(array: &dyn Array) -> Box<dyn Array> {\n\n match array.data_type() {\n\n DataType::Null => clone_dyn!(array, 
NullArray),\n\n DataType::Boolean => clone_dyn!(array, BooleanArray),\n\n DataType::Int8 => clone_dyn!(array, PrimitiveArray<i8>),\n\n DataType::Int16 => clone_dyn!(array, PrimitiveArray<i16>),\n\n DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => {\n\n clone_dyn!(array, PrimitiveArray<i32>)\n\n }\n\n DataType::Interval(IntervalUnit::DayTime) => clone_dyn!(array, PrimitiveArray<days_ms>),\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, _)\n\n | DataType::Duration(_) => clone_dyn!(array, PrimitiveArray<i64>),\n\n DataType::Decimal(_, _) => clone_dyn!(array, PrimitiveArray<i128>),\n\n DataType::UInt8 => clone_dyn!(array, PrimitiveArray<u8>),\n", "file_path": "src/array/mod.rs", "rank": 37, "score": 262936.9204413878 }, { "content": "pub fn page_iter_to_array<I: Iterator<Item = std::result::Result<CompressedPage, ParquetError>>>(\n\n iter: I,\n\n metadata: &ColumnChunkMetaData,\n\n) -> Result<Box<dyn Array>> {\n\n match metadata.descriptor().type_() {\n\n ParquetType::PrimitiveType {\n\n physical_type,\n\n converted_type,\n\n logical_type,\n\n ..\n\n } => match (physical_type, converted_type, logical_type) {\n\n // todo: apply conversion rules and the like\n\n (PhysicalType::Int32, _, _) => {\n\n page_iter_i32(iter, metadata, converted_type, logical_type)\n\n }\n\n (PhysicalType::Int64, _, _) => {\n\n page_iter_i64(iter, metadata, converted_type, logical_type)\n\n }\n\n (PhysicalType::Float, None, None) => Ok(Box::new(primitive::iter_to_array(\n\n iter,\n", "file_path": "src/io/parquet/read/mod.rs", "rank": 38, "score": 262492.4165918842 }, { "content": "#[cfg(target_endian = \"big\")]\n\n#[inline]\n\npub fn is_native_little_endian() -> bool {\n\n false\n\n}\n", "file_path": "src/endianess.rs", "rank": 39, "score": 261461.26846123184 }, { "content": "/// Performs unary `NOT` operation on an arrays. 
If value is null then the result is also\n\n/// null.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::compute::boolean::not;\n\n/// # fn main() {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let not_a = not(&a);\n\n/// assert_eq!(not_a, BooleanArray::from(vec![Some(true), Some(false), None]));\n\n/// # }\n\n/// ```\n\npub fn not(array: &BooleanArray) -> BooleanArray {\n\n let values = !array.values();\n\n let validity = array.validity().clone();\n\n BooleanArray::from_data(values, validity)\n\n}\n\n\n", "file_path": "src/compute/boolean.rs", "rank": 40, "score": 260489.74222912831 }, { "content": "/// `take` implementation for boolean arrays\n\npub fn take<I: Offset>(values: &BooleanArray, indices: &PrimitiveArray<I>) -> Result<BooleanArray> {\n\n let indices_has_validity = indices.null_count() > 0;\n\n let values_has_validity = values.null_count() > 0;\n\n\n\n let (values, validity) = match (values_has_validity, indices_has_validity) {\n\n (false, false) => take_no_validity(values.values(), indices.values())?,\n\n (true, false) => take_values_validity(values, indices.values())?,\n\n (false, true) => take_indices_validity(values.values(), indices)?,\n\n (true, true) => take_values_indices_validity(values, indices)?,\n\n };\n\n\n\n Ok(BooleanArray::from_data(values, validity))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::array::Int32Array;\n\n\n\n use super::*;\n\n\n", "file_path": "src/compute/take/boolean.rs", "rank": 41, "score": 260207.58392040245 }, { "content": "#[pyfunction]\n\nfn import_primitive(array: PyObject, py: Python) -> PyResult<bool> {\n\n let array = to_rust(array, py)?;\n\n let expected =\n\n Arc::new(Primitive::<i64>::from(vec![Some(2), None, Some(6)]).to(DataType::Int64))\n\n as ArrayRef;\n\n\n\n Ok(array == expected)\n\n}\n\n\n\n/// Converts to rust and back to python\n", "file_path": "arrow-pyarrow-integration-testing/src/lib.rs", "rank": 42, "score": 
257551.39957810583 }, { "content": "/// Sort elements lexicographically from a list of `ArrayRef` into an unsigned integer\n\n/// [`Int32Array`] of indices.\n\npub fn lexsort_to_indices(columns: &[SortColumn]) -> Result<PrimitiveArray<i32>> {\n\n if columns.is_empty() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Sort requires at least one column\".to_string(),\n\n ));\n\n }\n\n if columns.len() == 1 {\n\n // fallback to non-lexical sort\n\n let column = &columns[0];\n\n return sort_to_indices(column.values, &column.options.unwrap_or_default());\n\n }\n\n\n\n let row_count = columns[0].values.len();\n\n if columns.iter().any(|item| item.values.len() != row_count) {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"lexical sort columns have different row counts\".to_string(),\n\n ));\n\n };\n\n\n\n // map arrays to comparators\n", "file_path": "src/compute/sort/lex_sort.rs", "rank": 43, "score": 257409.1031454992 }, { "content": "/// Prints a visual representation of record batches to stdout\n\npub fn print(results: &[RecordBatch]) -> Result<()> {\n\n create_table(results)?.printstd();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/print.rs", "rank": 44, "score": 255895.5958852205 }, { "content": "/// Perform `left < right` operation on two arrays.\n\npub fn lt<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op(lhs, rhs, |a, b| a < b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 45, "score": 254536.3254834311 }, { "content": "/// Perform `lhs == rhs` operation on two arrays.\n\npub fn eq<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType,\n\n{\n\n compare_op(lhs, rhs, |a, b| a == b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 46, "score": 254536.3254834311 }, { "content": "/// Perform `left != right` operation on two arrays.\n\npub fn neq<T>(lhs: 
&PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType,\n\n{\n\n compare_op(lhs, rhs, |a, b| a != b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 47, "score": 254536.3254834311 }, { "content": "/// Perform `left > right` operation on two arrays. Non-null values are greater than null\n\n/// values.\n\npub fn gt<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op(lhs, rhs, |a, b| a > b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 48, "score": 254536.06373450486 }, { "content": "/// Perform `left != right` operation on an array and a scalar value.\n\npub fn neq_scalar<T>(lhs: &PrimitiveArray<T>, rhs: T) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType,\n\n{\n\n compare_op_scalar(lhs, rhs, |a, b| a != b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 49, "score": 253670.61299927565 }, { "content": "/// Perform `left == right` operation on an array and a scalar value.\n\npub fn eq_scalar<T>(lhs: &PrimitiveArray<T>, rhs: T) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType,\n\n{\n\n compare_op_scalar(lhs, rhs, |a, b| a == b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 50, "score": 253670.61299927565 }, { "content": "/// Perform `left < right` operation on an array and a scalar value.\n\npub fn lt_scalar<T>(lhs: &PrimitiveArray<T>, rhs: T) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op_scalar(lhs, rhs, |a, b| a < b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 51, "score": 253670.61299927565 }, { "content": "/// Perform `left > right` operation on an array and a scalar value.\n\n/// Non-null values are greater than null values.\n\npub fn gt_scalar<T>(lhs: &PrimitiveArray<T>, rhs: T) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + 
std::cmp::PartialOrd,\n\n{\n\n compare_op_scalar(lhs, rhs, |a, b| a > b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 52, "score": 253670.35552965337 }, { "content": "/// Multiply two decimal primitive arrays with the same precision and scale. If\n\n/// the precision and scale is different, then an InvalidArgumentError is\n\n/// returned. This function panics if the multiplied numbers result in a number\n\n/// larger than the possible number for the selected precision.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::decimal::mul::mul;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![Some(1_00i128), Some(1_00i128), None, Some(2_00i128)]).to(DataType::Decimal(5, 2));\n\n/// let b = Primitive::from(&vec![Some(1_00i128), Some(2_00i128), None, Some(2_00i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// let result = mul(&a, &b).unwrap();\n\n/// let expected = Primitive::from(&vec![Some(1_00i128), Some(2_00i128), None, Some(4_00i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn mul(lhs: &PrimitiveArray<i128>, rhs: &PrimitiveArray<i128>) -> Result<PrimitiveArray<i128>> {\n\n // Matching on both data types from both arrays\n\n // This match will be true only when precision and scale from both\n\n // arrays are the same, otherwise it will return and ArrowError\n\n match (lhs.data_type(), rhs.data_type()) {\n\n (DataType::Decimal(lhs_p, lhs_s), DataType::Decimal(rhs_p, rhs_s)) => {\n\n if lhs_p == rhs_p && lhs_s == rhs_s {\n\n // Closure for the binary operation. This closure will panic if\n\n // the sum of the values is larger than the max value possible\n\n // for the decimal precision\n\n let op = move |a: i128, b: i128| {\n\n // The multiplication between i128 can overflow if they are\n\n // very large numbers. 
For that reason a checked\n\n // multiplication is used.\n\n let res: i128 = a.checked_mul(b).expect(\"Mayor overflow for multiplication\");\n\n\n\n // The multiplication is done using the numbers without scale.\n\n // The resulting scale of the value has to be corrected by\n\n // dividing by (10^scale)\n\n\n", "file_path": "src/compute/arithmetics/decimal/mul.rs", "rank": 53, "score": 251868.4030176546 }, { "content": "/// Subtract two decimal primitive arrays with the same precision and scale. If\n\n/// the precision and scale is different, then an InvalidArgumentError is\n\n/// returned. This function panics if the subtracted numbers result in a number\n\n/// smaller than the possible number for the selected precision.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::decimal::sub::sub;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![Some(1i128), Some(1i128), None, Some(2i128)]).to(DataType::Decimal(5, 2));\n\n/// let b = Primitive::from(&vec![Some(1i128), Some(2i128), None, Some(2i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// let result = sub(&a, &b).unwrap();\n\n/// let expected = Primitive::from(&vec![Some(0i128), Some(-1i128), None, Some(0i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn sub(lhs: &PrimitiveArray<i128>, rhs: &PrimitiveArray<i128>) -> Result<PrimitiveArray<i128>> {\n\n // Matching on both data types from both arrays This match will be true\n\n // only when precision and scale from both arrays are the same, otherwise\n\n // it will return and ArrowError\n\n match (lhs.data_type(), rhs.data_type()) {\n\n (DataType::Decimal(lhs_p, lhs_s), DataType::Decimal(rhs_p, rhs_s)) => {\n\n if lhs_p == rhs_p && lhs_s == rhs_s {\n\n // Closure for the binary operation. 
This closure will panic if\n\n // the sum of the values is larger than the max value possible\n\n // for the decimal precision\n\n let op = move |a, b| {\n\n let res: i128 = a - b;\n\n\n\n if res.abs() > max_value(*lhs_p) {\n\n panic!(\"Overflow in subtract presented for precision {}\", lhs_p);\n\n }\n\n\n\n res\n\n };\n\n\n", "file_path": "src/compute/arithmetics/decimal/sub.rs", "rank": 54, "score": 251868.4030176546 }, { "content": "/// Adds two decimal primitive arrays with the same precision and scale. If the\n\n/// precision and scale is different, then an InvalidArgumentError is returned.\n\n/// This function panics if the added numbers result in a number larger than\n\n/// the possible number for the selected precision.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::decimal::add::add;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![Some(1i128), Some(1i128), None, Some(2i128)]).to(DataType::Decimal(5, 2));\n\n/// let b = Primitive::from(&vec![Some(1i128), Some(2i128), None, Some(2i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// let result = add(&a, &b).unwrap();\n\n/// let expected = Primitive::from(&vec![Some(2i128), Some(3i128), None, Some(4i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn add(lhs: &PrimitiveArray<i128>, rhs: &PrimitiveArray<i128>) -> Result<PrimitiveArray<i128>> {\n\n // Matching on both data types from both arrays\n\n // This match will be true only when precision and scale from both\n\n // arrays are the same, otherwise it will return and ArrowError\n\n match (lhs.data_type(), rhs.data_type()) {\n\n (DataType::Decimal(lhs_p, lhs_s), DataType::Decimal(rhs_p, rhs_s)) => {\n\n if lhs_p == rhs_p && lhs_s == rhs_s {\n\n // Closure for the binary operation. 
This closure will panic if\n\n // the sum of the values is larger than the max value possible\n\n // for the decimal precision\n\n let op = move |a, b| {\n\n let res: i128 = a + b;\n\n\n\n if res.abs() > max_value(*lhs_p) {\n\n panic!(\"Overflow in addition presented for precision {}\", lhs_p);\n\n }\n\n\n\n res\n\n };\n\n\n", "file_path": "src/compute/arithmetics/decimal/add.rs", "rank": 55, "score": 251868.4030176546 }, { "content": "/// Divide two decimal primitive arrays with the same precision and scale. If\n\n/// the precision and scale is different, then an InvalidArgumentError is\n\n/// returned. This function panics if the dividend is divided by 0 or None.\n\n/// This function also panics if the division produces a number larger\n\n/// than the possible number for the array precision.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::decimal::div::div;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![Some(1_00i128), Some(4_00i128), Some(6_00i128)]).to(DataType::Decimal(5, 2));\n\n/// let b = Primitive::from(&vec![Some(1_00i128), Some(2_00i128), Some(2_00i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// let result = div(&a, &b).unwrap();\n\n/// let expected = Primitive::from(&vec![Some(1_00i128), Some(2_00i128), Some(3_00i128)]).to(DataType::Decimal(5, 2));\n\n///\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn div(lhs: &PrimitiveArray<i128>, rhs: &PrimitiveArray<i128>) -> Result<PrimitiveArray<i128>> {\n\n // Matching on both data types from both arrays\n\n // This match will be true only when precision and scale from both\n\n // arrays are the same, otherwise it will return and ArrowError\n\n match (lhs.data_type(), rhs.data_type()) {\n\n (DataType::Decimal(lhs_p, lhs_s), DataType::Decimal(rhs_p, rhs_s)) => {\n\n if lhs_p == rhs_p && lhs_s == rhs_s {\n\n // Closure for the binary operation. 
This closure will panic if\n\n // the sum of the values is larger than the max value possible\n\n // for the decimal precision\n\n let op = move |a: i128, b: i128| {\n\n // The division is done using the numbers without scale.\n\n // The dividend is scaled up to maintain precision after the\n\n // division\n\n\n\n // 222.222 --> 222222000\n\n // 123.456 --> 123456\n\n // -------- ---------\n\n // 1.800 <-- 1800\n\n let numeral: i128 = a * 10i128.pow(*lhs_s as u32);\n", "file_path": "src/compute/arithmetics/decimal/div.rs", "rank": 56, "score": 251868.18585871818 }, { "content": "/// Perform `left <= right` operation on two arrays.\n\npub fn lt_eq<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op(lhs, rhs, |a, b| a <= b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 57, "score": 251845.20307343412 }, { "content": "/// Perform `left >= right` operation on two arrays. 
Non-null values are greater than null\n\n/// values.\n\npub fn gt_eq<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op(lhs, rhs, |a, b| a >= b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 58, "score": 251844.94132450788 }, { "content": "/// Perform `left <= right` operation on an array and a scalar value.\n\n/// Null values are less than non-null values.\n\npub fn lt_eq_scalar<T>(lhs: &PrimitiveArray<T>, rhs: T) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op_scalar(lhs, rhs, |a, b| a <= b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 59, "score": 250788.93881051656 }, { "content": "/// Perform `left >= right` operation on an array and a scalar value.\n\n/// Non-null values are greater than null values.\n\npub fn gt_eq_scalar<T>(lhs: &PrimitiveArray<T>, rhs: T) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType + std::cmp::PartialOrd,\n\n{\n\n compare_op_scalar(lhs, rhs, |a, b| a >= b)\n\n}\n\n\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 60, "score": 250788.93881051656 }, { "content": "/// Returns `lhs LIKE rhs` operation on two [`Utf8Array`].\n\n///\n\n/// There are two wildcards supported:\n\n///\n\n/// * `%` - The percent sign represents zero, one, or multiple characters\n\n/// * `_` - The underscore represents a single character\n\n///\n\n/// # Error\n\n/// Errors iff:\n\n/// * the arrays have a different length\n\n/// * any of the patterns is not valid\n\n/// # Example\n\n/// ```\n\n/// use arrow2::array::{Utf8Array, BooleanArray};\n\n/// use arrow2::compute::like::like_utf8;\n\n///\n\n/// let strings = Utf8Array::<i32>::from_slice(&[\"Arrow\", \"Arrow\", \"Arrow\", \"Arrow\", \"Ar\"]);\n\n/// let patterns = Utf8Array::<i32>::from_slice(&[\"A%\", \"B%\", \"%r_ow\", \"A_\", \"A_\"]);\n\n///\n\n/// let result = like_utf8(&strings, 
&patterns).unwrap();\n\n/// assert_eq!(result, BooleanArray::from_slice(&[true, false, true, false, true]));\n\n/// ```\n\npub fn like_utf8<O: Offset>(lhs: &Utf8Array<O>, rhs: &Utf8Array<O>) -> Result<BooleanArray> {\n\n a_like_utf8(lhs, rhs, |x| x)\n\n}\n\n\n", "file_path": "src/compute/like.rs", "rank": 61, "score": 249015.14517879978 }, { "content": "pub fn nlike_utf8<O: Offset>(lhs: &Utf8Array<O>, rhs: &Utf8Array<O>) -> Result<BooleanArray> {\n\n a_like_utf8(lhs, rhs, |x| !x)\n\n}\n\n\n", "file_path": "src/compute/like.rs", "rank": 62, "score": 248986.555722164 }, { "content": "/// Element-wise hash of a [`BooleanArray`]. Validity is preserved.\n\npub fn hash_boolean(array: &BooleanArray) -> PrimitiveArray<u64> {\n\n let iter = array.values_iter().map(|x| {\n\n let mut hasher = new_hasher!();\n\n x.hash(&mut hasher);\n\n hasher.finish()\n\n });\n\n let values = Buffer::from_trusted_len_iter(iter);\n\n PrimitiveArray::<u64>::from_data(DataType::UInt64, values, array.validity().clone())\n\n}\n\n\n", "file_path": "src/compute/hash.rs", "rank": 63, "score": 248033.84708806584 }, { "content": "/// Returns `lhs LIKE rhs` operation.\n\n///\n\n/// There are two wildcards supported:\n\n///\n\n/// * `%` - The percent sign represents zero, one, or multiple characters\n\n/// * `_` - The underscore represents a single character\n\n///\n\n/// # Error\n\n/// Errors iff:\n\n/// * the arrays have a different length\n\n/// * any of the patterns is not valid\n\n/// # Example\n\n/// ```\n\n/// use arrow2::array::{Utf8Array, BooleanArray};\n\n/// use arrow2::compute::like::like_utf8_scalar;\n\n///\n\n/// let array = Utf8Array::<i32>::from_slice(&[\"Arrow\", \"Arrow\", \"Arrow\", \"BA\"]);\n\n///\n\n/// let result = like_utf8_scalar(&array, &\"A%\").unwrap();\n\n/// assert_eq!(result, BooleanArray::from_slice(&[true, true, true, false]));\n\n/// ```\n\npub fn like_utf8_scalar<O: Offset>(lhs: &Utf8Array<O>, rhs: &str) -> Result<BooleanArray> {\n\n a_like_utf8_scalar(lhs, rhs, |x| 
x)\n\n}\n\n\n", "file_path": "src/compute/like.rs", "rank": 64, "score": 247751.59745597962 }, { "content": "pub fn nlike_utf8_scalar<O: Offset>(lhs: &Utf8Array<O>, rhs: &str) -> Result<BooleanArray> {\n\n a_like_utf8_scalar(lhs, rhs, |x| !x)\n\n}\n", "file_path": "src/compute/like.rs", "rank": 65, "score": 247722.51410976175 }, { "content": "/// Returns a non-null [BooleanArray] with whether each value of the array is not null.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::compute::boolean::is_not_null;\n\n/// # fn main() {\n\n/// let a = BooleanArray::from(&vec![Some(false), Some(true), None]);\n\n/// let a_is_not_null = is_not_null(&a);\n\n/// assert_eq!(a_is_not_null, BooleanArray::from_slice(&vec![true, true, false]));\n\n/// # }\n\n/// ```\n\npub fn is_not_null(input: &dyn Array) -> BooleanArray {\n\n let values = match input.validity() {\n\n None => Bitmap::from_trusted_len_iter(std::iter::repeat(true).take(input.len())),\n\n Some(buffer) => buffer.clone(),\n\n };\n\n BooleanArray::from_data(values, None)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::array::*;\n\n use crate::datatypes::*;\n\n\n\n #[test]\n\n fn test_bool_array_and() {\n\n let a = BooleanArray::from_slice(vec![false, false, true, true]);\n\n let b = BooleanArray::from_slice(vec![false, true, false, true]);\n\n let c = and(&a, &b).unwrap();\n", "file_path": "src/compute/boolean.rs", "rank": 66, "score": 247358.92039830118 }, { "content": "/// Returns a non-null [BooleanArray] with whether each value of the array is null.\n\n/// # Error\n\n/// This function never errors.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow2::array::BooleanArray;\n\n/// use arrow2::compute::boolean::is_null;\n\n/// # fn main() {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let a_is_null = is_null(&a);\n\n/// assert_eq!(a_is_null, BooleanArray::from_slice(vec![false, false, true]));\n\n/// # }\n\n/// 
```\n\npub fn is_null(input: &dyn Array) -> BooleanArray {\n\n let len = input.len();\n\n\n\n let values = match input.validity() {\n\n None => MutableBitmap::from_len_zeroed(len).into(),\n\n Some(buffer) => !buffer,\n\n };\n\n\n\n BooleanArray::from_data(values, None)\n\n}\n\n\n", "file_path": "src/compute/boolean.rs", "rank": 67, "score": 247358.4704734125 }, { "content": "/// Adds two primitive arrays with the same type.\n\n/// Panics if the sum of one pair of values overflows.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::add::add;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![None, Some(6), None, Some(6)]).to(DataType::Int32);\n\n/// let b = Primitive::from(&vec![Some(5), None, None, Some(6)]).to(DataType::Int32);\n\n/// let result = add(&a, &b).unwrap();\n\n/// let expected = Primitive::from(&vec![None, None, None, Some(12)]).to(DataType::Int32);\n\n/// assert_eq!(result, expected)\n\n/// ```\n\npub fn add<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + Add<Output = T>,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n binary(lhs, rhs, lhs.data_type().clone(), |a, b| a + b)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/add.rs", "rank": 68, "score": 246416.8902974849 }, { "content": "/// Divides two primitive arrays with the same type.\n\n/// Panics if the divisor is zero of one pair of values overflows.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::div::div;\n\n/// use arrow2::array::Int32Array;\n\n///\n\n/// let a = Int32Array::from(&[Some(10), Some(6)]);\n\n/// let b = Int32Array::from(&[Some(5), Some(6)]);\n\n/// let result = div(&a, &b).unwrap();\n\n/// let expected = Int32Array::from(&[Some(2), 
Some(1)]);\n\n/// assert_eq!(result, expected)\n\n/// ```\n\npub fn div<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + Div<Output = T>,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n binary(lhs, rhs, lhs.data_type().clone(), |a, b| a / b)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/div.rs", "rank": 69, "score": 246415.74357330406 }, { "content": "/// Multiplies two primitive arrays with the same type.\n\n/// Panics if the multiplication of one pair of values overflows.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::mul::mul;\n\n/// use arrow2::array::Int32Array;\n\n///\n\n/// let a = Int32Array::from(&[None, Some(6), None, Some(6)]);\n\n/// let b = Int32Array::from(&[Some(5), None, None, Some(6)]);\n\n/// let result = mul(&a, &b).unwrap();\n\n/// let expected = Int32Array::from(&[None, None, None, Some(36)]);\n\n/// assert_eq!(result, expected)\n\n/// ```\n\npub fn mul<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + Mul<Output = T>,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n binary(lhs, rhs, lhs.data_type().clone(), |a, b| a * b)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/mul.rs", "rank": 70, "score": 246415.22129378223 }, { "content": "/// Subtracts two primitive arrays with the same type.\n\n/// Panics if the subtraction of one pair of values overflows.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::sub::sub;\n\n/// use arrow2::array::Int32Array;\n\n///\n\n/// let a = Int32Array::from(&[None, Some(6), None, Some(6)]);\n\n/// let b = Int32Array::from(&[Some(5), None, None, 
Some(6)]);\n\n/// let result = sub(&a, &b).unwrap();\n\n/// let expected = Int32Array::from(&[None, None, None, Some(0)]);\n\n/// assert_eq!(result, expected)\n\n/// ```\n\npub fn sub<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + Sub<Output = T>,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n binary(lhs, rhs, lhs.data_type().clone(), |a, b| a - b)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/sub.rs", "rank": 71, "score": 246415.22129378223 }, { "content": "/// Returns the array, taking only the number of elements specified\n\n///\n\n/// Limit performs a zero-copy slice of the array, and is a convenience method on slice\n\n/// where:\n\n/// * it performs a bounds-check on the array\n\n/// * it slices from offset 0\n\npub fn limit(array: &dyn Array, num_elements: usize) -> Box<dyn Array> {\n\n let lim = num_elements.min(array.len());\n\n array.slice(0, lim)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::array::*;\n\n\n\n #[test]\n\n fn test_limit_array() {\n\n let a = Int32Array::from_slice(&[5, 6, 7, 8, 9]);\n\n let b = limit(&a, 3);\n\n let c = b.as_ref().as_any().downcast_ref::<Int32Array>().unwrap();\n\n let expected = Int32Array::from_slice(&[5, 6, 7]);\n\n assert_eq!(&expected, c);\n\n }\n\n}\n", "file_path": "src/compute/limit.rs", "rank": 72, "score": 245793.8104085001 }, { "content": "fn write_single_array(path: &str, array: &dyn Array, field: Field) -> Result<()> {\n\n let schema = Schema::new(vec![field]);\n\n\n\n // declare the compression\n\n let compression = CompressionCodec::Uncompressed;\n\n\n\n // map arrow fields to parquet fields\n\n let parquet_types = schema\n\n .fields()\n\n .iter()\n\n .map(to_parquet_type)\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n // Declare the row group iterator. 
This must be an iterator of iterators of iterators:\n\n // * first iterator of row groups\n\n // * second iterator of column chunks\n\n // * third iterator of pages\n\n // an array can be divided in multiple pages via `.slice(offset, length)` (`O(1)`).\n\n // All column chunks within a row group MUST have the same length.\n\n let row_groups = once(Result::Ok(once(Ok(once(array)\n", "file_path": "examples/parquet_write.rs", "rank": 73, "score": 245103.7308861991 }, { "content": "pub fn iter_to_array<I, E>(mut iter: I, metadata: &ColumnChunkMetaData) -> Result<BooleanArray>\n\nwhere\n\n ArrowError: From<E>,\n\n I: Iterator<Item = std::result::Result<CompressedPage, E>>,\n\n{\n\n // todo: push metadata from the file to get this capacity\n\n let capacity = metadata.num_values() as usize;\n\n let mut values = MutableBitmap::with_capacity(capacity);\n\n let mut validity = MutableBitmap::with_capacity(capacity);\n\n iter.try_for_each(|page| {\n\n extend_from_page(page?, metadata.descriptor(), &mut values, &mut validity)\n\n })?;\n\n\n\n Ok(BooleanArray::from_data(values.into(), validity.into()))\n\n}\n\n\n", "file_path": "src/io/parquet/read/boolean.rs", "rank": 74, "score": 244941.61365792557 }, { "content": "/// Checked multiplication of two primitive arrays. 
If the result from the\n\n/// multiplications overflows, the validity for that index is changed\n\n/// returned.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::mul::checked_mul;\n\n/// use arrow2::array::Int8Array;\n\n///\n\n/// let a = Int8Array::from(&[Some(100i8), Some(100i8), Some(100i8)]);\n\n/// let b = Int8Array::from(&[Some(1i8), Some(100i8), Some(1i8)]);\n\n/// let result = checked_mul(&a, &b).unwrap();\n\n/// let expected = Int8Array::from(&[Some(100i8), None, Some(100i8)]);\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn checked_mul<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + CheckedMul<Output = T> + Zero,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n let op = move |a: T, b: T| a.checked_mul(&b);\n\n\n\n binary_checked(lhs, rhs, lhs.data_type().clone(), op)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/mul.rs", "rank": 75, "score": 243907.11743754396 }, { "content": "/// Checked addition of two primitive arrays. 
If the result from the sum\n\n/// overflows, the validity for that index is changed to None\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::add::checked_add;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![Some(100i8), Some(100i8), Some(100i8)]).to(DataType::Int8);\n\n/// let b = Primitive::from(&vec![Some(0i8), Some(100i8), Some(0i8)]).to(DataType::Int8);\n\n/// let result = checked_add(&a, &b).unwrap();\n\n/// let expected = Primitive::from(&vec![Some(100i8), None, Some(100i8)]).to(DataType::Int8);\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn checked_add<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + CheckedAdd<Output = T> + Zero,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n let op = move |a: T, b: T| a.checked_add(&b);\n\n\n\n binary_checked(lhs, rhs, lhs.data_type().clone(), op)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/add.rs", "rank": 76, "score": 243904.29130890488 }, { "content": "/// Checked division of two primitive arrays. 
If the result from the division\n\n/// overflows, the result for the operation will change the validity array\n\n/// making this operation None\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::div::checked_div;\n\n/// use arrow2::array::Int8Array;\n\n///\n\n/// let a = Int8Array::from(&[Some(-100i8), Some(10i8)]);\n\n/// let b = Int8Array::from(&[Some(100i8), Some(0i8)]);\n\n/// let result = checked_div(&a, &b).unwrap();\n\n/// let expected = Int8Array::from(&[Some(-1i8), None]);\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn checked_div<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + CheckedDiv<Output = T> + Zero,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n let op = move |a: T, b: T| a.checked_div(&b);\n\n\n\n binary_checked(lhs, rhs, lhs.data_type().clone(), op)\n\n}\n\n\n\n// Implementation of ArrayDiv trait for PrimitiveArrays\n\nimpl<T> ArrayDiv<PrimitiveArray<T>> for PrimitiveArray<T>\n\nwhere\n\n T: NativeType + Div<Output = T> + NotI128,\n\n{\n", "file_path": "src/compute/arithmetics/basic/div.rs", "rank": 77, "score": 243903.29310490715 }, { "content": "/// Checked subtraction of two primitive arrays. 
If the result from the\n\n/// subtraction overflow, the validity for that index is changed\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::basic::sub::checked_sub;\n\n/// use arrow2::array::Int8Array;\n\n///\n\n/// let a = Int8Array::from(&[Some(100i8), Some(-100i8), Some(100i8)]);\n\n/// let b = Int8Array::from(&[Some(1i8), Some(100i8), Some(0i8)]);\n\n/// let result = checked_sub(&a, &b).unwrap();\n\n/// let expected = Int8Array::from(&[Some(99i8), None, Some(100i8)]);\n\n/// assert_eq!(result, expected);\n\n/// ```\n\npub fn checked_sub<T>(lhs: &PrimitiveArray<T>, rhs: &PrimitiveArray<T>) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: NativeType + CheckedSub<Output = T> + Zero,\n\n{\n\n if lhs.data_type() != rhs.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Arrays must have the same logical type\".to_string(),\n\n ));\n\n }\n\n\n\n let op = move |a: T, b: T| a.checked_sub(&b);\n\n\n\n binary_checked(lhs, rhs, lhs.data_type().clone(), op)\n\n}\n\n\n", "file_path": "src/compute/arithmetics/basic/sub.rs", "rank": 78, "score": 243903.1029004229 }, { "content": "fn lengths_equal<O: Offset>(lhs: &[O], rhs: &[O]) -> bool {\n\n // invariant from `base_equal`\n\n debug_assert_eq!(lhs.len(), rhs.len());\n\n\n\n if lhs.is_empty() {\n\n return true;\n\n }\n\n\n\n if lhs[0] == O::zero() && rhs[0] == O::zero() {\n\n return lhs == rhs;\n\n };\n\n\n\n // The expensive case, e.g.\n\n // [0, 2, 4, 6, 9] == [4, 6, 8, 10, 13]\n\n lhs.windows(2)\n\n .zip(rhs.windows(2))\n\n .all(|(lhs_offsets, rhs_offsets)| {\n\n // length of left == length of right\n\n (lhs_offsets[1] - lhs_offsets[0]) == (rhs_offsets[1] - rhs_offsets[0])\n\n })\n\n}\n\n\n", "file_path": "src/array/equal/list.rs", "rank": 79, "score": 243373.15333338696 }, { "content": "pub fn unary_utf8_boolean<O: Offset, F: Fn(&str) -> bool>(\n\n values: &Utf8Array<O>,\n\n op: F,\n\n) -> BooleanArray {\n\n let validity = values.validity().clone();\n\n\n\n let iterator = 
values.iter().map(|value| {\n\n if value.is_none() {\n\n return false;\n\n };\n\n op(value.unwrap())\n\n });\n\n let values = Bitmap::from_trusted_len_iter(iterator);\n\n BooleanArray::from_data(values, validity)\n\n}\n", "file_path": "src/compute/utils.rs", "rank": 80, "score": 242738.09781659208 }, { "content": "/// Creates a new [`Array`] with a [`Array::len`] of 0.\n\npub fn new_empty_array(data_type: DataType) -> Box<dyn Array> {\n\n match data_type {\n\n DataType::Null => Box::new(NullArray::new_empty()),\n\n DataType::Boolean => Box::new(BooleanArray::new_empty()),\n\n DataType::Int8 => Box::new(PrimitiveArray::<i8>::new_empty(data_type)),\n\n DataType::Int16 => Box::new(PrimitiveArray::<i16>::new_empty(data_type)),\n\n DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth) => {\n\n Box::new(PrimitiveArray::<i32>::new_empty(data_type))\n\n }\n\n DataType::Interval(IntervalUnit::DayTime) => {\n\n Box::new(PrimitiveArray::<days_ms>::new_empty(data_type))\n\n }\n\n DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, _)\n\n | DataType::Duration(_) => Box::new(PrimitiveArray::<i64>::new_empty(data_type)),\n", "file_path": "src/array/mod.rs", "rank": 81, "score": 239066.4023230544 }, { "content": "/// Checks if an array of type `datatype` can perform length operation\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::length::can_length;\n\n/// use arrow2::datatypes::{DataType};\n\n///\n\n/// let data_type = DataType::Utf8;\n\n/// assert_eq!(can_length(&data_type), true);\n\n///\n\n/// let data_type = DataType::Int8;\n\n/// assert_eq!(can_length(&data_type), false);\n\n/// ```\n\npub fn can_length(data_type: &DataType) -> bool {\n\n matches!(data_type, DataType::Utf8 | DataType::LargeUtf8)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn length_test_string<O: Offset>() {\n\n vec![\n\n (\n\n vec![Some(\"hello\"), Some(\" \"), 
None],\n\n vec![Some(5usize), Some(1), None],\n\n ),\n\n (vec![Some(\"💖\")], vec![Some(4)]),\n\n ]\n\n .into_iter()\n\n .for_each(|(input, expected)| {\n\n let array = Utf8Array::<O>::from(&input);\n\n let result = length(&array).unwrap();\n", "file_path": "src/compute/length.rs", "rank": 82, "score": 238247.0631882333 }, { "content": "/// let data_type = DataType::Int8;\n\n/// assert_eq!(can_year(&data_type), false);\n\n/// ```\n\npub fn can_year(data_type: &DataType) -> bool {\n\n matches!(\n\n data_type,\n\n DataType::Date32 | DataType::Date64 | DataType::Timestamp(_, None)\n\n )\n\n}\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn date64_hour() {\n\n let array = Primitive::<i64>::from(&[Some(1514764800000), None, Some(1550636625000)])\n\n .to(DataType::Date64);\n\n\n\n let result = hour(&array).unwrap();\n\n let expected = UInt32Array::from(&[Some(0), None, Some(4)]);\n\n assert_eq!(result, expected);\n\n }\n\n\n", "file_path": "src/compute/temporal.rs", "rank": 83, "score": 238233.02982523554 }, { "content": "/// let data_type = DataType::Int8;\n\n/// assert_eq!(can_hour(&data_type), false);\n\n/// ```\n\npub fn can_hour(data_type: &DataType) -> bool {\n\n matches!(\n\n data_type,\n\n DataType::Time32(TimeUnit::Second)\n\n | DataType::Time32(TimeUnit::Microsecond)\n\n | DataType::Time64(TimeUnit::Microsecond)\n\n | DataType::Time64(TimeUnit::Nanosecond)\n\n | DataType::Date32\n\n | DataType::Date64\n\n | DataType::Timestamp(_, None)\n\n )\n\n}\n\n\n", "file_path": "src/compute/temporal.rs", "rank": 84, "score": 238233.02982523554 }, { "content": "/// let data_type = DataType::Null;\n\n/// assert_eq!(can_substring(&data_type), false);\n\n/// ```\n\npub fn can_substring(data_type: &DataType) -> bool {\n\n matches!(data_type, DataType::LargeUtf8 | DataType::Utf8)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn with_nulls<O: Offset>() -> Result<()> {\n\n let cases = vec![\n\n // identity\n\n (\n\n vec![Some(\"hello\"), 
None, Some(\"word\")],\n\n 0,\n\n None,\n\n vec![Some(\"hello\"), None, Some(\"word\")],\n\n ),\n\n // 0 length -> Nothing\n\n (\n\n vec![Some(\"hello\"), None, Some(\"word\")],\n", "file_path": "src/compute/substring.rs", "rank": 85, "score": 238227.56456733105 }, { "content": "/// let data_type = DataType::Null;\n\n/// assert_eq!(can_hash(&data_type), false);\n\n/// ```\n\npub fn can_hash(data_type: &DataType) -> bool {\n\n matches!(\n\n data_type,\n\n DataType::Boolean\n\n | DataType::Int8\n\n | DataType::Int16\n\n | DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(_)\n\n | DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, _)\n\n | DataType::Duration(_)\n\n | DataType::Decimal(_, _)\n\n | DataType::UInt8\n\n | DataType::UInt16\n\n | DataType::UInt32\n\n | DataType::UInt64\n", "file_path": "src/compute/hash.rs", "rank": 86, "score": 238227.56456733105 }, { "content": "#[inline]\n\nfn build_is_valid(array: &dyn Array) -> IsValid {\n\n if let Some(validity) = array.validity() {\n\n Box::new(move |x| unsafe { validity.get_bit_unchecked(x) })\n\n } else {\n\n Box::new(move |_| true)\n\n }\n\n}\n\n\n", "file_path": "src/compute/sort/lex_sort.rs", "rank": 87, "score": 237621.2543381106 }, { "content": "fn build_compare(array: &dyn Array, sort_option: SortOptions) -> Result<Compare> {\n\n let is_valid = build_is_valid(array);\n\n let comparator = ord::build_compare(array, array)?;\n\n\n\n Ok(match (sort_option.descending, sort_option.nulls_first) {\n\n (true, true) => Box::new(move |i: usize, j: usize| match (is_valid(i), is_valid(j)) {\n\n (true, true) => match (comparator)(i, j) {\n\n Ordering::Equal => Ordering::Equal,\n\n other => other.reverse(),\n\n },\n\n (false, true) => Ordering::Less,\n\n (true, false) => Ordering::Greater,\n\n (false, false) => Ordering::Equal,\n\n }),\n\n (false, true) => Box::new(move |i: usize, j: usize| match (is_valid(i), is_valid(j)) {\n\n (true, true) 
=> match (comparator)(i, j) {\n\n Ordering::Equal => Ordering::Equal,\n\n other => other,\n\n },\n\n (false, true) => Ordering::Less,\n", "file_path": "src/compute/sort/lex_sort.rs", "rank": 88, "score": 237539.29822248765 }, { "content": "/// Returns a visual representation of multiple [`RecordBatch`]es.\n\npub fn write(batches: &[RecordBatch]) -> Result<String> {\n\n Ok(create_table(batches)?.to_string())\n\n}\n\n\n", "file_path": "src/io/print.rs", "rank": 89, "score": 235745.8645593382 }, { "content": "#[inline]\n\npub fn is_set(byte: u8, i: usize) -> bool {\n\n (byte & BIT_MASK[i]) != 0\n\n}\n\n\n\n/// Sets bit at position `i` in `byte`\n", "file_path": "src/bits/mod.rs", "rank": 90, "score": 235529.8738054048 }, { "content": "/// Checks if an array of type `datatype` can be compared with another array of\n\n/// the same type.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::comparison::can_compare;\n\n/// use arrow2::datatypes::{DataType};\n\n///\n\n/// let data_type = DataType::Int8;\n\n/// assert_eq!(can_compare(&data_type), true);\n\n///\n\n/// let data_type = DataType::LargeBinary;\n\n/// assert_eq!(can_compare(&data_type), false)\n\n/// ```\n\npub fn can_compare(data_type: &DataType) -> bool {\n\n matches!(\n\n data_type,\n\n DataType::Int8\n\n | DataType::Int16\n\n | DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(_)\n\n | DataType::Int64\n\n | DataType::Timestamp(_, None)\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Duration(_)\n\n | DataType::UInt8\n\n | DataType::UInt16\n\n | DataType::UInt32\n\n | DataType::UInt64\n\n | DataType::Float32\n\n | DataType::Float64\n", "file_path": "src/compute/comparison/mod.rs", "rank": 91, "score": 235406.04072677152 }, { "content": "/// Checks if an array of type `datatype` can perform take operation\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::take::can_take;\n\n/// use arrow2::datatypes::{DataType};\n\n///\n\n/// let 
data_type = DataType::Int8;\n\n/// assert_eq!(can_take(&data_type), true);\n\n/// ```\n\npub fn can_take(data_type: &DataType) -> bool {\n\n match data_type {\n\n DataType::Null\n\n | DataType::Boolean\n\n | DataType::Int8\n\n | DataType::Int16\n\n | DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(IntervalUnit::YearMonth)\n\n | DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Duration(_)\n\n | DataType::Timestamp(_, _)\n\n | DataType::UInt8\n\n | DataType::UInt16\n\n | DataType::UInt32\n\n | DataType::UInt64\n\n | DataType::Float16\n", "file_path": "src/compute/take/mod.rs", "rank": 92, "score": 235405.5646707794 }, { "content": "/// Checks if an array of type `datatype` can be sorted\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::sort::can_sort;\n\n/// use arrow2::datatypes::{DataType};\n\n///\n\n/// let data_type = DataType::Int8;\n\n/// assert_eq!(can_sort(&data_type), true);\n\n///\n\n/// let data_type = DataType::LargeBinary;\n\n/// assert_eq!(can_sort(&data_type), false)\n\n/// ```\n\npub fn can_sort(data_type: &DataType) -> bool {\n\n match data_type {\n\n DataType::Boolean\n\n | DataType::Int8\n\n | DataType::Int16\n\n | DataType::Int32\n\n | DataType::Date32\n\n | DataType::Time32(_)\n\n | DataType::Interval(_)\n\n | DataType::Int64\n\n | DataType::Date64\n\n | DataType::Time64(_)\n\n | DataType::Timestamp(_, None)\n\n | DataType::Duration(_)\n\n | DataType::UInt8\n\n | DataType::UInt16\n\n | DataType::UInt32\n\n | DataType::UInt64\n\n | DataType::Float32\n\n | DataType::Float64\n", "file_path": "src/compute/sort/mod.rs", "rank": 93, "score": 235405.20609414304 }, { "content": "/// Negates values from array.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use arrow2::compute::arithmetics::negate;\n\n/// use arrow2::array::Primitive;\n\n/// use arrow2::datatypes::DataType;\n\n///\n\n/// let a = Primitive::from(&vec![None, Some(6), None, 
Some(7)]).to(DataType::Int32);\n\n/// let result = negate(&a);\n\n/// let expected = Primitive::from(&vec![None, Some(-6), None, Some(-7)]).to(DataType::Int32);\n\n/// assert_eq!(result, expected)\n\n/// ```\n\npub fn negate<T>(array: &PrimitiveArray<T>) -> PrimitiveArray<T>\n\nwhere\n\n T: NativeType + Neg<Output = T>,\n\n{\n\n unary(array, |a| -a, array.data_type().clone())\n\n}\n\n\n", "file_path": "src/compute/arithmetics/mod.rs", "rank": 94, "score": 235238.15558440273 }, { "content": "/// Evaluate `op(left, right)` for [`PrimitiveArray`] and scalar using\n\n/// a specified comparison function.\n\npub fn compare_op_scalar<T, F>(lhs: &PrimitiveArray<T>, rhs: T, op: F) -> Result<BooleanArray>\n\nwhere\n\n T: NativeType,\n\n F: Fn(T, T) -> bool,\n\n{\n\n let validity = lhs.validity().clone();\n\n\n\n let mut values = MutableBuffer::from_len_zeroed((lhs.len() + 7) / 8);\n\n\n\n let lhs_chunks_iter = lhs.values().chunks_exact(8);\n\n let lhs_remainder = lhs_chunks_iter.remainder();\n\n let chunks = lhs.len() / 8;\n\n\n\n values[..chunks]\n\n .iter_mut()\n\n .zip(lhs_chunks_iter)\n\n .for_each(|(byte, chunk)| {\n\n chunk.iter().enumerate().for_each(|(i, &c_i)| {\n\n *byte |= if op(c_i, rhs) { 1 << i } else { 0 };\n\n });\n", "file_path": "src/compute/comparison/primitive.rs", "rank": 95, "score": 234152.28979209188 }, { "content": "pub fn iter_to_array<O, I, E>(mut iter: I, metadata: &ColumnChunkMetaData) -> Result<Utf8Array<O>>\n\nwhere\n\n ArrowError: From<E>,\n\n O: Offset,\n\n I: Iterator<Item = std::result::Result<CompressedPage, E>>,\n\n{\n\n let capacity = metadata.num_values() as usize;\n\n let mut values = MutableBuffer::<u8>::with_capacity(0);\n\n let mut offsets = MutableBuffer::<O>::with_capacity(1 + capacity);\n\n offsets.push(O::default());\n\n let mut validity = MutableBitmap::with_capacity(capacity);\n\n iter.try_for_each(|page| {\n\n extend_from_page(\n\n page?,\n\n metadata.descriptor(),\n\n &mut offsets,\n\n &mut values,\n\n &mut validity,\n\n 
)\n\n })?;\n", "file_path": "src/io/parquet/read/utf8.rs", "rank": 96, "score": 234141.71248010453 }, { "content": "pub fn iter_to_array<O, I, E>(mut iter: I, metadata: &ColumnChunkMetaData) -> Result<BinaryArray<O>>\n\nwhere\n\n ArrowError: From<E>,\n\n O: Offset,\n\n I: Iterator<Item = std::result::Result<CompressedPage, E>>,\n\n{\n\n let capacity = metadata.num_values() as usize;\n\n let mut values = MutableBuffer::<u8>::with_capacity(0);\n\n let mut offsets = MutableBuffer::<O>::with_capacity(1 + capacity);\n\n offsets.push(O::default());\n\n let mut validity = MutableBitmap::with_capacity(capacity);\n\n iter.try_for_each(|page| {\n\n extend_from_page(\n\n page?,\n\n metadata.descriptor(),\n\n &mut offsets,\n\n &mut values,\n\n &mut validity,\n\n )\n\n })?;\n\n\n\n Ok(BinaryArray::from_data(\n\n offsets.into(),\n\n values.into(),\n\n validity.into(),\n\n ))\n\n}\n", "file_path": "src/io/parquet/read/binary.rs", "rank": 97, "score": 234141.71248010453 }, { "content": "pub fn is_type_nullable(type_: &ParquetType) -> bool {\n\n is_nullable(type_.get_basic_info())\n\n}\n", "file_path": "src/io/parquet/read/schema/mod.rs", "rank": 98, "score": 232658.714333253 }, { "content": "\n\n #[test]\n\n fn consistency() {\n\n use crate::array::new_null_array;\n\n use crate::datatypes::DataType::*;\n\n use crate::datatypes::TimeUnit;\n\n\n\n let datatypes = vec![\n\n Null,\n\n Boolean,\n\n UInt8,\n\n UInt16,\n\n UInt32,\n\n UInt64,\n\n Int8,\n\n Int16,\n\n Int32,\n\n Int64,\n\n Float32,\n\n Float64,\n", "file_path": "src/compute/take/mod.rs", "rank": 99, "score": 50.61130299928626 } ]
Rust
meilisearch-core/src/update/settings_update.rs
irevoire/MeiliSearch
66c455413695ae8bda7af9e909077b9402d65b2e
use std::{borrow::Cow, collections::{BTreeMap, BTreeSet}}; use heed::Result as ZResult; use fst::{SetBuilder, set::OpBuilder}; use sdset::SetBuf; use meilisearch_schema::Schema; use meilisearch_tokenizer::analyzer::{Analyzer, AnalyzerConfig}; use crate::database::{MainT, UpdateT}; use crate::settings::{UpdateState, SettingsUpdate, RankingRule}; use crate::update::documents_addition::reindex_all_documents; use crate::update::{next_update_id, Update}; use crate::{store, MResult, Error}; pub fn push_settings_update( writer: &mut heed::RwTxn<UpdateT>, updates_store: store::Updates, updates_results_store: store::UpdatesResults, settings: SettingsUpdate, ) -> ZResult<u64> { let last_update_id = next_update_id(writer, updates_store, updates_results_store)?; let update = Update::settings(settings); updates_store.put_update(writer, last_update_id, &update)?; Ok(last_update_id) } pub fn apply_settings_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, settings: SettingsUpdate, ) -> MResult<()> { let mut must_reindex = false; let mut schema = match index.main.schema(writer)? 
{ Some(schema) => schema, None => { match settings.primary_key.clone() { UpdateState::Update(id) => Schema::with_primary_key(&id), _ => return Err(Error::MissingPrimaryKey) } } }; match settings.ranking_rules { UpdateState::Update(v) => { let ranked_field: Vec<&str> = v.iter().filter_map(RankingRule::field).collect(); schema.update_ranked(&ranked_field)?; index.main.put_ranking_rules(writer, &v)?; must_reindex = true; }, UpdateState::Clear => { index.main.delete_ranking_rules(writer)?; schema.clear_ranked(); must_reindex = true; }, UpdateState::Nothing => (), } match settings.distinct_attribute { UpdateState::Update(v) => { let field_id = schema.insert(&v)?; index.main.put_distinct_attribute(writer, field_id)?; }, UpdateState::Clear => { index.main.delete_distinct_attribute(writer)?; }, UpdateState::Nothing => (), } match settings.searchable_attributes.clone() { UpdateState::Update(v) => { if v.iter().any(|e| e == "*") || v.is_empty() { schema.set_all_searchable(); } else { schema.update_searchable(v)?; } must_reindex = true; }, UpdateState::Clear => { schema.set_all_searchable(); must_reindex = true; }, UpdateState::Nothing => (), } match settings.displayed_attributes.clone() { UpdateState::Update(v) => { if v.contains("*") || v.is_empty() { schema.set_all_displayed(); } else { schema.update_displayed(v)? } }, UpdateState::Clear => { schema.set_all_displayed(); }, UpdateState::Nothing => (), } match settings.attributes_for_faceting { UpdateState::Update(attrs) => { apply_attributes_for_faceting_update(writer, index, &mut schema, &attrs)?; must_reindex = true; }, UpdateState::Clear => { index.main.delete_attributes_for_faceting(writer)?; index.facets.clear(writer)?; }, UpdateState::Nothing => (), } index.main.put_schema(writer, &schema)?; match settings.stop_words { UpdateState::Update(stop_words) => { if apply_stop_words_update(writer, index, stop_words)? 
{ must_reindex = true; } }, UpdateState::Clear => { if apply_stop_words_update(writer, index, BTreeSet::new())? { must_reindex = true; } }, UpdateState::Nothing => (), } match settings.synonyms { UpdateState::Update(synonyms) => apply_synonyms_update(writer, index, synonyms)?, UpdateState::Clear => apply_synonyms_update(writer, index, BTreeMap::new())?, UpdateState::Nothing => (), } if must_reindex { reindex_all_documents(writer, index)?; } Ok(()) } fn apply_attributes_for_faceting_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, schema: &mut Schema, attributes: &[String] ) -> MResult<()> { let mut attribute_ids = Vec::new(); for name in attributes { attribute_ids.push(schema.insert(name)?); } let attributes_for_faceting = SetBuf::from_dirty(attribute_ids); index.main.put_attributes_for_faceting(writer, &attributes_for_faceting)?; Ok(()) } pub fn apply_stop_words_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, stop_words: BTreeSet<String>, ) -> MResult<bool> { let mut must_reindex = false; let old_stop_words: BTreeSet<String> = index.main .stop_words_fst(writer)? .stream() .into_strs()? 
.into_iter() .collect(); let deletion: BTreeSet<String> = old_stop_words.difference(&stop_words).cloned().collect(); let addition: BTreeSet<String> = stop_words.difference(&old_stop_words).cloned().collect(); if !addition.is_empty() { apply_stop_words_addition(writer, index, addition)?; } if !deletion.is_empty() { must_reindex = true; apply_stop_words_deletion(writer, index, deletion)?; } let words_fst = index.main.words_fst(writer)?; if !words_fst.is_empty() { let stop_words = fst::Set::from_iter(stop_words)?; let op = OpBuilder::new() .add(&words_fst) .add(&stop_words) .difference(); let mut builder = fst::SetBuilder::memory(); builder.extend_stream(op)?; let words_fst = builder.into_set(); index.main.put_words_fst(writer, &words_fst)?; index.main.put_stop_words_fst(writer, &stop_words)?; } Ok(must_reindex) } fn apply_stop_words_addition( writer: &mut heed::RwTxn<MainT>, index: &store::Index, addition: BTreeSet<String>, ) -> MResult<()> { let main_store = index.main; let postings_lists_store = index.postings_lists; let mut stop_words_builder = SetBuilder::memory(); for word in addition { stop_words_builder.insert(&word)?; postings_lists_store.del_postings_list(writer, word.as_bytes())?; } let delta_stop_words = stop_words_builder.into_set(); let words_fst = main_store.words_fst(writer)?; if !words_fst.is_empty() { let op = OpBuilder::new() .add(&words_fst) .add(&delta_stop_words) .difference(); let mut word_fst_builder = SetBuilder::memory(); word_fst_builder.extend_stream(op)?; let word_fst = word_fst_builder.into_set(); main_store.put_words_fst(writer, &word_fst)?; } let stop_words_fst = main_store.stop_words_fst(writer)?; let op = OpBuilder::new() .add(&stop_words_fst) .add(&delta_stop_words) .r#union(); let mut stop_words_builder = SetBuilder::memory(); stop_words_builder.extend_stream(op)?; let stop_words_fst = stop_words_builder.into_set(); main_store.put_stop_words_fst(writer, &stop_words_fst)?; Ok(()) } fn apply_stop_words_deletion( writer: &mut 
heed::RwTxn<MainT>, index: &store::Index, deletion: BTreeSet<String>, ) -> MResult<()> { let mut stop_words_builder = SetBuilder::memory(); for word in deletion { stop_words_builder.insert(&word)?; } let delta_stop_words = stop_words_builder.into_set(); let stop_words_fst = index.main.stop_words_fst(writer)?; let op = OpBuilder::new() .add(&stop_words_fst) .add(&delta_stop_words) .difference(); let mut stop_words_builder = SetBuilder::memory(); stop_words_builder.extend_stream(op)?; let stop_words_fst = stop_words_builder.into_set(); Ok(index.main.put_stop_words_fst(writer, &stop_words_fst)?) } pub fn apply_synonyms_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, synonyms: BTreeMap<String, Vec<String>>, ) -> MResult<()> { let main_store = index.main; let synonyms_store = index.synonyms; let stop_words = index.main.stop_words_fst(writer)?.map_data(Cow::into_owned)?; let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words)); fn normalize<T: AsRef<[u8]>>(analyzer: &Analyzer<T>, text: &str) -> String { analyzer.analyze(&text) .tokens() .fold(String::new(), |s, t| s + t.text()) } let synonyms: BTreeMap<String, Vec<String>> = synonyms.into_iter().map( |(word, alternatives)| { let word = normalize(&analyzer, &word); let alternatives = alternatives.into_iter().map(|text| normalize(&analyzer, &text)).collect(); (word, alternatives) }).collect(); let mut synonyms_builder = SetBuilder::memory(); synonyms_store.clear(writer)?; for (word, alternatives) in synonyms { synonyms_builder.insert(&word)?; let alternatives = { let alternatives = SetBuf::from_dirty(alternatives); let mut alternatives_builder = SetBuilder::memory(); alternatives_builder.extend_iter(alternatives)?; alternatives_builder.into_set() }; synonyms_store.put_synonyms(writer, word.as_bytes(), &alternatives)?; } let synonyms_set = synonyms_builder.into_set(); main_store.put_synonyms_fst(writer, &synonyms_set)?; Ok(()) }
use std::{borrow::Cow, collections::{BTreeMap, BTreeSet}}; use heed::Result as ZResult; use fst::{SetBuilder, set::OpBuilder}; use sdset::SetBuf; use meilisearch_schema::Schema; use meilisearch_tokenizer::analyzer::{Analyzer, AnalyzerConfig}; use crate::database::{MainT, UpdateT}; use crate::settings::{UpdateState, SettingsUpdate, RankingRule}; use crate::update::documents_addition::reindex_all_documents; use crate::update::{next_update_id, Update}; use crate::{store, MResult, Error}; pub fn push_settings_update( writer: &mut heed::RwTxn<UpdateT>, updates_store: store::Updates, updates_results_store: store::UpdatesResults, settings: SettingsUpdate, ) -> ZResult<u64> { let last_update_id = next_update_id(writer, updates_store, updates_results_store)?; let update = Update::settings(settings); updates_store.put_update(writer, last_update_id, &update)?; Ok(last_update_id) }
fn apply_attributes_for_faceting_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, schema: &mut Schema, attributes: &[String] ) -> MResult<()> { let mut attribute_ids = Vec::new(); for name in attributes { attribute_ids.push(schema.insert(name)?); } let attributes_for_faceting = SetBuf::from_dirty(attribute_ids); index.main.put_attributes_for_faceting(writer, &attributes_for_faceting)?; Ok(()) } pub fn apply_stop_words_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, stop_words: BTreeSet<String>, ) -> MResult<bool> { let mut must_reindex = false; let old_stop_words: BTreeSet<String> = index.main .stop_words_fst(writer)? .stream() .into_strs()? .into_iter() .collect(); let deletion: BTreeSet<String> = old_stop_words.difference(&stop_words).cloned().collect(); let addition: BTreeSet<String> = stop_words.difference(&old_stop_words).cloned().collect(); if !addition.is_empty() { apply_stop_words_addition(writer, index, addition)?; } if !deletion.is_empty() { must_reindex = true; apply_stop_words_deletion(writer, index, deletion)?; } let words_fst = index.main.words_fst(writer)?; if !words_fst.is_empty() { let stop_words = fst::Set::from_iter(stop_words)?; let op = OpBuilder::new() .add(&words_fst) .add(&stop_words) .difference(); let mut builder = fst::SetBuilder::memory(); builder.extend_stream(op)?; let words_fst = builder.into_set(); index.main.put_words_fst(writer, &words_fst)?; index.main.put_stop_words_fst(writer, &stop_words)?; } Ok(must_reindex) } fn apply_stop_words_addition( writer: &mut heed::RwTxn<MainT>, index: &store::Index, addition: BTreeSet<String>, ) -> MResult<()> { let main_store = index.main; let postings_lists_store = index.postings_lists; let mut stop_words_builder = SetBuilder::memory(); for word in addition { stop_words_builder.insert(&word)?; postings_lists_store.del_postings_list(writer, word.as_bytes())?; } let delta_stop_words = stop_words_builder.into_set(); let words_fst = main_store.words_fst(writer)?; if 
!words_fst.is_empty() { let op = OpBuilder::new() .add(&words_fst) .add(&delta_stop_words) .difference(); let mut word_fst_builder = SetBuilder::memory(); word_fst_builder.extend_stream(op)?; let word_fst = word_fst_builder.into_set(); main_store.put_words_fst(writer, &word_fst)?; } let stop_words_fst = main_store.stop_words_fst(writer)?; let op = OpBuilder::new() .add(&stop_words_fst) .add(&delta_stop_words) .r#union(); let mut stop_words_builder = SetBuilder::memory(); stop_words_builder.extend_stream(op)?; let stop_words_fst = stop_words_builder.into_set(); main_store.put_stop_words_fst(writer, &stop_words_fst)?; Ok(()) } fn apply_stop_words_deletion( writer: &mut heed::RwTxn<MainT>, index: &store::Index, deletion: BTreeSet<String>, ) -> MResult<()> { let mut stop_words_builder = SetBuilder::memory(); for word in deletion { stop_words_builder.insert(&word)?; } let delta_stop_words = stop_words_builder.into_set(); let stop_words_fst = index.main.stop_words_fst(writer)?; let op = OpBuilder::new() .add(&stop_words_fst) .add(&delta_stop_words) .difference(); let mut stop_words_builder = SetBuilder::memory(); stop_words_builder.extend_stream(op)?; let stop_words_fst = stop_words_builder.into_set(); Ok(index.main.put_stop_words_fst(writer, &stop_words_fst)?) 
} pub fn apply_synonyms_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, synonyms: BTreeMap<String, Vec<String>>, ) -> MResult<()> { let main_store = index.main; let synonyms_store = index.synonyms; let stop_words = index.main.stop_words_fst(writer)?.map_data(Cow::into_owned)?; let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words)); fn normalize<T: AsRef<[u8]>>(analyzer: &Analyzer<T>, text: &str) -> String { analyzer.analyze(&text) .tokens() .fold(String::new(), |s, t| s + t.text()) } let synonyms: BTreeMap<String, Vec<String>> = synonyms.into_iter().map( |(word, alternatives)| { let word = normalize(&analyzer, &word); let alternatives = alternatives.into_iter().map(|text| normalize(&analyzer, &text)).collect(); (word, alternatives) }).collect(); let mut synonyms_builder = SetBuilder::memory(); synonyms_store.clear(writer)?; for (word, alternatives) in synonyms { synonyms_builder.insert(&word)?; let alternatives = { let alternatives = SetBuf::from_dirty(alternatives); let mut alternatives_builder = SetBuilder::memory(); alternatives_builder.extend_iter(alternatives)?; alternatives_builder.into_set() }; synonyms_store.put_synonyms(writer, word.as_bytes(), &alternatives)?; } let synonyms_set = synonyms_builder.into_set(); main_store.put_synonyms_fst(writer, &synonyms_set)?; Ok(()) }
pub fn apply_settings_update( writer: &mut heed::RwTxn<MainT>, index: &store::Index, settings: SettingsUpdate, ) -> MResult<()> { let mut must_reindex = false; let mut schema = match index.main.schema(writer)? { Some(schema) => schema, None => { match settings.primary_key.clone() { UpdateState::Update(id) => Schema::with_primary_key(&id), _ => return Err(Error::MissingPrimaryKey) } } }; match settings.ranking_rules { UpdateState::Update(v) => { let ranked_field: Vec<&str> = v.iter().filter_map(RankingRule::field).collect(); schema.update_ranked(&ranked_field)?; index.main.put_ranking_rules(writer, &v)?; must_reindex = true; }, UpdateState::Clear => { index.main.delete_ranking_rules(writer)?; schema.clear_ranked(); must_reindex = true; }, UpdateState::Nothing => (), } match settings.distinct_attribute { UpdateState::Update(v) => { let field_id = schema.insert(&v)?; index.main.put_distinct_attribute(writer, field_id)?; }, UpdateState::Clear => { index.main.delete_distinct_attribute(writer)?; }, UpdateState::Nothing => (), } match settings.searchable_attributes.clone() { UpdateState::Update(v) => { if v.iter().any(|e| e == "*") || v.is_empty() { schema.set_all_searchable(); } else { schema.update_searchable(v)?; } must_reindex = true; }, UpdateState::Clear => { schema.set_all_searchable(); must_reindex = true; }, UpdateState::Nothing => (), } match settings.displayed_attributes.clone() { UpdateState::Update(v) => { if v.contains("*") || v.is_empty() { schema.set_all_displayed(); } else { schema.update_displayed(v)? 
} }, UpdateState::Clear => { schema.set_all_displayed(); }, UpdateState::Nothing => (), } match settings.attributes_for_faceting { UpdateState::Update(attrs) => { apply_attributes_for_faceting_update(writer, index, &mut schema, &attrs)?; must_reindex = true; }, UpdateState::Clear => { index.main.delete_attributes_for_faceting(writer)?; index.facets.clear(writer)?; }, UpdateState::Nothing => (), } index.main.put_schema(writer, &schema)?; match settings.stop_words { UpdateState::Update(stop_words) => { if apply_stop_words_update(writer, index, stop_words)? { must_reindex = true; } }, UpdateState::Clear => { if apply_stop_words_update(writer, index, BTreeSet::new())? { must_reindex = true; } }, UpdateState::Nothing => (), } match settings.synonyms { UpdateState::Update(synonyms) => apply_synonyms_update(writer, index, synonyms)?, UpdateState::Clear => apply_synonyms_update(writer, index, BTreeMap::new())?, UpdateState::Nothing => (), } if must_reindex { reindex_all_documents(writer, index)?; } Ok(()) }
function_block-full_function
[]
Rust
2020/day-12/src/main.rs
dstoza/advent-2017
22de531632c1633814ed1d2b9827590af989fb6e
#![deny(clippy::all, clippy::pedantic)] use std::{ env, fs::File, io::{BufRead, BufReader}, }; #[derive(Clone, Copy)] enum Direction { North = 0, East = 1, South = 2, West = 3, } impl Direction { fn from_i32(value: i32) -> Self { match value { 0 => Direction::North, 1 => Direction::East, 2 => Direction::South, 3 => Direction::West, _ => panic!("Unexpected value {}", value), } } } enum Rotation { Right, Left, } enum Mode { Ship, Waypoint, } struct Navigator { mode: Mode, x: i32, y: i32, direction: Direction, waypoint_x: i32, waypoint_y: i32, } impl Navigator { fn new(mode: Mode) -> Self { Self { mode, x: 0, y: 0, direction: Direction::East, waypoint_x: 10, waypoint_y: 1, } } fn translate(&mut self, direction: Direction, amount: i32) { let (x, y) = match self.mode { Mode::Ship => (&mut self.x, &mut self.y), Mode::Waypoint => (&mut self.waypoint_x, &mut self.waypoint_y), }; match direction { Direction::North => { *y += amount; } Direction::East => { *x += amount; } Direction::South => { *y -= amount; } Direction::West => { *x -= amount; } }; } fn rotate_waypoint_clockwise(&mut self) { let (x, y) = (self.waypoint_y, -self.waypoint_x); self.waypoint_x = x; self.waypoint_y = y; } fn turn(&mut self, rotation: &Rotation, amount: i32) { let clockwise_amount = match rotation { Rotation::Right => amount, Rotation::Left => 360 - amount, }; let direction = self.direction as i32 + clockwise_amount / 90; for _ in 0..(clockwise_amount / 90) { self.rotate_waypoint_clockwise(); } self.direction = Direction::from_i32(direction % 4); } fn move_forward(&mut self, amount: i32) { match self.mode { Mode::Ship => self.translate(self.direction, amount), Mode::Waypoint => { self.x += self.waypoint_x * amount; self.y += self.waypoint_y * amount; } } } fn parse_line(&mut self, line: &str) { let amount = line[1..].parse().expect("Failed to parse amount as i32"); match line.as_bytes()[0] { b'N' => self.translate(Direction::North, amount), b'E' => self.translate(Direction::East, amount), b'S' => 
self.translate(Direction::South, amount), b'W' => self.translate(Direction::West, amount), b'L' => self.turn(&Rotation::Left, amount), b'R' => self.turn(&Rotation::Right, amount), b'F' => self.move_forward(amount), _ => panic!("Unexpected prefix {}", line.as_bytes()[0]), } } fn get_distance(&self) -> i32 { self.x.abs() + self.y.abs() } } fn main() { let args: Vec<String> = env::args().collect(); if args.len() < 2 || args.len() > 3 { return; } let mode = match args[2].as_str() { "ship" => Mode::Ship, "waypoint" => Mode::Waypoint, _ => panic!("Unexpected mode {}", args[2].as_str()), }; let filename = &args[1]; let file = File::open(filename).unwrap_or_else(|_| panic!("Failed to open file {}", filename)); let mut reader = BufReader::new(file); let mut navigator = Navigator::new(mode); let mut line = String::new(); loop { let bytes = reader .read_line(&mut line) .unwrap_or_else(|_| panic!("Failed to read line")); if bytes == 0 { break; } navigator.parse_line(line.trim()); line.clear(); } println!("Distance: {}", navigator.get_distance()); }
#![deny(clippy::all, clippy::pedantic)] use std::{ env, fs::File, io::{BufRead, BufReader}, }; #[derive(Clone, Copy)] enum Direction { North = 0, East = 1, South = 2, West = 3, } impl Direction { fn from_i32(value: i32) -> Self { match value { 0 => Direction::North, 1 => Direction::East, 2 => Direction::South, 3 => Direction::West, _ => panic!("Unexpected value {}", value), } } } enum Rotation { Right, Left, } enum Mode { Ship, Waypoint, } struct Navigator { mode: Mode, x: i32, y: i32, direction: Direction, waypoint_x: i32, waypoint_y: i32, } impl Navigator { fn new(mode: Mode) -> Self { Self { mode, x: 0, y: 0, direction: Direction::East, waypoint_x: 10, waypoint_y: 1, } } fn translate(&mut self, direction: Direction, amount: i32) { let (x, y) = match self.mode { Mode::Ship => (&mut self.x, &mut self.y), Mode::Waypoint => (&mut self.waypoint_x, &mut self.waypoint_y), }; match direction { Direction::North => { *y += amount; } Direction::East => { *x += amount; } Direction::South => { *y -= amount; } Direction::West => { *x -= amount; } }; } fn rotate_waypoint_clockwise(&mut self) { let (x, y) = (self.waypoint_y, -self.waypoint_x); self.waypoint_x = x; self.waypoint_y = y; }
fn move_forward(&mut self, amount: i32) { match self.mode { Mode::Ship => self.translate(self.direction, amount), Mode::Waypoint => { self.x += self.waypoint_x * amount; self.y += self.waypoint_y * amount; } } } fn parse_line(&mut self, line: &str) { let amount = line[1..].parse().expect("Failed to parse amount as i32"); match line.as_bytes()[0] { b'N' => self.translate(Direction::North, amount), b'E' => self.translate(Direction::East, amount), b'S' => self.translate(Direction::South, amount), b'W' => self.translate(Direction::West, amount), b'L' => self.turn(&Rotation::Left, amount), b'R' => self.turn(&Rotation::Right, amount), b'F' => self.move_forward(amount), _ => panic!("Unexpected prefix {}", line.as_bytes()[0]), } } fn get_distance(&self) -> i32 { self.x.abs() + self.y.abs() } } fn main() { let args: Vec<String> = env::args().collect(); if args.len() < 2 || args.len() > 3 { return; } let mode = match args[2].as_str() { "ship" => Mode::Ship, "waypoint" => Mode::Waypoint, _ => panic!("Unexpected mode {}", args[2].as_str()), }; let filename = &args[1]; let file = File::open(filename).unwrap_or_else(|_| panic!("Failed to open file {}", filename)); let mut reader = BufReader::new(file); let mut navigator = Navigator::new(mode); let mut line = String::new(); loop { let bytes = reader .read_line(&mut line) .unwrap_or_else(|_| panic!("Failed to read line")); if bytes == 0 { break; } navigator.parse_line(line.trim()); line.clear(); } println!("Distance: {}", navigator.get_distance()); }
fn turn(&mut self, rotation: &Rotation, amount: i32) { let clockwise_amount = match rotation { Rotation::Right => amount, Rotation::Left => 360 - amount, }; let direction = self.direction as i32 + clockwise_amount / 90; for _ in 0..(clockwise_amount / 90) { self.rotate_waypoint_clockwise(); } self.direction = Direction::from_i32(direction % 4); }
function_block-full_function
[ { "content": "fn roll_die(die: &mut i32) -> i32 {\n\n let roll = *die;\n\n *die = (*die % 100) + 1;\n\n roll\n\n}\n\n\n", "file_path": "2021/day-21/src/main.rs", "rank": 0, "score": 192315.6612448546 }, { "content": "fn get_triangle_value(value: i32) -> i32 {\n\n (value * (value + 1)) / 2\n\n}\n\n\n", "file_path": "2021/day-07/src/main.rs", "rank": 1, "score": 179326.4307635787 }, { "content": "fn run_generation(lines: &mut [Vec<u8>]) -> i32 {\n\n for row in 0..lines.len() {\n\n for column in 0..lines[0].len() {\n\n lines[row][column] += 1;\n\n flash_cell(lines, row, column);\n\n }\n\n }\n\n\n\n let mut flashes = 0;\n\n for line in lines {\n\n for cell in line {\n\n if *cell > 9 {\n\n flashes += 1;\n\n *cell = 0;\n\n }\n\n }\n\n }\n\n flashes\n\n}\n\n\n", "file_path": "2021/day-11/src/main.rs", "rank": 2, "score": 165826.44745480944 }, { "content": "fn run_program(program: &mut Vec<Instruction>, flip_pc: Option<usize>) -> Result<i32, i32> {\n\n let mut accumulator = 0;\n\n let mut pc = 0_usize;\n\n loop {\n\n let instruction = &mut program[pc as usize];\n\n\n\n if instruction.visited {\n\n return Err(accumulator);\n\n }\n\n\n\n instruction.visited = true;\n\n instruction.operation.execute(\n\n flip_pc.map_or(false, |flip_pc| flip_pc == pc),\n\n &mut accumulator,\n\n &mut pc,\n\n );\n\n\n\n if pc == program.len() {\n\n return Ok(accumulator);\n\n }\n\n\n\n if pc > program.len() {\n\n return Err(-1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "2020/day-08/src/main.rs", "rank": 3, "score": 156456.65309320093 }, { "content": "fn find_minimal_fuel(positions: &[i32], use_triangle_fuel_consumption: bool) -> i32 {\n\n let cumulative_sums = get_cumulative_sums(positions);\n\n\n\n let mut middle = positions[positions.len() / 2];\n\n let lesser_end = positions.partition_point(|x| *x < middle);\n\n let greater_start = positions.partition_point(|x| *x <= middle);\n\n\n\n let (mut lesser_fuel, mut greater_fuel) = if use_triangle_fuel_consumption {\n\n 
calculate_fuel_triangle(positions, middle, lesser_end, greater_start)\n\n } else {\n\n calculate_fuel_linear(&cumulative_sums, middle, lesser_end, greater_start)\n\n };\n\n let mut total_fuel = lesser_fuel + greater_fuel;\n\n\n\n loop {\n\n if lesser_fuel > greater_fuel {\n\n middle -= 1;\n\n } else {\n\n middle += 1;\n\n }\n", "file_path": "2021/day-07/src/main.rs", "rank": 4, "score": 151324.79554537364 }, { "content": "fn number_is_valid(value: &str, min: i32, max: i32) -> bool {\n\n match value.parse::<i32>() {\n\n Ok(number) => number >= min && number <= max,\n\n Err(_) => false,\n\n }\n\n}\n\n\n\nimpl PassportParser {\n\n fn new(validate_values: bool) -> Self {\n\n Self {\n\n validate_values,\n\n fields: Fields::empty(),\n\n }\n\n }\n\n\n\n fn birth_year_if_valid(&self, value: &str) -> Fields {\n\n if !self.validate_values || number_is_valid(value, 1920, 2002) {\n\n Fields::BIRTH_YEAR\n\n } else {\n\n Fields::empty()\n", "file_path": "2020/day-04/src/main.rs", "rank": 5, "score": 151237.46186947642 }, { "content": "fn get_output_value(line: &str) -> i32 {\n\n let (digit_bits, output_bits) = parse_line(line);\n\n output_bits.iter().fold(0, |value, bits| {\n\n let (digit, _bits) = digit_bits\n\n .iter()\n\n .enumerate()\n\n .find(|(_position, b)| **b == *bits)\n\n .unwrap();\n\n let digit: i32 = digit.try_into().unwrap();\n\n value * 10 + digit\n\n })\n\n}\n\n\n", "file_path": "2021/day-08/src/main.rs", "rank": 6, "score": 149587.7911309327 }, { "content": "enum Mode {\n\n Address,\n\n Value,\n\n}\n\n\n", "file_path": "2020/day-14/src/main.rs", "rank": 8, "score": 148627.95147124515 }, { "content": "fn count_descendents(cache: &mut Vec<Option<usize>>, days: i32) -> usize {\n\n if days < 1 {\n\n return 0;\n\n }\n\n\n\n let day_index: usize = days.try_into().unwrap();\n\n if day_index < cache.len() {\n\n if let Some(cached) = cache[day_index] {\n\n return cached;\n\n }\n\n } else {\n\n cache.resize(day_index + 1, None);\n\n }\n\n\n\n let descendants = 1\n\n + 
count_descendents(cache, days - GESTATION_PERIOD)\n\n + count_descendents(cache, days - GESTATION_PERIOD - ADOLESCENCE_DELAY);\n\n cache[day_index] = Some(descendants);\n\n descendants\n\n}\n\n\n\nconst DAYS: i32 = 256;\n\n\n", "file_path": "2021/day-06/src/main.rs", "rank": 9, "score": 148115.44591675274 }, { "content": "fn get_half_range(range: &Range<i32>, use_top_half: bool) -> Range<i32> {\n\n let middle = get_middle(range);\n\n if use_top_half {\n\n middle..range.end\n\n } else {\n\n range.start..middle\n\n }\n\n}\n\n\n\nimpl Step {\n\n fn new(command: Command, x: Range<i32>, y: Range<i32>, z: Range<i32>) -> Self {\n\n Self { command, x, y, z }\n\n }\n\n\n\n fn parse_from_lines<I: Iterator<Item = String>>(lines: I) -> Vec<Self> {\n\n lines\n\n .map(|line| {\n\n let mut split = line.split(' ');\n\n let command = match split.next() {\n\n Some(\"off\") => Command::Off,\n", "file_path": "2021/day-22/src/main.rs", "rank": 10, "score": 144694.88260147916 }, { "content": "fn step_south(grid: &mut [Vec<u8>]) -> bool {\n\n let mut changed = false;\n\n\n\n for column_index in 0..grid[0].len() {\n\n let last_index = grid.len() - 1;\n\n let first_row_was_empty = grid[0][column_index] == b'.';\n\n let last_row_had_mover = grid[last_index][column_index] == b'v';\n\n\n\n let mut row_index = 0usize;\n\n while row_index < grid.len() - 1 {\n\n if grid[row_index][column_index] == b'v' && grid[row_index + 1][column_index] == b'.' 
{\n\n grid[row_index][column_index] = b'.';\n\n grid[row_index + 1][column_index] = b'v';\n\n row_index += 2;\n\n changed = true;\n\n } else {\n\n row_index += 1;\n\n }\n\n }\n\n\n\n if last_row_had_mover && first_row_was_empty {\n\n grid[last_index][column_index] = b'.';\n\n grid[0][column_index] = b'v';\n\n changed = true;\n\n }\n\n }\n\n\n\n changed\n\n}\n\n\n", "file_path": "2021/day-25/src/main.rs", "rank": 11, "score": 141085.58117804694 }, { "content": "fn step_east(grid: &mut [Vec<u8>]) -> bool {\n\n let mut changed = false;\n\n\n\n for row in grid {\n\n let last_index = row.len() - 1;\n\n let last_column_had_mover = row[last_index] == b'>';\n\n let first_column_was_empty = row[0] == b'.';\n\n\n\n let mut column_index = 0usize;\n\n while column_index < row.len() - 1 {\n\n if row[column_index] == b'>' && row[column_index + 1] == b'.' {\n\n row.swap(column_index, column_index + 1);\n\n column_index += 2;\n\n changed = true;\n\n } else {\n\n column_index += 1;\n\n }\n\n }\n\n\n\n if last_column_had_mover && first_column_was_empty {\n\n row.swap(0, last_index);\n\n changed = true;\n\n }\n\n }\n\n\n\n changed\n\n}\n\n\n", "file_path": "2021/day-25/src/main.rs", "rank": 12, "score": 141085.58117804694 }, { "content": "fn sum_product2(sorted: &[i32], target: i32) -> Option<i32> {\n\n let mut candidate_index = sorted.len() - 1;\n\n for number in sorted {\n\n while number + sorted[candidate_index] > target {\n\n if candidate_index == 0 {\n\n return None;\n\n }\n\n\n\n candidate_index -= 1;\n\n }\n\n\n\n if number + sorted[candidate_index] == target {\n\n return Some(number * sorted[candidate_index]);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "2020/day-01/src/main.rs", "rank": 13, "score": 134457.79579354974 }, { "content": "fn sum_product3(sorted: &[i32], target: i32) -> Option<i32> {\n\n let mut end = sorted.len() - 1;\n\n for number in sorted {\n\n while number + sorted[end] > target {\n\n end -= 1;\n\n }\n\n\n\n if let Some(product2) = 
sum_product2(&sorted[0..end], target - number) {\n\n return Some(product2 * number);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "2020/day-01/src/main.rs", "rank": 14, "score": 134457.79579354974 }, { "content": "fn get_middle(range: &Range<i32>) -> i32 {\n\n ((range.end - range.start) / 2) + range.start\n\n}\n\n\n", "file_path": "2021/day-22/src/main.rs", "rank": 15, "score": 132508.6708165587 }, { "content": "fn get_cumulative_sums(slice: &[i32]) -> Vec<i32> {\n\n let mut sum = 0;\n\n let mut sums = Vec::new();\n\n for element in slice {\n\n sums.push(element + sum);\n\n sum += element;\n\n }\n\n sums\n\n}\n\n\n", "file_path": "2021/day-07/src/main.rs", "rank": 16, "score": 130792.017234017 }, { "content": "fn play_game() -> i32 {\n\n let mut die = 1;\n\n let mut die_rolls = 0;\n\n\n\n let mut positions = [10, 7];\n\n let mut scores = [0, 0];\n\n\n\n let mut current_player = 0;\n\n loop {\n\n let roll = roll_die(&mut die) + roll_die(&mut die) + roll_die(&mut die);\n\n die_rolls += 3;\n\n positions[current_player] = (positions[current_player] + roll - 1) % 10 + 1;\n\n scores[current_player] += positions[current_player];\n\n if scores[current_player] >= 1000 {\n\n return scores[1 - current_player] * die_rolls;\n\n }\n\n\n\n current_player = 1 - current_player;\n\n }\n\n}\n\n\n\nconst DICE_ROLLS: [(u8, usize); 7] = [(3, 1), (4, 3), (5, 6), (6, 7), (7, 6), (8, 3), (9, 1)];\n\n\n", "file_path": "2021/day-21/src/main.rs", "rank": 17, "score": 129862.15397232692 }, { "content": "fn compute_position<I: Iterator<Item = String>>(commands: I, use_aim: bool) -> i32 {\n\n let (distance, depth, _aim) = commands\n\n .map(|command| {\n\n let mut split = command.split(' ');\n\n (\n\n split.next().unwrap().as_bytes()[0],\n\n split.next().unwrap().parse::<i32>().unwrap(),\n\n )\n\n })\n\n .fold(\n\n (0, 0, 0),\n\n if use_aim {\n\n position_with_aim\n\n } else {\n\n naive_position\n\n },\n\n );\n\n distance * depth\n\n}\n\n\n", "file_path": "2021/day-02/src/main.rs", "rank": 
18, "score": 125169.03468479606 }, { "content": "struct SumIterator<I: Iterator<Item = i32>> {\n\n inner_iterator: I,\n\n window_size: usize,\n\n window: VecDeque<i32>,\n\n sum: i32,\n\n}\n\n\n\nimpl<I: Iterator<Item = i32>> Iterator for SumIterator<I> {\n\n type Item = i32;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.inner_iterator.next() {\n\n Some(i) => {\n\n self.sum += i;\n\n self.window.push_back(i);\n\n\n\n if self.window.len() > self.window_size {\n\n self.sum -= self.window.front().unwrap();\n\n self.window.pop_front();\n\n }\n", "file_path": "2021/day-01/src/main.rs", "rank": 19, "score": 121091.56912951775 }, { "content": "fn orient_coordinates(coordinates: &Point3<i32>, orientation: Orientation) -> Point3<i32> {\n\n match orientation {\n\n Orientation::PositiveZUpPositiveY => *coordinates,\n\n Orientation::PositiveZUpPositiveX => {\n\n point![-coordinates.y, coordinates.x, coordinates.z]\n\n }\n\n Orientation::PositiveZUpNegativeY => {\n\n point![-coordinates.x, -coordinates.y, coordinates.z]\n\n }\n\n Orientation::PositiveZUpNegativeX => {\n\n point![coordinates.y, -coordinates.x, coordinates.z]\n\n }\n\n Orientation::NegativeZUpPositiveY => {\n\n point![-coordinates.x, coordinates.y, -coordinates.z]\n\n }\n\n Orientation::NegativeZUpNegativeX => {\n\n point![-coordinates.y, -coordinates.x, -coordinates.z]\n\n }\n\n Orientation::NegativeZUpNegativeY => {\n\n point![coordinates.x, -coordinates.y, -coordinates.z]\n", "file_path": "2021/day-19/src/main.rs", "rank": 20, "score": 120450.43019981383 }, { "content": "fn divisible(line: &str) -> i32 {\n\n let mut seen: Vec<i32> = vec![];\n\n for text in line.split_whitespace() {\n\n let number: i32 = text.parse().expect(\"Expected an integer\");\n\n for s in seen.as_slice() {\n\n if *s > number && s % number == 0 {\n\n return s / number;\n\n } else if number > *s && number % s == 0 {\n\n return number / s;\n\n }\n\n }\n\n seen.push(number);\n\n }\n\n 0\n\n}\n\n\n", "file_path": 
"2017/day-02/src/main.rs", "rank": 21, "score": 120135.91422685311 }, { "content": "fn get_sorted<I: Iterator<Item = i32>>(items: I) -> Vec<i32> {\n\n let mut items: Vec<i32> = items.collect();\n\n items.sort_unstable();\n\n items\n\n}\n\n\n", "file_path": "2021/day-22/src/main.rs", "rank": 22, "score": 120026.2704680406 }, { "content": "fn checksum<I>(stream: I) -> i32\n\nwhere\n\n I: Iterator<Item = i32>,\n\n{\n\n let min_max = stream.fold((i32::max_value(), i32::min_value()), |acc, x| {\n\n (std::cmp::min(acc.0, x), std::cmp::max(acc.1, x))\n\n });\n\n min_max.1 - min_max.0\n\n}\n\n\n", "file_path": "2017/day-02/src/main.rs", "rank": 23, "score": 119255.90277613094 }, { "content": "fn run_steps(steps: &[Step]) -> i32 {\n\n let root = Node::create_root();\n\n\n\n for step in steps {\n\n let cubes = step.slice_into_cubes();\n\n match step.command {\n\n Command::Off => {\n\n for cube in cubes {\n\n root.borrow_mut().remove_cube(&cube.borrow());\n\n }\n\n }\n\n Command::On => {\n\n for cube in cubes {\n\n root.borrow_mut().insert_cube(&cube.borrow());\n\n }\n\n }\n\n }\n\n }\n\n\n\n let volume = root.borrow().get_volume();\n\n volume\n\n}\n\n\n", "file_path": "2021/day-22/src/main.rs", "rank": 24, "score": 118067.00059542799 }, { "content": "fn get_maximum_magnitude(numbers: &[String]) -> i32 {\n\n let mut maximum = 0;\n\n for (index, a) in numbers.iter().enumerate() {\n\n for b in &numbers[index + 1..] 
{\n\n let forwards = Node::new_pair(\n\n Node::parse_from_bytes(a.as_bytes()).0,\n\n Node::parse_from_bytes(b.as_bytes()).0,\n\n );\n\n reduce(&forwards);\n\n maximum = maximum.max(forwards.borrow().get_magnitude());\n\n\n\n let backwards = Node::new_pair(\n\n Node::parse_from_bytes(b.as_bytes()).0,\n\n Node::parse_from_bytes(a.as_bytes()).0,\n\n );\n\n reduce(&backwards);\n\n maximum = maximum.max(backwards.borrow().get_magnitude());\n\n }\n\n }\n\n maximum\n\n}\n\n\n", "file_path": "2021/day-18/src/main.rs", "rank": 25, "score": 116118.26420945818 }, { "content": "fn evolve_tiles(black_tiles: &mut BitSet) {\n\n let mut tiles_to_flip = Vec::new();\n\n let mut white_tiles = BitSet::new();\n\n\n\n for black_tile in black_tiles.iter() {\n\n let coordinate = Coordinate::from_address(black_tile);\n\n let adjacent_black_tile_count = count_adjacent_black_tiles(coordinate, black_tiles);\n\n if adjacent_black_tile_count == 0 || adjacent_black_tile_count > 2 {\n\n tiles_to_flip.push(black_tile);\n\n }\n\n\n\n for adjacent_tile in &get_adjacent_tiles(coordinate) {\n\n white_tiles.insert(adjacent_tile.get_address() as usize);\n\n }\n\n }\n\n\n\n white_tiles.difference_with(black_tiles);\n\n for white_tile in &white_tiles {\n\n let coordinate = Coordinate::from_address(white_tile);\n\n let adjacent_black_tile_count = count_adjacent_black_tiles(coordinate, black_tiles);\n", "file_path": "2020/day-24/src/main.rs", "rank": 26, "score": 114311.87274875774 }, { "content": "fn resolve_scanners(scanners: &mut Vec<Scanner>) {\n\n let last = scanners.len() - 1;\n\n scanners.swap(0, last);\n\n let mut resolved = scanners.pop().unwrap();\n\n let absolute_beacons = resolved.relative_beacons.clone();\n\n resolved.resolve(absolute_beacons);\n\n\n\n let mut complete = Vec::new();\n\n let mut anchors = vec![resolved];\n\n let mut unresolved: Vec<_> = scanners.drain(..).collect();\n\n while !unresolved.is_empty() {\n\n for scanner in &mut unresolved {\n\n for anchor in &anchors {\n\n if 
scanner.try_resolve_against(anchor) {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n complete.append(&mut anchors);\n", "file_path": "2021/day-19/src/main.rs", "rank": 27, "score": 113068.86450093519 }, { "content": "fn read_array(filename: &str) -> Vec<i32> {\n\n let mut reader = LineReader::new(filename);\n\n let mut array = Vec::<i32>::new();\n\n reader.read_with(|line| {\n\n array.push(\n\n line.parse()\n\n .unwrap_or_else(|_| panic!(\"Failed to parse {}\", line)),\n\n )\n\n });\n\n\n\n array\n\n}\n\n\n", "file_path": "2020/day-01/src/main.rs", "rank": 29, "score": 113037.69131761944 }, { "content": "enum Direction {\n\n East,\n\n Southeast,\n\n Southwest,\n\n West,\n\n Northwest,\n\n Northeast,\n\n}\n\n\n\nimpl Direction {\n\n fn from_index(index: usize) -> Self {\n\n match index {\n\n 0 => Direction::East,\n\n 1 => Direction::Southeast,\n\n 2 => Direction::Southwest,\n\n 3 => Direction::West,\n\n 4 => Direction::Northwest,\n\n 5 => Direction::Northeast,\n\n _ => panic!(\"Unexpected direction index {}\", index),\n\n }\n\n }\n\n}\n\n\n", "file_path": "2020/day-24/src/main.rs", "rank": 31, "score": 113030.254170298 }, { "content": "fn simplify_possibilities(possibilities: &mut Vec<BitSet>) {\n\n let mut singletons: Vec<usize> = possibilities\n\n .iter()\n\n .filter_map(|field_possibilities| {\n\n if field_possibilities.len() == 1 {\n\n Some(\n\n field_possibilities\n\n .iter()\n\n .next()\n\n .expect(\"Failed to get only element\"),\n\n )\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n\n\n while !singletons.is_empty() {\n\n let singleton = singletons\n\n .pop()\n", "file_path": "2020/day-16/src/main.rs", "rank": 33, "score": 111229.87507563402 }, { "content": "fn step(grid: &mut [Vec<u8>]) -> bool {\n\n let mut changed = step_east(grid);\n\n changed |= step_south(grid);\n\n changed\n\n}\n\n\n", "file_path": "2021/day-25/src/main.rs", "rank": 34, "score": 110338.17933609008 }, { "content": "fn play_basic_game(mut player1: VecDeque<u8>, mut player2: VecDeque<u8>) -> 
usize {\n\n while !player1.is_empty() && !player2.is_empty() {\n\n let card1 = player1.pop_front().unwrap();\n\n let card2 = player2.pop_front().unwrap();\n\n\n\n if card1 > card2 {\n\n player1.push_back(card1);\n\n player1.push_back(card2);\n\n } else {\n\n player2.push_back(card2);\n\n player2.push_back(card1);\n\n }\n\n }\n\n\n\n if player1.is_empty() {\n\n compute_score(&player2)\n\n } else {\n\n compute_score(&player1)\n\n }\n\n}\n\n\n", "file_path": "2020/day-22/src/main.rs", "rank": 35, "score": 109794.51725551521 }, { "content": "fn count_until_stop(grid: &mut [Vec<u8>]) -> usize {\n\n let mut count = 0;\n\n while step(grid) {\n\n count += 1;\n\n }\n\n count + 1\n\n}\n\n\n", "file_path": "2021/day-25/src/main.rs", "rank": 36, "score": 108499.1899107889 }, { "content": "fn get_low_point_risk_level(lines: &[Vec<u8>]) -> i32 {\n\n get_low_points(lines)\n\n .iter()\n\n .map(|(row, column)| 1 + (lines[*row][*column] - b'0') as i32)\n\n .sum()\n\n}\n\n\n", "file_path": "2021/day-09/src/main.rs", "rank": 37, "score": 107816.22670595486 }, { "content": "struct DirectionIterator<'a> {\n\n line: &'a str,\n\n cursor: usize,\n\n}\n\n\n\nimpl<'a> DirectionIterator<'a> {\n\n fn new(line: &'a str) -> Self {\n\n Self { line, cursor: 0 }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for DirectionIterator<'a> {\n\n type Item = Direction;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.cursor == self.line.len() {\n\n return None;\n\n }\n\n\n\n let first = &self.line[self.cursor..=self.cursor];\n", "file_path": "2020/day-24/src/main.rs", "rank": 38, "score": 107387.85843339007 }, { "content": "fn run_step(rules: &RulesSlice, template: &mut Template) {\n\n let mut output = Vec::new();\n\n\n\n for (index, count) in template.iter().enumerate() {\n\n let (_first_character, descendants) = &rules[index];\n\n for descendant in descendants {\n\n if descendant.0 as usize >= output.len() {\n\n output.resize(descendant.0 as usize + 1, 0);\n\n }\n\n\n\n output[descendant.0 as usize] 
+= count;\n\n }\n\n }\n\n\n\n swap(template, &mut output);\n\n}\n\n\n", "file_path": "2021/day-14/src/main.rs", "rank": 39, "score": 106761.15777597218 }, { "content": "fn print_dots(coordinates: &mut [(u16, u16)], commands: &[Command]) {\n\n let unique_dots = get_unique_dots(coordinates, commands);\n\n\n\n let mut max_x = 0;\n\n let mut max_y = 0;\n\n for (x, y) in &unique_dots {\n\n max_x = max_x.max(*x);\n\n max_y = max_y.max(*y);\n\n }\n\n\n\n for row in 0..=max_y {\n\n for column in 0..=max_x {\n\n print!(\n\n \"{}\",\n\n if coordinates.contains(&(column, row)) {\n\n '#'\n\n } else {\n\n '.'\n\n }\n\n );\n\n }\n\n println!();\n\n }\n\n}\n\n\n", "file_path": "2021/day-13/src/main.rs", "rank": 40, "score": 104324.95565812988 }, { "content": "fn execute_command(coordinates: &mut [(u16, u16)], command: &Command) {\n\n match command {\n\n Command::FoldAlongX(value) => {\n\n for (x, _y) in coordinates {\n\n if *x > *value {\n\n *x = value * 2 - *x;\n\n }\n\n }\n\n }\n\n Command::FoldAlongY(value) => {\n\n for (_x, y) in coordinates {\n\n if *y > *value {\n\n *y = value * 2 - *y;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2021/day-13/src/main.rs", "rank": 41, "score": 104324.95565812988 }, { "content": "fn count_increases<I: Iterator<Item = i32>>(measurements: I) -> usize {\n\n measurements\n\n .fold((None, 0), |(previous, count), current| match previous {\n\n None => (Some(current), count),\n\n Some(previous) => (Some(current), count + (current > previous) as usize),\n\n })\n\n .1\n\n}\n\n\n", "file_path": "2021/day-01/src/main.rs", "rank": 42, "score": 103848.55049099178 }, { "content": "fn convert_to_binary(mut message: String) -> BitVec<Msb0, usize> {\n\n // Pad message out to 32 bits\n\n let padded_length = ((message.len() + 7) / 8) * 8;\n\n while message.len() < padded_length {\n\n message.push('0');\n\n }\n\n\n\n let mut binary = BitVec::new();\n\n\n\n for word in message.as_bytes().chunks(8) {\n\n let word = String::from_utf8_lossy(word);\n\n let 
value = u32::from_str_radix(&word, 16).unwrap();\n\n let insertion_point = binary.len();\n\n binary.resize(binary.len() + 32, false);\n\n binary[insertion_point..].store(value);\n\n }\n\n\n\n binary\n\n}\n\n\n\nconst HEADER_SIZE: usize = 6;\n", "file_path": "2021/day-16/src/main.rs", "rank": 43, "score": 102679.78935038985 }, { "content": "fn get_corrupted_score<I: Iterator<Item = String>>(lines: I) -> i32 {\n\n let score_table = HashMap::from([(b')', 3), (b']', 57), (b'}', 1197), (b'>', 25137)]);\n\n lines\n\n .map(|line| match parse_line(&line) {\n\n ParseStatus::Corrupted(illegal) => score_table[&illegal],\n\n _ => 0,\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "2021/day-10/src/main.rs", "rank": 44, "score": 102110.75906802565 }, { "content": "fn get_possible_values(\n\n target_x: RangeInclusive<i32>,\n\n target_y: RangeInclusive<i32>,\n\n) -> HashSet<(i32, i32)> {\n\n // The minimum possible vx value would be the first triangle number >= target_x.start()\n\n let vx_min = {\n\n let mut distance = 1;\n\n let mut vx = 1;\n\n while distance < *target_x.start() {\n\n vx += 1;\n\n distance += vx;\n\n }\n\n vx\n\n };\n\n\n\n // The maximum possible vx value would be the right side of the target\n\n let vx_max = *target_x.end();\n\n\n\n let mut vx_in_target_at_step = HashMap::new();\n\n let mut vx_in_target_at_or_after_step = BTreeMap::new();\n", "file_path": "2021/day-17/src/main.rs", "rank": 45, "score": 99795.2805983253 }, { "content": "fn propagate_from_neighbors(risk_to_enter: &[Vec<u8>], lowest_risk: &mut [Vec<u16>]) {\n\n let mut updates = VecDeque::new();\n\n\n\n for row in 0..risk_to_enter.len() {\n\n for column in 0..risk_to_enter.len() {\n\n // Above\n\n if row > 0\n\n && (lowest_risk[row - 1][column] + risk_to_enter[row][column] as u16)\n\n < lowest_risk[row][column]\n\n {\n\n lowest_risk[row][column] =\n\n lowest_risk[row - 1][column] + risk_to_enter[row][column] as u16;\n\n updates.push_back((row, column));\n\n continue;\n\n }\n\n\n\n // Left\n\n if 
column > 0\n\n && (lowest_risk[row][column - 1] + risk_to_enter[row][column] as u16)\n\n < lowest_risk[row][column]\n", "file_path": "2021/day-15/src/main.rs", "rank": 46, "score": 97452.87405678272 }, { "content": "fn flash_cell(lines: &mut [Vec<u8>], row: usize, column: usize) {\n\n if lines[row][column] != 10 {\n\n return;\n\n }\n\n\n\n for (row_delta, column_delta) in [\n\n (-1, -1),\n\n (-1, 0),\n\n (-1, 1),\n\n (0, -1),\n\n (0, 1),\n\n (1, -1),\n\n (1, 0),\n\n (1, 1),\n\n ] {\n\n if row == 0 && row_delta < 0 {\n\n continue;\n\n }\n\n\n\n if column == 0 && column_delta < 0 {\n", "file_path": "2021/day-11/src/main.rs", "rank": 47, "score": 96959.28691887608 }, { "content": "fn parse_scanners<I: Iterator<Item = String>>(mut lines: I) -> Vec<Scanner> {\n\n let mut scanners = Vec::new();\n\n while let Some(_) = lines.next() {\n\n scanners.push(Scanner::from_lines(&mut lines));\n\n }\n\n scanners\n\n}\n\n\n", "file_path": "2021/day-19/src/main.rs", "rank": 48, "score": 96728.04471894653 }, { "content": "fn parse_input<I: Iterator<Item = String>>(mut lines: I) -> (Template, Rules, char) {\n\n let mut token_map = HashMap::new();\n\n let mut next_token = 0u8;\n\n let mut get_next_token = || {\n\n let token = next_token;\n\n next_token += 1;\n\n Token(token)\n\n };\n\n\n\n let template = lines.next().unwrap();\n\n let last_character = template.chars().last().unwrap();\n\n\n\n let pairs: Vec<_> = template\n\n .chars()\n\n .collect::<Vec<char>>()\n\n .as_slice()\n\n .windows(2)\n\n .map(|window| [window[0], window[1]])\n\n .collect();\n\n\n", "file_path": "2021/day-14/src/main.rs", "rank": 49, "score": 93536.56241114909 }, { "content": "fn reduce_list<I: Iterator<Item = String>>(mut list: I) -> Rc<RefCell<Node>> {\n\n let mut left = Node::parse_from_bytes(list.next().unwrap().as_bytes()).0;\n\n for item in list {\n\n let root = Node::new_pair(left, Node::parse_from_bytes(item.as_bytes()).0);\n\n reduce(&root);\n\n left = root;\n\n }\n\n left\n\n}\n\n\n", "file_path": 
"2021/day-18/src/main.rs", "rank": 50, "score": 92056.11529606667 }, { "content": "fn get_unique_dots(coordinates: &mut [(u16, u16)], commands: &[Command]) -> HashSet<(u16, u16)> {\n\n for command in commands {\n\n execute_command(coordinates, command)\n\n }\n\n\n\n coordinates.iter().cloned().collect()\n\n}\n\n\n", "file_path": "2021/day-13/src/main.rs", "rank": 51, "score": 90941.55042894575 }, { "content": "fn get_path(next_indices: &[[usize; POSITION_COUNT]], mut u: usize, v: usize) -> Vec<Position> {\n\n let mut path = vec![FromPrimitive::from_usize(u).unwrap()];\n\n while u != v {\n\n u = next_indices[u][v];\n\n path.push(FromPrimitive::from_usize(u).unwrap());\n\n }\n\n path\n\n}\n\n\n", "file_path": "2021/day-23/src/main.rs", "rank": 52, "score": 90648.9013800887 }, { "content": "fn set_register(value: Expression) -> Register {\n\n VecDeque::from([value])\n\n}\n\n\n\nimpl RegisterFile {\n\n fn new() -> Self {\n\n Self {\n\n x: set_register(Expression::new_literal(0)),\n\n y: set_register(Expression::new_literal(0)),\n\n z: set_register(Expression::new_literal(0)),\n\n w: set_register(Expression::new_literal(0)),\n\n }\n\n }\n\n\n\n fn get(&self, name: RegisterName) -> &Register {\n\n match name {\n\n RegisterName::X => &self.x,\n\n RegisterName::Y => &self.y,\n\n RegisterName::Z => &self.z,\n\n RegisterName::W => &self.w,\n", "file_path": "2021/day-24/src/main.rs", "rank": 53, "score": 90062.99285943032 }, { "content": "fn simulate_multiverse() -> [usize; 2] {\n\n let mut wins = [0usize; 2];\n\n\n\n let mut initial = HashMap::from([(\n\n Universe {\n\n position: [10, 7],\n\n score: [0, 0],\n\n },\n\n 1,\n\n )]);\n\n let mut result = HashMap::new();\n\n\n\n let mut current_player = 0;\n\n while !initial.is_empty() {\n\n for (universe, count) in &initial {\n\n for (roll_value, roll_count) in DICE_ROLLS {\n\n let mut new_universe = universe.clone();\n\n new_universe.position[current_player] =\n\n (new_universe.position[current_player] + roll_value - 1) % 10 + 
1;\n\n new_universe.score[current_player] += new_universe.position[current_player];\n", "file_path": "2021/day-21/src/main.rs", "rank": 54, "score": 89907.10106957777 }, { "content": "fn calculate_first_position(values: &[u16]) -> u16 {\n\n let leading_zeros = values.last().unwrap().leading_zeros();\n\n 1u16 << (15 - leading_zeros)\n\n}\n\n\n", "file_path": "2021/day-03/src/main.rs", "rank": 55, "score": 88109.46053414812 }, { "content": "fn parse_input<I: Iterator<Item = String>>(mut lines: I) -> (Vec<(u16, u16)>, Vec<Command>) {\n\n let coordinates = lines\n\n .by_ref()\n\n .take_while(|line| !line.is_empty())\n\n .map(|coordinate_line| {\n\n let mut split = coordinate_line.split(',');\n\n (\n\n split.next().unwrap().parse().unwrap(),\n\n split.next().unwrap().parse().unwrap(),\n\n )\n\n })\n\n .collect();\n\n\n\n let commands = lines\n\n .map(|command_line| {\n\n let mut split = command_line.split_whitespace().nth(2).unwrap().split('=');\n\n let direction = split.next().unwrap();\n\n let value: u16 = split.next().unwrap().parse().unwrap();\n\n match direction {\n\n \"x\" => Command::FoldAlongX(value),\n\n \"y\" => Command::FoldAlongY(value),\n\n _ => unreachable!(),\n\n }\n\n })\n\n .collect();\n\n\n\n (coordinates, commands)\n\n}\n\n\n", "file_path": "2021/day-13/src/main.rs", "rank": 56, "score": 87783.9504672467 }, { "content": "fn parse_input<I: Iterator<Item = String>>(mut lines: I) -> ([u8; 512], VecDeque<VecDeque<u8>>) {\n\n let algorithm: [u8; 512] = lines.next().unwrap().as_bytes().try_into().unwrap();\n\n // Skip the blank line\n\n lines.next();\n\n\n\n let pixels: VecDeque<_> = lines\n\n .map(|line| line.as_bytes().iter().cloned().collect())\n\n .collect();\n\n\n\n (algorithm, pixels)\n\n}\n\n\n", "file_path": "2021/day-20/src/main.rs", "rank": 57, "score": 86444.66697231645 }, { "content": "fn digit_bits_from_configurations(configurations: &str) -> [u8; 10] {\n\n let mut sorted_by_configuration_length: Vec<_> = {\n\n let mut vector: Vec<_> = 
configurations.split(' ').collect();\n\n vector.sort_by_key(|s| s.len());\n\n vector.iter().map(|s| bits_from_letters(s)).collect()\n\n };\n\n\n\n let mut bits_for_digit = [0u8; 10];\n\n\n\n bits_for_digit[1] = sorted_by_configuration_length[0];\n\n bits_for_digit[7] = sorted_by_configuration_length[1];\n\n bits_for_digit[4] = sorted_by_configuration_length[2];\n\n bits_for_digit[8] = sorted_by_configuration_length[9];\n\n\n\n let candidates_for_069 = &mut sorted_by_configuration_length[6..=8];\n\n // Of 0, 6, and 9, only 9 is the same when ORed with 4\n\n let (position_of_9, _) = candidates_for_069\n\n .iter()\n\n .enumerate()\n\n .find(|(_position, bits)| **bits | bits_for_digit[4] == **bits)\n", "file_path": "2021/day-08/src/main.rs", "rank": 58, "score": 80237.53816549649 }, { "content": "fn get_next_value(advanced: bool, expression: &str) -> (i64, usize) {\n\n match &expression[0..1] {\n\n \"(\" => evaluate_expression(advanced, &expression[1..]),\n\n _ => (\n\n expression[0..1]\n\n .parse()\n\n .expect(\"Failed to parse digit as i64\"),\n\n 1,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "2020/day-18/src/main.rs", "rank": 59, "score": 79186.80585641388 }, { "content": "fn get_direction_vector(p0: Point2<i16>, p1: Point2<i16>) -> Vector2<i16> {\n\n let mut vector = p1 - p0;\n\n vector.x = vector.x.clamp(-1, 1);\n\n vector.y = vector.y.clamp(-1, 1);\n\n vector\n\n}\n\n\n", "file_path": "2021/day-05/src/main.rs", "rank": 60, "score": 74310.12203137194 }, { "content": "fn calculate_rating(prefer_high: bool, values: &[u16], position: Option<u16>) -> u16 {\n\n let position = match position {\n\n Some(position) => position,\n\n None => calculate_first_position(values),\n\n };\n\n\n\n if values.len() == 1 {\n\n return values[0];\n\n }\n\n\n\n let middle = values.len() / 2;\n\n let desired_value = if prefer_high { position } else { 0 };\n\n if values[middle] & position == desired_value {\n\n let mut start = 0;\n\n while (values[start] & position) == 0 {\n\n start += 1;\n\n 
}\n\n calculate_rating(prefer_high, &values[start..], Some(position >> 1))\n\n } else {\n\n let mut end = values.len() - 1;\n\n while (values[end] & position) != 0 {\n\n end -= 1;\n\n }\n\n calculate_rating(prefer_high, &values[..=end], Some(position >> 1))\n\n }\n\n}\n\n\n", "file_path": "2021/day-03/src/main.rs", "rank": 61, "score": 74279.25469708747 }, { "content": "fn parse_line(line: &str) -> ([u8; 10], Vec<u8>) {\n\n let mut split = line.split(\" | \");\n\n let digit_bits = digit_bits_from_configurations(split.next().unwrap());\n\n let output_bits: Vec<_> = split\n\n .next()\n\n .unwrap()\n\n .split(' ')\n\n .map(|digit| bits_from_letters(digit))\n\n .collect();\n\n (digit_bits, output_bits)\n\n}\n\n\n", "file_path": "2021/day-08/src/main.rs", "rank": 62, "score": 74119.52212875686 }, { "content": "#[derive(Clone, Copy)]\n\nenum Cell {\n\n Floor,\n\n Empty,\n\n Occupied,\n\n}\n\n\n", "file_path": "2020/day-11/src/main.rs", "rank": 63, "score": 73837.53804910995 }, { "content": "#[derive(Clone, Copy)]\n\nenum Orientation {\n\n PositiveZUpPositiveY,\n\n PositiveZUpPositiveX,\n\n PositiveZUpNegativeY,\n\n PositiveZUpNegativeX,\n\n NegativeZUpPositiveY,\n\n NegativeZUpNegativeX,\n\n NegativeZUpNegativeY,\n\n NegativeZUpPositiveX,\n\n PositiveXUpPositiveY,\n\n PositiveXUpNegativeZ,\n\n PositiveXUpNegativeY,\n\n PositiveXUpPositiveZ,\n\n NegativeXUpPositiveY,\n\n NegativeXUpPositiveZ,\n\n NegativeXUpNegativeY,\n\n NegativeXUpNegativeZ,\n\n PositiveYUpNegativeZ,\n\n PositiveYUpPositiveX,\n\n PositiveYUpPositiveZ,\n", "file_path": "2021/day-19/src/main.rs", "rank": 64, "score": 73837.53804910995 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum Command {\n\n Add,\n\n Multiply,\n\n}\n\n\n", "file_path": "2020/day-18/src/main.rs", "rank": 65, "score": 73837.44966719432 }, { "content": "#[derive(Clone, Copy, PartialEq)]\n\nenum Command {\n\n Accumulate,\n\n Jump,\n\n None,\n\n}\n\n\n", "file_path": "2020/day-08/src/main.rs", "rank": 66, "score": 73837.36487825422 
}, { "content": "#[derive(Clone, Copy, Eq, FromPrimitive, PartialEq)]\n\nenum Amphipod {\n\n A1,\n\n A2,\n\n A3,\n\n A4,\n\n B1,\n\n B2,\n\n B3,\n\n B4,\n\n C1,\n\n C2,\n\n C3,\n\n C4,\n\n D1,\n\n D2,\n\n D3,\n\n D4,\n\n}\n\n\n\nimpl Amphipod {\n", "file_path": "2021/day-23/src/main.rs", "rank": 67, "score": 73837.20523714859 }, { "content": "#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\nenum Side {\n\n Left = 0,\n\n Top = 1,\n\n Right = 2,\n\n Bottom = 3,\n\n}\n\n\n\nimpl Side {\n\n fn from_index(i: usize) -> Self {\n\n match i {\n\n 0 => Self::Left,\n\n 1 => Self::Top,\n\n 2 => Self::Right,\n\n 3 => Self::Bottom,\n\n _ => panic!(\"Unexpected index {}\", i),\n\n }\n\n }\n\n}\n\n\n", "file_path": "2020/day-20/src/main.rs", "rank": 68, "score": 73836.98786724752 }, { "content": "#[derive(Clone, Copy, Debug, Eq, FromPrimitive, Hash, Ord, PartialEq, PartialOrd)]\n\nenum Position {\n\n Hallway00,\n\n Hallway01,\n\n Hallway02,\n\n Hallway03,\n\n Hallway04,\n\n Hallway05,\n\n Hallway06,\n\n Hallway07,\n\n Hallway08,\n\n Hallway09,\n\n Hallway10,\n\n RoomA1,\n\n RoomA2,\n\n RoomA3,\n\n RoomA4,\n\n RoomB1,\n\n RoomB2,\n\n RoomB3,\n\n RoomB4,\n", "file_path": "2021/day-23/src/main.rs", "rank": 69, "score": 73836.85584729415 }, { "content": "enum Instruction {\n\n Inp(Destination),\n\n Add(Destination, Source),\n\n Mul(Destination, Source),\n\n Div(Destination, Source),\n\n Mod(Destination, Source),\n\n Eql(Destination, Source),\n\n}\n\n\n\nimpl Instruction {\n\n fn parse_from_lines<I: Iterator<Item = String>>(lines: I) -> Vec<Instruction> {\n\n lines\n\n .map(|line| {\n\n let mut split = line.split_whitespace();\n\n match split.next().unwrap() {\n\n \"inp\" => Instruction::Inp(\n\n RegisterName::try_from_string(split.next().unwrap()).unwrap(),\n\n ),\n\n \"add\" => Instruction::Add(\n\n RegisterName::try_from_string(split.next().unwrap()).unwrap(),\n", "file_path": "2021/day-24/src/main.rs", "rank": 70, "score": 73833.27830065007 }, { "content": 
"#[derive(Debug, Eq, PartialEq)]\n\nenum Command {\n\n Off,\n\n On,\n\n}\n\n\n", "file_path": "2021/day-22/src/main.rs", "rank": 71, "score": 73833.27830065007 }, { "content": "#[derive(Clone, Debug)]\n\nenum Rule {\n\n Indirect(Vec<Vec<u8>>),\n\n Direct(String),\n\n}\n\n\n", "file_path": "2020/day-19/src/main.rs", "rank": 72, "score": 73833.27830065007 }, { "content": "enum Contents {\n\n Regular(i32),\n\n Pair(Rc<RefCell<Node>>, Rc<RefCell<Node>>),\n\n}\n\n\n", "file_path": "2021/day-18/src/main.rs", "rank": 73, "score": 73833.27830065007 }, { "content": "#[derive(Clone)]\n\nenum Expression {\n\n Literal(i32),\n\n Input(usize),\n\n Sum(Vec<Expression>),\n\n}\n\n\n\nimpl Expression {\n\n fn new_literal(value: i32) -> Self {\n\n Expression::Literal(value)\n\n }\n\n\n\n fn new_input() -> Self {\n\n static NEXT_INPUT: AtomicUsize = AtomicUsize::new(1);\n\n Expression::Input(NEXT_INPUT.fetch_add(1, Ordering::Relaxed))\n\n }\n\n}\n\n\n\nimpl AddAssign for Expression {\n\n fn add_assign(&mut self, rhs: Self) {\n\n match self {\n", "file_path": "2021/day-24/src/main.rs", "rank": 74, "score": 73833.27830065007 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum Operation {\n\n Sum,\n\n Product,\n\n Minimum,\n\n Maximum,\n\n GreaterThan,\n\n LessThan,\n\n EqualTo,\n\n}\n\n\n\nimpl Operation {\n\n fn from_u8(value: u8) -> Self {\n\n match value {\n\n 0 => Operation::Sum,\n\n 1 => Operation::Product,\n\n 2 => Operation::Minimum,\n\n 3 => Operation::Maximum,\n\n 5 => Operation::GreaterThan,\n\n 6 => Operation::LessThan,\n\n 7 => Operation::EqualTo,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "2021/day-16/src/main.rs", "rank": 75, "score": 73833.27830065007 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum Command {\n\n FoldAlongX(u16),\n\n FoldAlongY(u16),\n\n}\n\n\n", "file_path": "2021/day-13/src/main.rs", "rank": 76, "score": 73833.27830065007 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum Packet {\n\n Literal(Version, u64),\n\n 
Operator(Version, Operation, Vec<Packet>),\n\n}\n\n\n", "file_path": "2021/day-16/src/main.rs", "rank": 77, "score": 73833.27830065007 }, { "content": "enum Source {\n\n Register(RegisterName),\n\n Literal(i32),\n\n}\n\n\n\nimpl Source {\n\n fn from_string(string: &str) -> Self {\n\n if let Some(register) = RegisterName::try_from_string(string) {\n\n Source::Register(register)\n\n } else {\n\n Source::Literal(string.parse().unwrap())\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for Source {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Source::Register(register) => register.fmt(f),\n\n Source::Literal(value) => write!(f, \"{}\", value),\n\n }\n\n }\n\n}\n\n\n", "file_path": "2021/day-24/src/main.rs", "rank": 78, "score": 73833.27830065007 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum Position {\n\n Left,\n\n Right,\n\n}\n\n\n", "file_path": "2021/day-18/src/main.rs", "rank": 79, "score": 73833.27830065007 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct Operation {\n\n command: Command,\n\n value: i64,\n\n}\n\n\n", "file_path": "2020/day-18/src/main.rs", "rank": 80, "score": 73304.33675636534 }, { "content": "#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\nstruct Coordinate {\n\n x: i8,\n\n y: i8,\n\n}\n\n\n\nimpl Coordinate {\n\n fn new() -> Self {\n\n Self { x: 0, y: 0 }\n\n }\n\n\n\n fn from_address(address: usize) -> Self {\n\n let x: i16 = ((address >> 8) & 0xFF).try_into().unwrap();\n\n let y: i16 = (address & 0xFF).try_into().unwrap();\n\n\n\n Self {\n\n x: (x - 128).try_into().unwrap(),\n\n y: (y - 128).try_into().unwrap(),\n\n }\n\n }\n\n\n", "file_path": "2020/day-24/src/main.rs", "rank": 81, "score": 73304.0170934133 }, { "content": "struct Counts {\n\n any_person: u32,\n\n all_people: u32,\n\n}\n\n\n\nimpl AddAssign for Counts {\n\n fn add_assign(&mut self, other: Self) {\n\n self.any_person += other.any_person;\n\n self.all_people += other.all_people;\n\n }\n\n}\n\n\n\nimpl QuestionCounter 
{\n\n fn new() -> Self {\n\n Self {\n\n any_person: 0_u32,\n\n all_people: u32::MAX,\n\n }\n\n }\n\n\n", "file_path": "2020/day-06/src/main.rs", "rank": 82, "score": 73300.16538982108 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nstruct Step {\n\n command: Command,\n\n x: Range<i32>,\n\n y: Range<i32>,\n\n z: Range<i32>,\n\n}\n\n\n", "file_path": "2021/day-22/src/main.rs", "rank": 83, "score": 73300.16538982108 }, { "content": "#[derive(Clone)]\n\nstruct Layout {\n\n line_of_sight: bool,\n\n map: Vec<Cell>,\n\n column_count: i32,\n\n row_count: i32,\n\n adjacent_indices: Vec<u16>,\n\n updated_indices: Vec<u16>,\n\n occupied_seats: Vec<bool>,\n\n}\n\n\n\nimpl Layout {\n\n fn new(line_of_sight: bool) -> Self {\n\n Self {\n\n line_of_sight,\n\n map: Vec::new(),\n\n column_count: -1,\n\n row_count: 0,\n\n adjacent_indices: Vec::new(),\n\n updated_indices: Vec::new(),\n\n occupied_seats: Vec::new(),\n", "file_path": "2020/day-11/src/main.rs", "rank": 84, "score": 73300.16538982108 }, { "content": "#[derive(Debug)]\n\nstruct Tile {\n\n id: u16,\n\n image: Vec<Vec<u8>>,\n\n // Stored LTRB, horizontal L->R, vertical T->B\n\n sides: [[u8; TILE_SIZE]; 4],\n\n sides_with_neighbors: Vec<Side>,\n\n}\n\n\n\nimpl Tile {\n\n fn from_lines(lines: &[String]) -> Self {\n\n let id = lines[0]\n\n .split(' ')\n\n .nth(1)\n\n .expect(\"Failed to find ID in split\")\n\n .trim_end_matches(':')\n\n .parse()\n\n .expect(\"Failed to parse ID as i16\");\n\n\n\n let mut image = vec![vec![b' '; IMAGE_SIZE]; IMAGE_SIZE];\n\n\n", "file_path": "2020/day-20/src/main.rs", "rank": 85, "score": 73300.16538982108 }, { "content": "#[derive(Debug, Eq)]\n\nstruct Node {\n\n previous: Vec<(u16, u16)>,\n\n total_estimated_risk: u16,\n\n risk_to_node: u16,\n\n row: u16,\n\n column: u16,\n\n}\n\n\n\nimpl Node {\n\n fn has_previous(&self, row: usize, column: usize) -> bool {\n\n self.previous.contains(&(row as u16, column as u16))\n\n }\n\n}\n\n\n\nimpl Ord for Node {\n\n fn cmp(&self, other: &Self) -> 
std::cmp::Ordering {\n\n other.total_estimated_risk.cmp(&self.total_estimated_risk)\n\n }\n\n}\n\n\n", "file_path": "2021/day-15/src/main.rs", "rank": 86, "score": 73300.16538982108 }, { "content": "#[derive(Clone, Eq, Hash, PartialEq)]\n\nstruct Universe {\n\n position: [u8; 2],\n\n score: [u8; 2],\n\n}\n\n\n", "file_path": "2021/day-21/src/main.rs", "rank": 87, "score": 73300.16538982108 }, { "content": "#[derive(Debug)]\n\nstruct Board {\n\n lines: [BitSet; 10],\n\n}\n\n\n\nimpl Board {\n\n fn from_lines<I: Iterator<Item = String>>(lines: &mut I) -> Option<Self> {\n\n let mut line_sets = vec![BitSet::with_capacity(100); 10];\n\n\n\n for (row, line) in lines.enumerate() {\n\n if line.is_empty() {\n\n break;\n\n }\n\n\n\n let split = line.split_whitespace();\n\n for (column, value) in split.map(|value| value.parse::<u8>().unwrap()).enumerate() {\n\n line_sets[row + 5].insert(value as usize);\n\n line_sets[column].insert(value as usize);\n\n }\n\n }\n\n\n", "file_path": "2021/day-04/src/main.rs", "rank": 88, "score": 73300.16538982108 }, { "content": "#[derive(Debug)]\n\nstruct Node {\n\n x: i32,\n\n y: i32,\n\n z: i32,\n\n size: i32, // Unsigned in practice and asserted as such at construction, but stored as i32 for ease of use\n\n is_cube: bool,\n\n children: Vec<Rc<RefCell<Node>>>,\n\n}\n\n\n\nimpl Node {\n\n const MAX_VALUE: i32 = 128 * 1024;\n\n\n\n fn new(x: i32, y: i32, z: i32, size: i32) -> Rc<RefCell<Self>> {\n\n assert!(size > 0);\n\n Rc::new(RefCell::new(Self {\n\n x,\n\n y,\n\n z,\n\n size,\n\n is_cube: true,\n", "file_path": "2021/day-22/src/main.rs", "rank": 89, "score": 73300.16538982108 }, { "content": "struct Node {\n\n parent: Option<(Weak<RefCell<Node>>, Position)>,\n\n weak_self: Weak<RefCell<Node>>,\n\n contents: Contents,\n\n}\n\n\n\nimpl Node {\n\n fn new_regular(value: i32) -> Rc<RefCell<Self>> {\n\n let node = Rc::new(RefCell::new(Node {\n\n parent: None,\n\n weak_self: Weak::new(),\n\n contents: Contents::Regular(value),\n\n }));\n\n 
node.borrow_mut().weak_self = Rc::downgrade(&node);\n\n node\n\n }\n\n\n\n fn new_pair(left: Rc<RefCell<Node>>, right: Rc<RefCell<Node>>) -> Rc<RefCell<Self>> {\n\n let pair = Rc::new(RefCell::new(Node {\n\n parent: None,\n", "file_path": "2021/day-18/src/main.rs", "rank": 90, "score": 73300.16538982108 }, { "content": "struct Bag {\n\n name: String,\n\n count: i32,\n\n}\n\n\n", "file_path": "2020/day-07/src/main.rs", "rank": 91, "score": 73300.16538982108 }, { "content": "#[derive(Clone)]\n\nstruct Constraint {\n\n kind: ConstraintKind,\n\n lhs: Register,\n\n rhs: Register,\n\n}\n\n\n\nimpl Constraint {\n\n fn new(kind: ConstraintKind, lhs: Register, rhs: Register) -> Self {\n\n Self { kind, lhs, rhs }\n\n }\n\n}\n\n\n\nimpl Debug for Constraint {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{:?} {:?} {:?}\", self.lhs, self.kind, self.rhs)\n\n }\n\n}\n\n\n", "file_path": "2021/day-24/src/main.rs", "rank": 92, "score": 73300.16538982108 }, { "content": "struct Scanner {\n\n relative_beacons: Vec<Point3<i32>>,\n\n absolute_position: Point3<i32>,\n\n absolute_beacons: Vec<Point3<i32>>,\n\n anchor_relative_beacons: Vec<HashSet<Point3<i32>>>,\n\n}\n\n\n\nimpl Scanner {\n\n fn from_lines<I: Iterator<Item = String>>(lines: &mut I) -> Self {\n\n Self {\n\n relative_beacons: lines\n\n .take_while(|line| !line.is_empty())\n\n .map(|line| {\n\n let mut split = line.split(',');\n\n point![\n\n split.next().unwrap().parse().unwrap(),\n\n split.next().unwrap().parse().unwrap(),\n\n split.next().unwrap().parse().unwrap()\n\n ]\n\n })\n", "file_path": "2021/day-19/src/main.rs", "rank": 93, "score": 73300.16538982108 }, { "content": "struct Instruction {\n\n operation: Operation,\n\n visited: bool,\n\n}\n\n\n\nimpl Instruction {\n\n fn new(operation: Operation) -> Self {\n\n Self {\n\n operation,\n\n visited: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "2020/day-08/src/main.rs", "rank": 94, "score": 73300.16538982108 }, { "content": 
"struct Operation {\n\n command: Command,\n\n payload: i32,\n\n}\n\n\n\nimpl Operation {\n\n fn from_line(line: &str) -> Operation {\n\n let mut split = line.split(' ');\n\n\n\n let mnemonic = split.next().expect(\"Failed to parse mnemonic\");\n\n let command = match mnemonic {\n\n \"acc\" => Command::Accumulate,\n\n \"jmp\" => Command::Jump,\n\n \"nop\" => Command::None,\n\n _ => panic!(\"Unexpected mnemonic [{}]\", mnemonic),\n\n };\n\n\n\n let payload = split\n\n .next()\n\n .expect(\"Failed to parse payload\")\n", "file_path": "2020/day-08/src/main.rs", "rank": 95, "score": 73300.16538982108 }, { "content": "struct Range {\n\n begin: i32,\n\n end: i32,\n\n}\n\n\n", "file_path": "2020/day-16/src/main.rs", "rank": 96, "score": 73300.16538982108 }, { "content": "struct Field {\n\n id: usize,\n\n name: String,\n\n ranges: Vec<Range>,\n\n}\n\n\n", "file_path": "2020/day-16/src/main.rs", "rank": 97, "score": 73300.16538982108 }, { "content": "struct Transformer {\n\n subject: u64,\n\n value: u64,\n\n loop_count: u32\n\n}\n\n\n\nimpl Transformer {\n\n fn new(subject: u64) -> Self {\n\n Self {\n\n subject,\n\n value: 1,\n\n loop_count: 0,\n\n }\n\n }\n\n\n\n fn run_loop(&mut self) {\n\n self.value *= self.subject;\n\n self.value %= 20201227;\n\n self.loop_count += 1;\n\n }\n\n\n\n fn get_value(&self) -> u64 {\n\n self.value\n\n }\n\n\n\n fn get_loop_count(&self) -> u32 {\n\n self.loop_count\n\n }\n\n}\n\n\n", "file_path": "2020/day-25/src/main.rs", "rank": 98, "score": 73300.16538982108 }, { "content": "#[derive(Clone, Copy)]\n\nenum PolicyType {\n\n Range,\n\n Position,\n\n}\n\n\n", "file_path": "2020/day-02/src/main.rs", "rank": 99, "score": 72557.34924616822 } ]
Rust
src/statemachine/mac.rs
nathanwhit/tarpaulin
42dd578e753e4b380d27545704b49239f9f6ea30
#![allow(unused)] #![allow(non_snake_case)] use crate::config::Config; use crate::errors::RunError; use crate::statemachine::*; use log::{debug, trace}; use nix::errno::Errno; use nix::sys::signal::Signal; use nix::sys::wait::*; use nix::unistd::Pid; use nix::Error as NixErr; use std::collections::{HashMap, HashSet}; pub fn create_state_machine<'a>( test: Pid, traces: &'a mut TraceMap, config: &'a Config, ) -> (TestState, MacData<'a>) { let mut data = MacData::new(traces, config); data.parent = test; (TestState::start_state(), data) } pub type UpdateContext = (TestState, TracerAction<ProcessInfo>); #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct ProcessInfo { pid: Pid, signal: Option<Signal>, } impl ProcessInfo { fn new(pid: Pid, signal: Option<Signal>) -> Self { Self { pid, signal } } } impl From<Pid> for ProcessInfo { fn from(pid: Pid) -> Self { ProcessInfo::new(pid, None) } } impl From<&Pid> for ProcessInfo { fn from(pid: &Pid) -> Self { ProcessInfo::new(*pid, None) } } pub struct MacData<'a> { wait_queue: Vec<WaitStatus>, current: Pid, parent: Pid, breakpoints: HashMap<u64, Breakpoint>, traces: &'a mut TraceMap, config: &'a Config, thread_count: isize, } impl<'a> StateData for MacData<'a> { fn start(&mut self) -> Result<Option<TestState>, RunError> { match waitpid(self.current, Some(WaitPidFlag::WNOHANG | WaitPidFlag::WUNTRACED)) { Ok(WaitStatus::StillAlive) => Ok(None), Ok(sig @ WaitStatus::Stopped(_, Signal::SIGTRAP)) => { if let WaitStatus::Stopped(child, _) = sig { self.current = child; } trace!("Caught inferior transitioning to Initialise state"); Ok(Some(TestState::Initialise)) } Ok(_) => Err(RunError::TestRuntime( "Unexpected signal when starting test".to_string(), )), Err(e) => Err(RunError::TestRuntime(format!( "Error when starting test: {}", e ))), } } fn init(&mut self) -> Result<TestState, RunError> { trace_children(self.current)?; for trace in self.traces.all_traces() { if let Some(addr) = trace.address { match Breakpoint::new(self.current, 
addr) { Ok(bp) => { let _ = self.breakpoints.insert(addr, bp); } Err(e) if e == NixErr::Sys(Errno::EIO) => { return Err(RunError::TestRuntime( "ERROR: Tarpaulin cannot find code addresses \ check that pie is disabled for your linker. \ If linking with gcc try adding -C link-args=-no-pie \ to your rust flags" .to_string(), )); } Err(NixErr::UnsupportedOperation) => { debug!("Instrumentation address clash, ignoring 0x{:x}", addr); } Err(_) => { return Err(RunError::TestRuntime( "Failed to instrument test executable".to_string(), )); } } } } if continue_exec(self.parent, None).is_ok() { trace!("Initialised inferior, transitioning to wait state"); Ok(TestState::wait_state()) } else { Err(RunError::TestRuntime( "Test didn't launch correctly".to_string(), )) } } fn wait(&mut self) -> Result<Option<TestState>, RunError> { let mut result = Ok(None); let mut running = true; while running { let wait = waitpid( Pid::from_raw(-1), Some(WaitPidFlag::WNOHANG), ); match wait { Ok(WaitStatus::StillAlive) => { running = false; } Ok(WaitStatus::Exited(_, _)) => { self.wait_queue.push(wait.unwrap()); result = Ok(Some(TestState::Stopped)); running = false; } Ok(s) => { self.wait_queue.push(s); result = Ok(Some(TestState::Stopped)); } Err(e) => { running = false; result = Err(RunError::TestRuntime(format!( "An error occurred while waiting for response from test: {}", e ))) } } } if !self.wait_queue.is_empty() { trace!("Result queue is {:?}", self.wait_queue); } result } fn stop(&mut self) -> Result<TestState, RunError> { let mut actions = Vec::new(); let mut pcs = HashSet::new(); let mut result = Ok(TestState::wait_state()); let pending = self.wait_queue.clone(); self.wait_queue.clear(); for status in &pending { let state = match status { WaitStatus::Stopped(c, Signal::SIGTRAP) => { self.current = *c; match self.collect_coverage_data(&mut pcs) { Ok(s) => Ok(s), Err(e) => Err(RunError::TestRuntime(format!( "Error when collecting coverage: {}", e ))), } } WaitStatus::Stopped(child, 
Signal::SIGSTOP) => Ok(( TestState::wait_state(), TracerAction::Continue(child.into()), )), WaitStatus::Stopped(_, Signal::SIGSEGV) => Err(RunError::TestRuntime( "A segfault occurred while executing tests".to_string(), )), WaitStatus::Stopped(child, Signal::SIGILL) => { let pc = current_instruction_pointer(*child).unwrap_or_else(|_| 1) - 1; trace!("SIGILL raised. Child program counter is: 0x{:x}", pc); Err(RunError::TestRuntime(format!( "Error running test - SIGILL raised in {}", child ))) } WaitStatus::Stopped(c, s) => { let sig = if self.config.forward_signals { Some(*s) } else { None }; let info = ProcessInfo::new(*c, sig); Ok((TestState::wait_state(), TracerAction::TryContinue(info))) } WaitStatus::Signaled(c, s, f) => { if let Ok(s) = self.handle_signaled(c, s, *f) { Ok(s) } else { Err(RunError::TestRuntime( "Attempting to handle tarpaulin being signaled".to_string(), )) } } WaitStatus::Exited(child, ec) => { for ref mut value in self.breakpoints.values_mut() { value.thread_killed(*child); } trace!("Exited {:?} parent {:?}", child, self.parent); if child == &self.parent { Ok((TestState::End(*ec), TracerAction::Nothing)) } else { Ok(( TestState::wait_state(), TracerAction::TryContinue(self.parent.into()), )) } } _ => Err(RunError::TestRuntime( "An unexpected signal has been caught by tarpaulin!".to_string(), )), }; match state { Ok((TestState::Waiting { .. }, action)) => { actions.push(action); } Ok((state, action)) => { result = Ok(state); actions.push(action); } Err(e) => result = Err(e), } } let mut continued = false; for a in &actions { match a { TracerAction::TryContinue(t) => { continued = true; let _ = continue_exec(t.pid, t.signal); } TracerAction::Continue(t) => { continued = true; continue_exec(t.pid, t.signal)?; } TracerAction::Step(t) => { continued = true; single_step(t.pid)?; } TracerAction::Detach(t) => { continued = true; detach_child(t.pid)?; } _ => {} } } if !continued { trace!("No action suggested to continue tracee. 
Attempting a continue"); let _ = continue_exec(self.parent, None); } result } } impl<'a> MacData<'a> { pub fn new(traces: &'a mut TraceMap, config: &'a Config) -> MacData<'a> { MacData { wait_queue: Vec::new(), current: Pid::from_raw(0), parent: Pid::from_raw(0), breakpoints: HashMap::new(), traces, config, thread_count: 0, } } fn handle_ptrace_event( &mut self, child: Pid, sig: Signal, event: i32, ) -> Result<(TestState, TracerAction<ProcessInfo>), RunError> { use nix::libc::*; if sig == Signal::SIGTRAP { match event { PTRACE_EVENT_CLONE => match get_event_data(child) { Ok(t) => { trace!("New thread spawned {}", t); self.thread_count += 1; Ok(( TestState::wait_state(), TracerAction::Continue(child.into()), )) } Err(e) => { trace!("Error in clone event {:?}", e); Err(RunError::TestRuntime( "Error occurred upon test executable thread creation".to_string(), )) } }, PTRACE_EVENT_FORK => { trace!("Caught fork event"); Ok(( TestState::wait_state(), TracerAction::Continue(child.into()), )) } PTRACE_EVENT_EXEC => { trace!("Child execed other process - detaching ptrace"); Ok((TestState::wait_state(), TracerAction::Detach(child.into()))) } PTRACE_EVENT_EXIT => { trace!("Child exiting"); self.thread_count -= 1; Ok(( TestState::wait_state(), TracerAction::TryContinue(child.into()), )) } _ => Err(RunError::TestRuntime(format!( "Unrecognised ptrace event {}", event ))), } } else { trace!("Unexpected signal with ptrace event {}", event); trace!("Signal: {:?}", sig); Err(RunError::TestRuntime("Unexpected signal".to_string())) } } fn collect_coverage_data( &mut self, visited_pcs: &mut HashSet<u64>, ) -> Result<UpdateContext, RunError> { let mut action = None; if let Ok(rip) = current_instruction_pointer(self.current) { let rip = (rip - 1) as u64; trace!("Hit address 0x{:x}", rip); if self.breakpoints.contains_key(&rip) { let bp = &mut self.breakpoints.get_mut(&rip).unwrap(); let updated = if visited_pcs.contains(&rip) { let _ = bp.jump_to(self.current); (true, 
TracerAction::Continue(self.current.into())) } else { let enable = self.config.count; if let Ok(x) = bp.process(self.current, enable) { x } else { (false, TracerAction::Continue(self.current.into())) } }; if updated.0 { if let Some(ref mut t) = self.traces.get_trace_mut(rip) { if let CoverageStat::Line(ref mut x) = t.stats { trace!("Incrementing hit count for trace"); *x += 1; } } } action = Some(updated.1); } } let action = action.unwrap_or_else(|| TracerAction::Continue(self.current.into())); Ok((TestState::wait_state(), action)) } fn handle_signaled( &mut self, pid: &Pid, sig: &Signal, flag: bool, ) -> Result<UpdateContext, RunError> { match (sig, flag) { (Signal::SIGTRAP, true) => { Ok((TestState::wait_state(), TracerAction::Continue(pid.into()))) } _ => Err(RunError::StateMachine("Unexpected stop".to_string())), } } }
#![allow(unused)] #![allow(non_snake_case)] use crate::config::Config; use crate::errors::RunError; use crate::statemachine::*; use log::{debug, trace}; use nix::errno::Errno; use nix::sys::signal::Signal; use nix::sys::wait::*; use nix::unistd::Pid; use nix::Error as NixErr; use std::collections::{HashMap, HashSet}; pub fn create_state_machine<'a>( test: Pid, traces: &'a mut TraceMap, config: &'a Config, ) -> (TestState, MacData<'a>) { let mut data = MacData::new(traces, config); data.parent = test; (TestState::start_state(), data) } pub type UpdateContext = (TestState, TracerAction<ProcessInfo>); #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct ProcessInfo { pid: Pid, signal: Option<Signal>, } impl ProcessInfo { fn new(pid: Pid, signal: Option<Signal>) -> Self { Self { pid, signal } } } impl From<Pid> for ProcessInfo { fn from(pid: Pid) -> Self { ProcessInfo::new(pid, None) } } impl From<&Pid> for ProcessInfo { fn from(pid: &Pid) -> Self { ProcessInfo::new(*pid, None) } } pub struct MacData<'a> { wait_queue: Vec<WaitStatus>, current: Pid, parent: Pid, breakpoints: HashMap<u64, Breakpoint>, traces: &'a mut TraceMap, config: &'a Config, thread_count: isize, } impl<'a> StateData for MacData<'a> { fn start(&mut self) -> Result<Option<TestState>, RunError> { match waitpid(self.current, Some(WaitPidFlag::WNOHANG | WaitPidFlag::WUNTRACED)) { Ok(WaitStatus::StillAlive) => Ok(None), Ok(sig @ WaitStatus::Stopped(_, Signal::SIGTRAP)) => { if let WaitStatus::Stopped(child, _) = sig { self.current = child; } trace!("Caught inferior transitioning to Initialise state"); Ok(Some(TestState::Initialise)) } Ok(_) => Err(RunError::TestRuntime( "Unexpected signal when starting test".to_string(), )), Err(e) => Err(RunError::TestRuntime(format!( "Error when starting test: {}", e ))), } } fn init(&mut self) -> Result<TestState, RunError> { trace_children(self.current)?; for trace in self.traces.all_traces() { if let Some(addr) = trace.address { match Breakpoint::new(self.current, 
addr) { Ok(bp) => { let _ = self.breakpoints.insert(addr, bp); } Err(e) if e == NixErr::Sys(Errno::EIO) => { return Err(RunError::TestRuntime( "ERROR: Tarpaulin cannot find code addresses \ check that pie is disabled for your linker. \ If linking with gcc try adding -C link-args=-no-pie \ to your rust flags" .to_string(), )); } Err(NixErr::UnsupportedOperation) => { debug!("Instrumentation address clash, ignoring 0x{:x}", addr); } Err(_) => { return Err(RunError::TestRuntime( "Failed to instrument test executable".to_string(), )); } } } } if continue_exec(self.parent, None).is_ok() { trace!("Initialised inferior, transitioning to wait state"); Ok(TestState::wait_state()) } else { Err(RunError::TestRuntime( "Test didn't launch correctly".to_string(), )) } } fn wait(&mut self) -> Result<Option<TestState>, RunError> { let mut result = Ok(None); let mut running = true; while running { let wait = waitpid( Pid::from_raw(-1), Some(WaitPidFlag::WNOHANG), ); match wait { Ok(WaitStatus::StillAlive) => { running = false; } Ok(WaitStatus::Exited(_, _)) => { self.wait_queue.push(wait.unwrap()); result = Ok(Some(TestState::Stopped)); running = false; } Ok(s) => { self.wait_queue.push(s); result = Ok(Some(TestState::Stopped)); } Err(e) => { running = false; result = Err(RunError::TestRuntime(format!( "An error occurred while waiting for response from test: {}", e ))) } } } if !self.wait_queue.is_empty() { trace!("Result queue is {:?}", self.wait_queue); } result }
} impl<'a> MacData<'a> { pub fn new(traces: &'a mut TraceMap, config: &'a Config) -> MacData<'a> { MacData { wait_queue: Vec::new(), current: Pid::from_raw(0), parent: Pid::from_raw(0), breakpoints: HashMap::new(), traces, config, thread_count: 0, } } fn handle_ptrace_event( &mut self, child: Pid, sig: Signal, event: i32, ) -> Result<(TestState, TracerAction<ProcessInfo>), RunError> { use nix::libc::*; if sig == Signal::SIGTRAP { match event { PTRACE_EVENT_CLONE => match get_event_data(child) { Ok(t) => { trace!("New thread spawned {}", t); self.thread_count += 1; Ok(( TestState::wait_state(), TracerAction::Continue(child.into()), )) } Err(e) => { trace!("Error in clone event {:?}", e); Err(RunError::TestRuntime( "Error occurred upon test executable thread creation".to_string(), )) } }, PTRACE_EVENT_FORK => { trace!("Caught fork event"); Ok(( TestState::wait_state(), TracerAction::Continue(child.into()), )) } PTRACE_EVENT_EXEC => { trace!("Child execed other process - detaching ptrace"); Ok((TestState::wait_state(), TracerAction::Detach(child.into()))) } PTRACE_EVENT_EXIT => { trace!("Child exiting"); self.thread_count -= 1; Ok(( TestState::wait_state(), TracerAction::TryContinue(child.into()), )) } _ => Err(RunError::TestRuntime(format!( "Unrecognised ptrace event {}", event ))), } } else { trace!("Unexpected signal with ptrace event {}", event); trace!("Signal: {:?}", sig); Err(RunError::TestRuntime("Unexpected signal".to_string())) } } fn collect_coverage_data( &mut self, visited_pcs: &mut HashSet<u64>, ) -> Result<UpdateContext, RunError> { let mut action = None; if let Ok(rip) = current_instruction_pointer(self.current) { let rip = (rip - 1) as u64; trace!("Hit address 0x{:x}", rip); if self.breakpoints.contains_key(&rip) { let bp = &mut self.breakpoints.get_mut(&rip).unwrap(); let updated = if visited_pcs.contains(&rip) { let _ = bp.jump_to(self.current); (true, TracerAction::Continue(self.current.into())) } else { let enable = self.config.count; if let Ok(x) 
= bp.process(self.current, enable) { x } else { (false, TracerAction::Continue(self.current.into())) } }; if updated.0 { if let Some(ref mut t) = self.traces.get_trace_mut(rip) { if let CoverageStat::Line(ref mut x) = t.stats { trace!("Incrementing hit count for trace"); *x += 1; } } } action = Some(updated.1); } } let action = action.unwrap_or_else(|| TracerAction::Continue(self.current.into())); Ok((TestState::wait_state(), action)) } fn handle_signaled( &mut self, pid: &Pid, sig: &Signal, flag: bool, ) -> Result<UpdateContext, RunError> { match (sig, flag) { (Signal::SIGTRAP, true) => { Ok((TestState::wait_state(), TracerAction::Continue(pid.into()))) } _ => Err(RunError::StateMachine("Unexpected stop".to_string())), } } }
fn stop(&mut self) -> Result<TestState, RunError> { let mut actions = Vec::new(); let mut pcs = HashSet::new(); let mut result = Ok(TestState::wait_state()); let pending = self.wait_queue.clone(); self.wait_queue.clear(); for status in &pending { let state = match status { WaitStatus::Stopped(c, Signal::SIGTRAP) => { self.current = *c; match self.collect_coverage_data(&mut pcs) { Ok(s) => Ok(s), Err(e) => Err(RunError::TestRuntime(format!( "Error when collecting coverage: {}", e ))), } } WaitStatus::Stopped(child, Signal::SIGSTOP) => Ok(( TestState::wait_state(), TracerAction::Continue(child.into()), )), WaitStatus::Stopped(_, Signal::SIGSEGV) => Err(RunError::TestRuntime( "A segfault occurred while executing tests".to_string(), )), WaitStatus::Stopped(child, Signal::SIGILL) => { let pc = current_instruction_pointer(*child).unwrap_or_else(|_| 1) - 1; trace!("SIGILL raised. Child program counter is: 0x{:x}", pc); Err(RunError::TestRuntime(format!( "Error running test - SIGILL raised in {}", child ))) } WaitStatus::Stopped(c, s) => { let sig = if self.config.forward_signals { Some(*s) } else { None }; let info = ProcessInfo::new(*c, sig); Ok((TestState::wait_state(), TracerAction::TryContinue(info))) } WaitStatus::Signaled(c, s, f) => { if let Ok(s) = self.handle_signaled(c, s, *f) { Ok(s) } else { Err(RunError::TestRuntime( "Attempting to handle tarpaulin being signaled".to_string(), )) } } WaitStatus::Exited(child, ec) => { for ref mut value in self.breakpoints.values_mut() { value.thread_killed(*child); } trace!("Exited {:?} parent {:?}", child, self.parent); if child == &self.parent { Ok((TestState::End(*ec), TracerAction::Nothing)) } else { Ok(( TestState::wait_state(), TracerAction::TryContinue(self.parent.into()), )) } } _ => Err(RunError::TestRuntime( "An unexpected signal has been caught by tarpaulin!".to_string(), )), }; match state { Ok((TestState::Waiting { .. 
}, action)) => { actions.push(action); } Ok((state, action)) => { result = Ok(state); actions.push(action); } Err(e) => result = Err(e), } } let mut continued = false; for a in &actions { match a { TracerAction::TryContinue(t) => { continued = true; let _ = continue_exec(t.pid, t.signal); } TracerAction::Continue(t) => { continued = true; continue_exec(t.pid, t.signal)?; } TracerAction::Step(t) => { continued = true; single_step(t.pid)?; } TracerAction::Detach(t) => { continued = true; detach_child(t.pid)?; } _ => {} } } if !continued { trace!("No action suggested to continue tracee. Attempting a continue"); let _ = continue_exec(self.parent, None); } result }
function_block-full_function
[ { "content": "/// Launches tarpaulin with the given configuration.\n\npub fn launch_tarpaulin(config: &Config) -> Result<(TraceMap, i32), RunError> {\n\n setup_environment(&config);\n\n cargo::core::enable_nightly_features();\n\n let cwd = match config.manifest.parent() {\n\n Some(p) => p.to_path_buf(),\n\n None => PathBuf::new(),\n\n };\n\n let home = match homedir(&cwd) {\n\n Some(h) => h,\n\n None => {\n\n warn!(\"Warning failed to find home directory.\");\n\n PathBuf::new()\n\n }\n\n };\n\n let mut cargo_config = CargoConfig::new(Shell::new(), cwd, home);\n\n let flag_quiet = if config.verbose { None } else { Some(true) };\n\n\n\n // This shouldn't fail so no checking the error.\n\n let _ = cargo_config.configure(\n\n 0u32,\n", "file_path": "src/lib.rs", "rank": 0, "score": 412185.1118991169 }, { "content": "pub fn export(coverage_data: &TraceMap, config: &Config) -> Result<(), RunError> {\n\n if let Some(ref key) = config.coveralls {\n\n let id = get_identity(&config.ci_tool, key);\n\n\n\n let mut report = CoverallsReport::new(id);\n\n for file in &coverage_data.files() {\n\n let rel_path = config.strip_base_dir(file);\n\n let mut lines: HashMap<usize, usize> = HashMap::new();\n\n let fcov = coverage_data.get_child_traces(file);\n\n\n\n for c in &fcov {\n\n match c.stats {\n\n CoverageStat::Line(hits) => {\n\n lines.insert(c.line as usize, hits as usize);\n\n }\n\n _ => {\n\n info!(\"Support for coverage statistic not implemented or supported for coveralls.io\");\n\n }\n\n }\n\n }\n", "file_path": "src/report/coveralls.rs", "rank": 1, "score": 376138.4362117892 }, { "content": "pub fn export(coverage_data: &TraceMap, config: &Config) -> Result<(), RunError> {\n\n let mut report = CoverageReport { files: Vec::new() };\n\n for (path, traces) in coverage_data.iter() {\n\n let content = match read_to_string(path) {\n\n Ok(k) => k,\n\n Err(e) => {\n\n return Err(RunError::Html(format!(\n\n \"Unable to read source file to string: {}\",\n\n e.to_string()\n\n 
)))\n\n }\n\n };\n\n\n\n report.files.push(SourceFile {\n\n path: path\n\n .components()\n\n .map(|c| c.as_os_str().to_string_lossy().to_string())\n\n .collect(),\n\n content,\n\n traces: traces.clone(),\n", "file_path": "src/report/html.rs", "rank": 2, "score": 376138.43621178914 }, { "content": "pub fn export(coverage_data: &TraceMap, config: &Config) -> Result<(), RunError> {\n\n let file_path = config.output_directory.join(\"lcov.info\");\n\n let mut file = match File::create(file_path) {\n\n Ok(k) => k,\n\n Err(e) => {\n\n return Err(RunError::Lcov(format!(\n\n \"File is not writeable: {}\",\n\n e.to_string()\n\n )))\n\n }\n\n };\n\n\n\n for (path, traces) in coverage_data.iter() {\n\n writeln!(file, \"TN:\")?;\n\n writeln!(file, \"SF:{}\", path.to_str().unwrap())?;\n\n\n\n let mut fns: Vec<String> = vec![];\n\n let mut fnda: Vec<String> = vec![];\n\n let mut da: Vec<(u64, u64)> = vec![];\n\n\n", "file_path": "src/report/lcov.rs", "rank": 3, "score": 376138.43621178914 }, { "content": "pub fn run(config: &Config) -> Result<(), RunError> {\n\n let (tracemap, ret) = launch_tarpaulin(config)?;\n\n report_coverage(config, &tracemap)?;\n\n\n\n if ret == 0 {\n\n Ok(())\n\n } else {\n\n Err(RunError::TestFailed)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 368758.04030885256 }, { "content": "/// Reports the test coverage using the users preferred method. 
See config.rs\n\n/// or help text for details.\n\npub fn report_coverage(config: &Config, result: &TraceMap) -> Result<(), RunError> {\n\n if !result.is_empty() {\n\n info!(\"Coverage Results:\");\n\n if config.verbose {\n\n print_missing_lines(config, result);\n\n }\n\n print_summary(config, result);\n\n generate_requested_reports(config, result)?;\n\n if let Some(project_dir) = config.manifest.parent() {\n\n let mut report_dir = project_dir.join(\"target\");\n\n report_dir.push(\"tarpaulin\");\n\n report_dir.push(\"coverage.json\");\n\n let file = File::create(&report_dir)\n\n .map_err(|_| RunError::CovReport(\"Failed to create run report\".to_string()))?;\n\n serde_json::to_writer(&file, &result)\n\n .map_err(|_| RunError::CovReport(\"Failed to save run report\".to_string()))?;\n\n }\n\n Ok(())\n\n } else if !config.no_run {\n\n Err(RunError::CovReport(\n\n \"No coverage results collected.\".to_string(),\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/report/mod.rs", "rank": 5, "score": 367759.14622777747 }, { "content": "pub fn report(traces: &TraceMap, config: &Config) -> Result<(), Error> {\n\n let result = Report::render(config, traces)?;\n\n result.export(config)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Unknown,\n\n ExportError(quick_xml::Error),\n\n}\n\n\n\nimpl error::Error for Error {}\n\n\n\nimpl fmt::Display for Error {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::ExportError(ref e) => write!(f, \"Export Error {}\", e),\n\n Error::Unknown => write!(f, \"Unknown Error\"),\n\n }\n", "file_path": "src/report/cobertura.rs", "rank": 6, "score": 339066.8338783198 }, { "content": "pub fn continue_exec(pid: Pid, sig: Option<Signal>) -> Result<()> {\n\n cont(pid, sig)\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 7, "score": 325154.23995967046 }, { "content": "pub fn continue_exec(pid: Pid, sig: Option<Signal>) -> Result<()> {\n\n cont(pid, 
sig)\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 8, "score": 325154.2399596705 }, { "content": "pub fn write_to_address(pid: Pid, address: u64, data: i64) -> Result<()> {\n\n write(pid, address as AddressType, data as *mut c_void)\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 9, "score": 324286.9259849612 }, { "content": "pub fn write_to_address(pid: Pid, address: u64, data: i64) -> Result<()> {\n\n println!(\"write_to_address: {:?} {:?} {:?}\", pid, address, data);\n\n let task_port = get_task_port(pid)?;\n\n println!(\"Task port = {}, Write address = {}\", task_port, address);\n\n unsafe {\n\n let bytes_to_write: u32 = mem::size_of::<i64>().try_into().unwrap();\n\n loop {\n\n let (_, prot) = check_prots(task_port, address)?;\n\n println!(\"current protection : {}\", prot);\n\n if prot == VM_PROT_ALL {\n\n break;\n\n } \n\n println!(\"setting prots\");\n\n set_prot_flag(task_port, address, VM_PROT_COPY)?;\n\n set_prot_flag(task_port, address, VM_PROT_ALL)?;\n\n }\n\n let bytes = &data as *const _ as *const u8 as usize;\n\n // let byte_addr = bytes.as_ptr() as usize;\n\n\n\n let res: KernelRet = KernelRet::from(mach_vm_write(\n", "file_path": "src/ptrace_control/mac.rs", "rank": 10, "score": 324286.9259849612 }, { "content": "fn generate_requested_reports(config: &Config, result: &TraceMap) -> Result<(), RunError> {\n\n if config.is_coveralls() {\n\n coveralls::export(result, config)?;\n\n info!(\"Coverage data sent\");\n\n }\n\n\n\n if !config.is_default_output_dir() {\n\n if create_dir_all(&config.output_directory).is_err() {\n\n return Err(RunError::OutFormat(format!(\n\n \"Failed to create or locate custom output directory: {:?}\",\n\n config.output_directory,\n\n )));\n\n }\n\n }\n\n\n\n for g in &config.generate {\n\n match *g {\n\n OutputFile::Xml => {\n\n cobertura::report(result, config).map_err(|e| RunError::XML(e))?;\n\n }\n", "file_path": "src/report/mod.rs", "rank": 11, "score": 322780.6409028049 }, { "content": 
"pub fn read_address(pid: Pid, address: u64) -> Result<c_long> {\n\n read(pid, address as AddressType)\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 12, "score": 282646.82595394796 }, { "content": "pub fn read_address(pid: Pid, address: u64) -> Result<c_long> {\n\n let task_port = get_task_port(pid)?;\n\n println!(\"Task port = {}, Reading address = {}\", task_port, address);\n\n unsafe {\n\n let mut data_addr: MaybeUninit<vm_offset_t> = MaybeUninit::uninit();\n\n let mut bytes_read: mach_msg_type_number_t = mem::size_of::<c_long>().try_into().unwrap();\n\n let bytes_req = bytes_read as u64;\n\n let res: KernelRet = KernelRet::from(mach_vm_read(\n\n task_port,\n\n address,\n\n bytes_req,\n\n data_addr.as_mut_ptr(),\n\n &mut bytes_read\n\n ) as u32);\n\n if res == KernelRet::Success {\n\n let data_addr = data_addr.assume_init();\n\n let data_ptr = data_addr as *const u8;\n\n assert_eq!(bytes_read as u64, bytes_req);\n\n let data = std::slice::from_raw_parts(data_ptr, bytes_read as usize);\n\n let value = c_long::from_ne_bytes(data.try_into().unwrap());\n", "file_path": "src/ptrace_control/mac.rs", "rank": 13, "score": 282646.82595394796 }, { "content": "fn check_speed(c: &mut Criterion) {\n\n c.bench_function(\"some_fn\", |b| {\n\n b.iter(|| only_ran_in_benches(vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, check_speed);\n\ncriterion_main!(benches);\n", "file_path": "tests/data/all_test_types/benches/bench_fn.rs", "rank": 14, "score": 279774.17676829745 }, { "content": "pub fn detach_child(pid: Pid) -> Result<()> {\n\n detach(pid, None)\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 15, "score": 274940.88980514 }, { "content": "pub fn detach_child(pid: Pid) -> Result<()> {\n\n detach(pid, None)\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 16, "score": 274940.88980514 }, { "content": "pub fn trace_children(pid: Pid) -> Result<()> {\n\n //TODO need to check support.\n\n let 
options: Options = Options::PTRACE_O_TRACESYSGOOD\n\n | Options::PTRACE_O_TRACEEXEC\n\n | Options::PTRACE_O_TRACEEXIT\n\n | Options::PTRACE_O_TRACECLONE\n\n | Options::PTRACE_O_TRACEFORK\n\n | Options::PTRACE_O_TRACEVFORK;\n\n setoptions(pid, options)\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 17, "score": 274514.43549094046 }, { "content": "pub fn trace_children(pid: Pid) -> Result<()> {\n\n //TODO need to check support.\n\n // todo!()\n\n Ok(())\n\n // attach(pid)\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 18, "score": 274514.43549094046 }, { "content": "#[allow(deprecated)]\n\npub fn current_instruction_pointer(pid: Pid) -> Result<c_long> {\n\n let ret = unsafe {\n\n Errno::clear();\n\n libc::ptrace(\n\n Request::PTRACE_PEEKUSER as RequestType,\n\n libc::pid_t::from(pid),\n\n RIP as *mut c_void,\n\n ptr::null_mut() as *mut c_void,\n\n )\n\n };\n\n match Errno::result(ret) {\n\n Ok(..) | Err(Error::Sys(Errno::UnknownErrno)) => Ok(ret),\n\n err @ Err(..) 
=> err,\n\n }\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 19, "score": 261494.4473059838 }, { "content": "pub fn current_instruction_pointer(pid: Pid) -> Result<c_long> {\n\n println!(\"CURRENT IP\");\n\n unsafe {\n\n Errno::clear();\n\n }\n\n println!(\"current PID is {:?}\", pid);\n\n let test_thread = test_thread_for_pid(pid)?;\n\n let thread_state = get_thread_state(test_thread)?;\n\n Ok(thread_state.__rip.try_into().unwrap())\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 20, "score": 261494.4473059838 }, { "content": "pub fn get_event_data(pid: Pid) -> Result<c_long> {\n\n // getevent(pid);\n\n todo!()\n\n}\n", "file_path": "src/ptrace_control/mac.rs", "rank": 21, "score": 260542.6515834275 }, { "content": "pub fn get_event_data(pid: Pid) -> Result<c_long> {\n\n getevent(pid)\n\n}\n", "file_path": "src/ptrace_control/linux.rs", "rank": 22, "score": 260542.6515834275 }, { "content": "pub fn only_ran_in_test(mut v: Vec<i32>) -> Vec<i32> {\n\n v.clear();\n\n v\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n #[test]\n\n fn test_it() {\n\n only_ran_in_test(vec![1, 2, 3, 4, 5, 6]);\n\n }\n\n}\n", "file_path": "tests/data/all_test_types/src/only_test.rs", "rank": 23, "score": 247800.7450052083 }, { "content": "pub fn execute(program: CString, argv: &[CString], envar: &[CString]) -> Result<(), RunError> {\n\n disable_aslr().map_err(|e| RunError::TestRuntime(format!(\"ASLR disable failed: {}\", e)))?;\n\n\n\n request_trace().map_err(|e| RunError::Trace(e.to_string()))?;\n\n\n\n let arg_ref = argv.iter().map(|x| x.as_ref()).collect::<Vec<&CStr>>();\n\n let env_ref = envar.iter().map(|x| x.as_ref()).collect::<Vec<&CStr>>();\n\n execve(&program, &arg_ref, &env_ref)\n\n .map_err(|_| RunError::Internal)\n\n .map(|_| ())\n\n}\n", "file_path": "src/process_handling/linux.rs", "rank": 24, "score": 247501.08023333835 }, { "content": "pub fn execute(program: CString, argv: &[CString], envar: &[CString]) -> Result<(), 
RunError> {\n\n let mut attr: MaybeUninit<posix_spawnattr_t> = MaybeUninit::uninit();\n\n let mut res = unsafe { posix_spawnattr_init(attr.as_mut_ptr()) };\n\n if res != 0 {\n\n eprintln!(\"Can't initialise posix_spawnattr_t\");\n\n }\n\n let mut attr = unsafe { attr.assume_init() };\n\n \n\n let flags = (POSIX_SPAWN_SETEXEC | 0x0100) as i16;\n\n \n\n res = unsafe { posix_spawnattr_setflags(&mut attr, flags) };\n\n if res != 0 {\n\n eprintln!(\"Failed to set spawn flags\");\n\n }\n\n\n\n let mut args: Vec<*mut c_char> = argv.iter().map(|s| s.clone().into_raw()).collect();\n\n\n\n args.push(ptr::null_mut());\n\n\n\n let mut envs: Vec<*mut c_char> = envar.iter().map(|s| s.clone().into_raw()).collect();\n", "file_path": "src/process_handling/mac.rs", "rank": 25, "score": 247501.08023333835 }, { "content": "#[test]\n\nfn method_call_with_collect_try() -> Result<(), Box<dyn std::error::Error>> {\n\n let strings = vec![\"93\", \"18\"];\n\n\n\n let _collect_no_try = strings.iter().map(|s| s.parse::<i32>()).collect::<Result<Vec<_>, _>>();\n\n let _collect_no_try = strings\n\n .iter()\n\n .map(|s| s.parse::<i32>())\n\n .collect::<Result<Vec<_>, _>>();\n\n\n\n let _collect_with_try = strings.iter().map(|s| s.parse::<i32>()).collect::<Result<Vec<_>, _>>()?;\n\n let _collect_with_try = strings\n\n .iter()\n\n .map(|s| s.parse::<i32>())\n\n .collect::<Result<Vec<_>, _>>()?; // FIXME: This line is not covered.\n\n\n\n let _collect_no_try = vec![\"93\", \"18\"].into_iter().map(|s| s.parse::<i32>()).collect::<Result<Vec<_>, _>>();\n\n let _collect_no_try = vec![\"93\", \"18\"]\n\n .into_iter()\n\n .map(|s| s.parse::<i32>())\n\n .collect::<Result<Vec<_>, _>>();\n", "file_path": "tests/data/method_calls/src/lib.rs", "rank": 26, "score": 245262.26502722618 }, { "content": "#[test]\n\nfn simple_method_call_with_try() -> Result<(), Box<dyn std::error::Error>> {\n\n let _num: i32 = \"123\".parse()?;\n\n\n\n let mut test = File::open(\"Cargo.toml\")?;\n\n let mut string = 
String::new();\n\n test.read_to_string(&mut string)?;\n\n\n\n // Separated lines\n\n let _num: i32 = \"123\"\n\n .parse()?;\n\n\n\n let mut test = File::open(\"Cargo.toml\")?;\n\n let mut string = String::new();\n\n test\n\n .read_to_string(&mut string)?; // FIXME: This line is not covered.\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/data/method_calls/src/lib.rs", "rank": 27, "score": 245262.26502722612 }, { "content": "#[test]\n\nfn method_call_chain_with_try() -> Result<(), Box<dyn std::error::Error>> {\n\n let _num: i32 = \"123\".clone().parse()?;\n\n let _num: i32 = \"123\"\n\n .clone()\n\n .parse()?;\n\n\n\n let _num: i32 = \"123\".parse::<i32>()?.clone();\n\n let _num: i32 = \"123\"\n\n .parse::<i32>()?\n\n .clone();\n\n\n\n let _num: i32 = \"123\".clone().parse::<i32>()?.clone();\n\n let _num: i32 = \"123\"\n\n .clone()\n\n .parse::<i32>()?\n\n .clone();\n\n\n\n let _num: i32 = \"123\"\n\n .clone()\n\n .parse::<i32>()?\n", "file_path": "tests/data/method_calls/src/lib.rs", "rank": 28, "score": 245262.26502722618 }, { "content": "fn print_summary(config: &Config, result: &TraceMap) {\n\n let last = match get_previous_result(config) {\n\n Some(l) => l,\n\n None => TraceMap::new(),\n\n };\n\n println!(\"|| Tested/Total Lines:\");\n\n for file in result.files() {\n\n let path = config.strip_base_dir(file);\n\n if last.contains_file(file) {\n\n let last_percent = coverage_percentage(&last.get_child_traces(file));\n\n let current_percent = coverage_percentage(&result.get_child_traces(file));\n\n let delta = 100.0f64 * (current_percent - last_percent);\n\n println!(\n\n \"|| {}: {}/{} {:+}%\",\n\n path.display(),\n\n result.covered_in_path(&file),\n\n result.coverable_in_path(&file),\n\n delta\n\n );\n\n } else {\n", "file_path": "src/report/mod.rs", "rank": 29, "score": 243961.58763248788 }, { "content": "#[allow(deprecated)]\n\npub fn single_step(pid: Pid) -> Result<()> {\n\n step(pid, None)\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 30, 
"score": 242954.6906081438 }, { "content": "#[allow(deprecated)]\n\npub fn single_step(pid: Pid) -> Result<()> {\n\n // let rip = current_instruction_pointer(pid)?;\n\n // loop {\n\n unsafe {\n\n Errno::clear();\n\n }\n\n println!(\"Single step\");\n\n let res = Errno::result(unsafe { libc::ptrace(\n\n libc::PT_STEP,\n\n libc::pid_t::from(pid),\n\n 1 as *mut i8,\n\n 0\n\n ) })?;\n\n // let res = Errno::result(unsafe { libc::ptrace(\n\n // libc::PT_STEP,\n\n // libc::pid_t::from(pid),\n\n // 1 as *mut i8,\n\n // 0\n\n // ) })?;\n\n // let res = step(pid, None)?;\n\n // }\n\n // Ok((res))\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 31, "score": 242954.6906081438 }, { "content": "pub fn check_match(x: usize) -> usize {\n\n match x {\n\n 0 => 1,\n\n 1...5 => 2,\n\n 6 | 8 => 3,\n\n x if x % 2 == 0 => x,\n\n _ => 0,\n\n }\n\n}\n\n\n", "file_path": "tests/data/matches/src/lib.rs", "rank": 32, "score": 242662.50047849948 }, { "content": "fn print_missing_lines(config: &Config, result: &TraceMap) {\n\n println!(\"|| Uncovered Lines:\");\n\n for (ref key, ref value) in result.iter() {\n\n let path = config.strip_base_dir(key);\n\n let mut uncovered_lines = vec![];\n\n for v in value.iter() {\n\n match v.stats {\n\n CoverageStat::Line(count) if count == 0 => {\n\n uncovered_lines.push(v.line);\n\n }\n\n _ => (),\n\n }\n\n }\n\n uncovered_lines.sort();\n\n let (groups, last_group) = uncovered_lines\n\n .into_iter()\n\n .fold((vec![], vec![]), accumulate_lines);\n\n let (groups, _) = accumulate_lines((groups, last_group), u64::max_value());\n\n if !groups.is_empty() {\n\n println!(\"|| {}: {}\", path.display(), groups.join(\", \"));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/report/mod.rs", "rank": 33, "score": 240654.52536821523 }, { "content": "///This is a doc comment\n\n/// ```\n\n/// use all_test_types::only_doc_test::*;\n\n/// assert!(only_ran_in_doctest(vec![1,2,3,4,5,6]).is_empty());\n\n/// ```\n\npub fn only_ran_in_doctest(mut v: 
Vec<i32>) -> Vec<i32> {\n\n v.clear();\n\n v\n\n}\n", "file_path": "tests/data/all_test_types/src/only_doc_test.rs", "rank": 34, "score": 240533.81040357074 }, { "content": "pub fn only_ran_in_benches(mut v: Vec<i32>) -> Vec<i32> {\n\n v.clear();\n\n v\n\n}\n", "file_path": "tests/data/all_test_types/src/only_bench.rs", "rank": 35, "score": 239221.45005765572 }, { "content": "pub fn only_ran_in_examples(mut v: Vec<i32>) -> Vec<i32> {\n\n v.clear();\n\n v\n\n}\n", "file_path": "tests/data/all_test_types/src/only_example.rs", "rank": 36, "score": 239221.45005765572 }, { "content": "pub fn check_percentage_with_config(\n\n project_name: &str,\n\n minimum_coverage: f64,\n\n has_lines: bool,\n\n mut config: Config,\n\n) {\n\n config.verbose = true;\n\n config.test_timeout = Duration::from_secs(60);\n\n let restore_dir = env::current_dir().unwrap();\n\n let test_dir = get_test_path(project_name);\n\n env::set_current_dir(&test_dir).unwrap();\n\n config.manifest = test_dir;\n\n config.manifest.push(\"Cargo.toml\");\n\n\n\n let (res, _) = launch_tarpaulin(&config).unwrap();\n\n\n\n env::set_current_dir(restore_dir).unwrap();\n\n assert!(res.coverage_percentage() >= minimum_coverage);\n\n if has_lines {\n\n assert!(res.total_coverable() > 0);\n\n }\n\n}\n\n\n", "file_path": "tests/mod.rs", "rank": 37, "score": 230179.23568959278 }, { "content": "#[test]\n\npub fn c() {\n\n futures::executor::ThreadPool::new();\n\n}\n\n\n", "file_path": "tests/data/futures/src/lib.rs", "rank": 38, "score": 226900.78144607373 }, { "content": "#[allow(deprecated)]\n\npub fn set_instruction_pointer(pid: Pid, pc: u64) -> Result<c_long> {\n\n println!(\"Setting PC to {}\", pc);\n\n let task = get_task_port(pid)?;\n\n let test_thread = test_thread_for_pid(pid)?;\n\n // unsafe { mach::thread_act::thread_suspend(test_thread); }\n\n unsafe { mach::task::task_suspend(task); }\n\n println!(\"Test thread = {}\", test_thread);\n\n let mut old_state = get_thread_state(test_thread)?;\n\n let old_pc = 
old_state.__rip;\n\n old_state.__rip = pc;\n\n println!(\"setting\");\n\n set_thread_state(test_thread, old_state)?;\n\n println!(\"set\");\n\n let changed = get_thread_state(test_thread)?;\n\n assert_eq!(changed.__rip, pc);\n\n // let res: KernelRet = unsafe { mach::thread_act::thread_resume(test_thread).into() };\n\n let res: KernelRet = unsafe { mach::task::task_resume(task).into() };\n\n println!(\"RESUMED => RES = {:?}\", res);\n\n Ok(pc as i64)\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 39, "score": 216266.76182264579 }, { "content": "#[allow(deprecated)]\n\npub fn set_instruction_pointer(pid: Pid, pc: u64) -> Result<c_long> {\n\n unsafe {\n\n ptrace(\n\n Request::PTRACE_POKEUSER,\n\n pid,\n\n RIP as *mut c_void,\n\n pc as *mut c_void,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 40, "score": 216266.76182264579 }, { "content": "fn test_thread_for_pid(pid: Pid) -> Result<thread_act_t> {\n\n let task = get_task_port(pid)?;\n\n let threads = threads_for_task(task)?;\n\n let highest = threads.iter().map(|&t| (t, get_thread_info(t).unwrap().thread_id)).max_by_key(|&(t, tid)| tid).unwrap();\n\n Ok(highest.0)\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 41, "score": 214743.0231747691 }, { "content": "fn get_previous_result(config: &Config) -> Option<TraceMap> {\n\n // Check for previous report\n\n if let Some(project_dir) = config.manifest.parent() {\n\n let mut report_dir = project_dir.join(\"target\");\n\n report_dir.push(\"tarpaulin\");\n\n if report_dir.exists() {\n\n // is report there?\n\n report_dir.push(\"coverage.json\");\n\n let file = File::open(&report_dir).ok()?;\n\n let reader = BufReader::new(file);\n\n serde_json::from_reader(reader).ok()\n\n } else {\n\n // make directory\n\n std::fs::create_dir(&report_dir)\n\n .unwrap_or_else(|e| error!(\"Failed to create report directory: {}\", e));\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/report/mod.rs", 
"rank": 42, "score": 214311.24328896625 }, { "content": "pub fn debug_printout(result: &HashMap<PathBuf, LineAnalysis>, config: &Config) {\n\n if config.debug {\n\n for (ref path, ref analysis) in result {\n\n trace!(\n\n \"Source analysis for {}\",\n\n config.strip_base_dir(path).display()\n\n );\n\n let mut lines = Vec::new();\n\n for l in &analysis.ignore {\n\n match l {\n\n Lines::All => {\n\n lines.clear();\n\n trace!(\"All lines are ignorable\");\n\n break;\n\n }\n\n Lines::Line(i) => {\n\n lines.push(i);\n\n }\n\n }\n\n }\n", "file_path": "src/source_analysis.rs", "rank": 43, "score": 213253.08445788856 }, { "content": "pub fn request_trace() -> Result<()> {\n\n traceme()\n\n // Ok(())\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 44, "score": 212976.7559286423 }, { "content": "pub fn request_trace() -> Result<()> {\n\n traceme()\n\n}\n\n\n", "file_path": "src/ptrace_control/linux.rs", "rank": 45, "score": 212976.7559286423 }, { "content": "pub fn destructuring_match(x: u32, y: u32) {\n\n let _y = match (x, y) {\n\n (1, _) => 1,\n\n (_, 1) => 1,\n\n (2, 2) => 2,\n\n _ => 0,\n\n };\n\n\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_works() {\n\n check_match(0);\n\n check_match(2);\n\n check_match(999999);\n\n check_match(8);\n\n check_match(9998);\n\n\n\n destructuring_match(1, 3);\n\n destructuring_match(2, 1);\n\n destructuring_match(2, 2);\n\n destructuring_match(3, 2);\n\n }\n\n}\n", "file_path": "tests/data/matches/src/lib.rs", "rank": 46, "score": 208224.66983855277 }, { "content": "pub fn generate_tracemap(\n\n project: &Workspace,\n\n test: &Path,\n\n analysis: &HashMap<PathBuf, LineAnalysis>,\n\n config: &Config,\n\n) -> io::Result<TraceMap> {\n\n let manifest = project.root();\n\n let file = open_symbols_file(test)?;\n\n let file = unsafe { MmapOptions::new().map(&file)? 
};\n\n if let Ok(obj) = OFile::parse(&*file) {\n\n let endian = if obj.is_little_endian() {\n\n RunTimeEndian::Little\n\n } else {\n\n RunTimeEndian::Big\n\n };\n\n if let Ok(result) = get_line_addresses(endian, manifest, &obj, &analysis, config) {\n\n Ok(result)\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n", "file_path": "src/test_loader.rs", "rank": 47, "score": 194683.89463181846 }, { "content": "#[test]\n\npub fn a() {\n\n futures::executor::ThreadPool::new();\n\n}\n\n\n", "file_path": "tests/data/futures/src/lib.rs", "rank": 48, "score": 193531.08725634217 }, { "content": "#[test]\n\npub fn d() {\n\n futures::executor::ThreadPool::new();\n\n}\n", "file_path": "tests/data/futures/src/lib.rs", "rank": 49, "score": 193531.08725634217 }, { "content": "#[test]\n\npub fn b() {\n\n futures::executor::ThreadPool::new();\n\n}\n\n\n", "file_path": "tests/data/futures/src/lib.rs", "rank": 50, "score": 193531.08725634214 }, { "content": "/// Amount of data coverable in the provided slice traces\n\npub fn amount_coverable(traces: &[&Trace]) -> usize {\n\n let mut result = 0usize;\n\n for t in traces {\n\n result += match t.stats {\n\n CoverageStat::Branch(_) => 2usize,\n\n CoverageStat::Condition(ref x) => x.len() * 2usize,\n\n _ => 1usize,\n\n };\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/traces.rs", "rank": 51, "score": 186477.7994222305 }, { "content": "/// Amount of data covered in the provided trace slice\n\npub fn amount_covered(traces: &[&Trace]) -> usize {\n\n let mut result = 0usize;\n\n for t in traces {\n\n result += match t.stats {\n\n CoverageStat::Branch(ref x) => (x.been_true as usize) + (x.been_false as usize),\n\n CoverageStat::Condition(ref x) => x.iter().fold(0, |acc, ref x| {\n\n acc + (x.been_true as usize) + (x.been_false as usize)\n\n }),\n\n CoverageStat::Line(ref x) => (*x > 0) as usize,\n\n };\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/traces.rs", "rank": 52, "score": 186477.7994222305 }, { "content": "pub fn 
coverage_percentage(traces: &[&Trace]) -> f64 {\n\n (amount_covered(traces) as f64) / (amount_coverable(traces) as f64)\n\n}\n\n\n\n/// Stores all the program traces mapped to files and provides an interface to\n\n/// add, query and change traces.\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\npub struct TraceMap {\n\n /// Traces in the program mapped to the given file\n\n traces: BTreeMap<PathBuf, Vec<Trace>>,\n\n}\n\n\n\nimpl TraceMap {\n\n /// Create a new TraceMap\n\n pub fn new() -> TraceMap {\n\n TraceMap {\n\n traces: BTreeMap::new(),\n\n }\n\n }\n\n\n", "file_path": "src/traces.rs", "rank": 53, "score": 186469.64768822715 }, { "content": "pub fn hello() {\n\n println!(\"Hello world\");\n\n println!(\"I'm never tested\");\n\n}\n", "file_path": "tests/data/simple_project/src/unused.rs", "rank": 54, "score": 185270.83560234983 }, { "content": "fn render_packages(config: &Config, traces: &TraceMap) -> Vec<Package> {\n\n let dirs: HashSet<&Path> = traces\n\n .files()\n\n .into_iter()\n\n .filter_map(|x| x.parent())\n\n .collect();\n\n\n\n dirs.into_iter()\n\n .map(|x| render_package(config, traces, x))\n\n .collect()\n\n}\n\n\n", "file_path": "src/report/cobertura.rs", "rank": 55, "score": 184582.37142136513 }, { "content": "// TODO: Cobertura distinguishes between lines outside methods, and methods\n\n// (which also contain lines). 
As there is currently no way to get traces from\n\n// a particular function only, all traces are put into lines, and the vector\n\n// of methods is empty.\n\n//\n\n// Until this is fixed, the render_method function will panic if called, as it\n\n// cannot be properly implemented.\n\n//\n\nfn render_class(config: &Config, traces: &TraceMap, file: &Path) -> Class {\n\n let name = file\n\n .file_stem()\n\n .map(|x| x.to_str().unwrap())\n\n .unwrap_or_default()\n\n .to_string();\n\n\n\n let file_name = config.strip_base_dir(file).to_str().unwrap().to_string();\n\n\n\n let covered = traces.covered_in_path(file) as f64;\n\n let line_rate = covered / traces.coverable_in_path(file) as f64;\n\n let lines = traces\n\n .get_child_traces(file)\n\n .iter()\n\n .map(|x| render_line(x))\n\n .collect();\n\n\n\n Class {\n\n name: name,\n\n file_name: file_name,\n\n line_rate: line_rate,\n\n branch_rate: 0.0,\n\n complexity: 0.0,\n\n lines: lines,\n\n methods: vec![],\n\n }\n\n}\n\n\n", "file_path": "src/report/cobertura.rs", "rank": 56, "score": 179199.5306399568 }, { "content": "fn render_package(config: &Config, traces: &TraceMap, pkg: &Path) -> Package {\n\n let name = config.strip_base_dir(pkg).to_str().unwrap().to_string();\n\n\n\n let line_cover = traces.covered_in_path(pkg) as f64;\n\n let line_rate = line_cover / (traces.coverable_in_path(pkg) as f64);\n\n\n\n Package {\n\n name: name,\n\n line_rate: line_rate,\n\n branch_rate: 0.0,\n\n complexity: 0.0,\n\n classes: render_classes(config, traces, pkg),\n\n }\n\n}\n\n\n", "file_path": "src/report/cobertura.rs", "rank": 57, "score": 179192.6345154225 }, { "content": "fn get_task_port(pid: Pid) -> Result<vm_task_entry_t> {\n\n let mut port: MaybeUninit<vm_task_entry_t> = MaybeUninit::uninit();\n\n unsafe {\n\n let res = mach::traps::task_for_pid(\n\n mach::traps::mach_task_self(),\n\n pid.into(),\n\n port.as_mut_ptr()\n\n );\n\n if res == KERN_SUCCESS {\n\n let port = port.assume_init();\n\n Ok(port)\n\n } else {\n\n 
println!(\"KERN RET FAIL : {}\", res);\n\n Err(Error::from_errno(Errno::UnknownErrno))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 58, "score": 178383.25841019014 }, { "content": "pub fn hello_to(s: &str) -> String {\n\n format!(\"Hello {}\", s)\n\n}\n", "file_path": "tests/data/example_test/src/lib.rs", "rank": 59, "score": 178247.51973828042 }, { "content": "#[cfg(test)]\n\n#[derive(Default)]\n\nstruct Foo {\n\n x: f64,\n\n y: Option<i32>\n\n}\n\n\n\n\n", "file_path": "tests/data/lets/src/lib.rs", "rank": 60, "score": 176581.01029313932 }, { "content": "#[test]\n\nfn error_if_compilation_fails() {\n\n let mut config = Config::default();\n\n let test_dir = get_test_path(\"compile_fail\");\n\n env::set_current_dir(&test_dir).unwrap();\n\n config.manifest = test_dir;\n\n config.manifest.push(\"Cargo.toml\");\n\n\n\n let result = launch_tarpaulin(&config);\n\n\n\n assert!(result.is_err());\n\n\n\n if let Err(RunError::TestCompile(_)) = result {\n\n } else {\n\n panic!(\"Expected a TestCompile error\");\n\n }\n\n}\n", "file_path": "tests/compile_fail.rs", "rank": 61, "score": 175065.24850747018 }, { "content": "///This is a doc comment\n\n/// ```\n\n/// use doc_coverage::uncovered_by_tests;\n\n/// assert_eq!(4, uncovered_by_tests(4));\n\n/// ```\n\npub fn uncovered_by_tests(x: i32) -> i32 {\n\n let y = x.pow(2);\n\n y / x\n\n}\n", "file_path": "tests/data/doc_coverage/src/lib.rs", "rank": 62, "score": 174992.89938768148 }, { "content": "pub fn junk() -> Vec<u8> {\n\n Vec::<u8>::with_capacity(max(1, min(10, 0)))\n\n}\n\n\n\n\n", "file_path": "tests/data/paths/src/lib.rs", "rank": 63, "score": 174714.93426087344 }, { "content": "fn render_classes(config: &Config, traces: &TraceMap, pkg: &Path) -> Vec<Class> {\n\n traces\n\n .files()\n\n .iter()\n\n .filter(|x| x.parent() == Some(pkg))\n\n .map(|x| render_class(config, traces, x))\n\n .collect()\n\n}\n\n\n", "file_path": "src/report/cobertura.rs", "rank": 64, "score": 174160.9178427305 }, { 
"content": "fn check_prots(task: vm_task_entry_t, address: u64) -> Result<(u64, i32)> {\n\n let mut address = address;\n\n let mut region_info = vm_region_basic_info_64::default();\n\n let mut size = mem::size_of_val(&region_info).try_into().unwrap();\n\n let mut sz = 8;\n\n let mut name = 1;\n\n println!(\"REGION\");\n\n let res: KernelRet = unsafe { mach_vm_region(\n\n task,\n\n &mut address,\n\n &mut sz,\n\n VM_REGION_BASIC_INFO_64,\n\n &mut region_info as *mut _ as *mut i32,\n\n &mut size,\n\n &mut name\n\n ).into()};\n\n let prot = region_info.protection;\n\n let max_prot = region_info.max_protection;\n\n println!(\"Protection = {}\", prot);\n\n println!(\"Max protection = {}\", max_prot);\n\n println!(\"Region started at addr : {}\", address);\n\n Ok((address, prot))\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 65, "score": 172114.04937443946 }, { "content": "pub fn branch_test_one(x: i32) -> i32 {\n\n if x > 5 {\n\n 10\n\n } else {\n\n 5\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use branch_test_one;\n\n #[test]\n\n fn bad_test() {\n\n branch_test_one(2);\n\n }\n\n}\n", "file_path": "tests/data/simple_project/src/lib.rs", "rank": 66, "score": 171876.3670123777 }, { "content": "pub fn limit_affinity() -> nix::Result<()> {\n\n let core_ids = core_affinity::get_core_ids().unwrap();\n\n core_affinity::set_for_current(core_ids[0]);\n\n Ok(())\n\n}", "file_path": "src/process_handling/mac.rs", "rank": 67, "score": 171692.67981312913 }, { "content": "pub fn limit_affinity() -> nix::Result<()> {\n\n let mut cpu_set = CpuSet::new();\n\n cpu_set.set(0)?;\n\n let this = Pid::this();\n\n sched_setaffinity(this, &cpu_set)\n\n}\n\n\n", "file_path": "src/process_handling/linux.rs", "rank": 68, "score": 171692.67981312913 }, { "content": "/// Tracing a process on an OS will have platform specific code.\n\n/// Structs containing the platform specific datastructures should\n\n/// provide this trait with an implementation of the 
handling of\n\n/// the given states.\n\npub trait StateData {\n\n /// Starts the tracing. Returns None while waiting for\n\n /// start. Statemachine then checks timeout\n\n fn start(&mut self) -> Result<Option<TestState>, RunError>;\n\n /// Initialises test for tracing returns next state\n\n fn init(&mut self) -> Result<TestState, RunError>;\n\n /// Waits for notification from test executable that there's\n\n /// something to do. Selects the next appropriate state if there's\n\n /// something to do otherwise None\n\n fn wait(&mut self) -> Result<Option<TestState>, RunError>;\n\n /// Handle a stop in the test executable. Coverage data will\n\n /// be collected here as well as other OS specific functions\n\n fn stop(&mut self) -> Result<TestState, RunError>;\n\n}\n\n\n\nimpl TestState {\n\n /// Convenience function used to check if the test has finished or errored\n\n pub fn is_finished(self) -> bool {\n\n match self {\n\n TestState::End(_) => true,\n", "file_path": "src/statemachine/mod.rs", "rank": 69, "score": 171375.00422883456 }, { "content": "#[test]\n\nfn let_statements() {\n\n let _x = 5;\n\n let _x =\n\n 5;\n\n let _x\n\n =\n\n 5;\n\n let _x:\n\n i32 \n\n =\n\n 5;\n\n\n\n\n\n let _y: Foo = Foo::default();\n\n let _y: \n\n Foo \n\n = Foo::default();\n\n}\n", "file_path": "tests/data/lets/src/lib.rs", "rank": 70, "score": 169998.81590301727 }, { "content": "fn main() {\n\n let _ = only_ran_in_examples(vec![1, 2, 43, 4, 5]);\n\n}\n", "file_path": "tests/data/all_test_types/examples/example_test.rs", "rank": 71, "score": 169997.58146116597 }, { "content": "#[test]\n\nfn return_statements() {\n\n early_return(true);\n\n early_return(false);\n\n\n\n is_even(1);\n\n is_even(2);\n\n}\n", "file_path": "tests/data/returns/src/lib.rs", "rank": 72, "score": 169971.60139935053 }, { "content": "#[test]\n\nfn struct_exprs() {\n\n let _ = Foo::new();\n\n let _x = Foo {\n\n x: 6,\n\n y: vec![\"Hello\".to_string(),\n\n \"world\".to_string(),\n\n ],\n\n z: Some(\n\n 0.0)\n\n 
};\n\n\n\n let _x = Foo {\n\n x: 5,\n\n ..Default::default()\n\n };\n\n}\n", "file_path": "tests/data/structs/src/lib.rs", "rank": 73, "score": 169794.48399146466 }, { "content": "fn set_prot_flag(task: vm_task_entry_t, address: u64, prots: i32) -> Result<()> {\n\n // let (addr, prot) = check_prots(task, address)?;\n\n // if prot & prots == prots {\n\n // println!(\"Protections already correctly set!\");\n\n // return Ok(());\n\n // }\n\n unsafe { \n\n let res: KernelRet = mach_vm_protect(\n\n task,\n\n address,\n\n 8 as u64,\n\n 0,\n\n prots\n\n ).into();\n\n match res {\n\n KernelRet::Success => {\n\n Ok(())\n\n },\n\n _ => {\n\n eprintln!(\"Kernel returned {:?}\", res);\n\n // let (addr, prot) = check_prots(task, addr)?;\n\n Err(Error::from_errno(res.into()))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ptrace_control/mac.rs", "rank": 74, "score": 169524.84979947863 }, { "content": "#[cfg(target_os = \"macos\")]\n\nfn open_symbols_file(test: &Path) -> io::Result<File> {\n\n let symbs = test.with_extension(\"dSYM\").join(\"Contents/Resources/DWARF\").join(test.file_name().unwrap());\n\n File::open(&symbs)\n\n}\n\n\n", "file_path": "src/test_loader.rs", "rank": 75, "score": 168480.1346789186 }, { "content": "/// Finds lines from the raw string which are ignorable.\n\n/// These are often things like close braces, semi colons that may regiser as\n\n/// false positives.\n\nfn find_ignorable_lines(content: &str, analysis: &mut LineAnalysis) {\n\n let lines = content\n\n .lines()\n\n .enumerate()\n\n .filter(|&(_, x)| !x.chars().any(|x| !\"(){}[]?;\\t ,\".contains(x)))\n\n .map(|(i, _)| i + 1)\n\n .collect::<Vec<usize>>();\n\n analysis.add_to_ignore(&lines);\n\n\n\n let lines = content\n\n .lines()\n\n .enumerate()\n\n .filter(|&(_, x)| {\n\n let mut x = x.to_string();\n\n x.retain(|c| !c.is_whitespace());\n\n x == \"}else{\"\n\n })\n\n .map(|(i, _)| i + 1)\n\n .collect::<Vec<usize>>();\n\n analysis.add_to_ignore(&lines);\n\n}\n\n\n", "file_path": 
"src/source_analysis.rs", "rank": 76, "score": 162758.86537548096 }, { "content": "fn visit_struct_expr(structure: &ExprStruct, analysis: &mut LineAnalysis) -> SubResult {\n\n let mut cover: HashSet<usize> = HashSet::new();\n\n for field in structure.fields.pairs() {\n\n let first = match field {\n\n Pair::Punctuated(t, _) => t,\n\n Pair::End(t) => t,\n\n };\n\n let span = match first.member {\n\n Member::Named(ref i) => i.span(),\n\n Member::Unnamed(ref i) => i.span,\n\n };\n\n match first.expr {\n\n Expr::Lit(_) | Expr::Path(_) => {}\n\n _ => {\n\n cover.insert(span.start().line);\n\n }\n\n }\n\n }\n\n let x = get_line_range(structure)\n\n .filter(|x| !cover.contains(&x))\n\n .collect::<Vec<usize>>();\n\n analysis.add_to_ignore(&x);\n\n // struct expressions are never unreachable by themselves\n\n SubResult::Ok\n\n}\n\n\n", "file_path": "src/source_analysis.rs", "rank": 77, "score": 161558.47063230333 }, { "content": "/// Returns the coverage statistics for a test executable in the given workspace\n\npub fn get_test_coverage(\n\n project: &Workspace,\n\n package: Option<&Package>,\n\n test: &Path,\n\n analysis: &HashMap<PathBuf, LineAnalysis>,\n\n config: &Config,\n\n can_quiet: bool,\n\n ignored: bool,\n\n) -> Result<Option<(TraceMap, i32)>, RunError> {\n\n if !test.exists() {\n\n return Ok(None);\n\n }\n\n if let Err(e) = limit_affinity() {\n\n println!(\"Failed to set processor affinity {}\", e);\n\n }\n\n match fork() {\n\n Ok(ForkResult::Parent { child }) => {\n\n match collect_coverage(project, test, child, analysis, config) {\n\n Ok(t) => Ok(Some(t)),\n\n Err(e) => Err(RunError::TestCoverage(e.to_string())),\n", "file_path": "src/lib.rs", "rank": 78, "score": 158624.71820880624 }, { "content": "fn early_return(i: bool) -> i32 {\n\n if i {\n\n return 1\n\n }\n\n\n\n 0\n\n}\n\n\n", "file_path": "tests/data/returns/src/lib.rs", "rank": 79, "score": 157783.10290462436 }, { "content": "/// Returns a list of files and line numbers to ignore (not indexes!)\n\npub 
fn get_line_analysis(project: &Workspace, config: &Config) -> HashMap<PathBuf, LineAnalysis> {\n\n let mut result: HashMap<PathBuf, LineAnalysis> = HashMap::new();\n\n\n\n let mut ignored_files: HashSet<PathBuf> = HashSet::new();\n\n\n\n let walker = WalkDir::new(project.root()).into_iter();\n\n for e in walker\n\n .filter_entry(|e| !is_target_folder(e, project.root()))\n\n .filter_map(|e| e.ok())\n\n .filter(|e| is_source_file(e))\n\n {\n\n if !ignored_files.contains(e.path()) {\n\n analyse_package(\n\n e.path(),\n\n project.root(),\n\n &config,\n\n &mut result,\n\n &mut ignored_files,\n\n );\n\n } else {\n", "file_path": "src/source_analysis.rs", "rank": 80, "score": 156703.75529431074 }, { "content": "fn visit_match(mat: &ExprMatch, ctx: &Context, analysis: &mut LineAnalysis) -> SubResult {\n\n // a match with some arms is unreachable iff all its arms are unreachable\n\n let mut reachable_arm = false;\n\n for arm in &mat.arms {\n\n if let SubResult::Ok = process_expr(&arm.body, ctx, analysis) {\n\n reachable_arm = true\n\n }\n\n }\n\n if !reachable_arm {\n\n analysis.ignore_tokens(mat);\n\n SubResult::Unreachable\n\n } else {\n\n SubResult::Ok\n\n }\n\n}\n\n\n", "file_path": "src/source_analysis.rs", "rank": 81, "score": 155209.74340257654 }, { "content": "fn visit_return(ret: &ExprReturn, ctx: &Context, analysis: &mut LineAnalysis) -> SubResult {\n\n let check_cover = check_attr_list(&ret.attrs, ctx, analysis);\n\n if check_cover {\n\n for a in &ret.attrs {\n\n analysis.ignore_tokens(a);\n\n }\n\n } else {\n\n analysis.ignore_tokens(ret);\n\n }\n\n SubResult::Ok\n\n}\n\n\n", "file_path": "src/source_analysis.rs", "rank": 82, "score": 155186.64657903338 }, { "content": "pub fn create_state_machine<'a>(\n\n test: Pid,\n\n traces: &'a mut TraceMap,\n\n config: &'a Config,\n\n) -> (TestState, LinuxData<'a>) {\n\n let mut data = LinuxData::new(traces, config);\n\n data.parent = test;\n\n (TestState::start_state(), data)\n\n}\n\n\n\npub type UpdateContext = 
(TestState, TracerAction<ProcessInfo>);\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct ProcessInfo {\n\n pid: Pid,\n\n signal: Option<Signal>,\n\n}\n\n\n\nimpl ProcessInfo {\n\n fn new(pid: Pid, signal: Option<Signal>) -> Self {\n", "file_path": "src/statemachine/linux.rs", "rank": 84, "score": 153517.21417353948 }, { "content": "pub fn check_percentage(project_name: &str, minimum_coverage: f64, has_lines: bool) {\n\n let config = Config::default();\n\n check_percentage_with_config(project_name, minimum_coverage, has_lines, config);\n\n}\n\n\n", "file_path": "tests/mod.rs", "rank": 85, "score": 152921.74004271333 }, { "content": "fn is_even(i: i32) -> bool {\n\n if i%2 == 0 {\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "tests/data/returns/src/lib.rs", "rank": 86, "score": 151289.2940915555 }, { "content": "#[derive(Clone, Default, Debug)]\n\nstruct Foo {\n\n x: i32,\n\n y: Vec<String>,\n\n z: Option<f64>,\n\n}\n\n\n\nimpl Foo {\n\n fn new() -> Self {\n\n Foo {\n\n x:5,\n\n y: vec![\"Hello\".to_string()],\n\n z: None\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/data/structs/src/lib.rs", "rank": 87, "score": 150270.06786603475 }, { "content": "#[test]\n\nfn only_test_coverage() {\n\n let mut config = Config::default();\n\n config.verbose = true;\n\n config.test_timeout = Duration::from_secs(60);\n\n config.run_types = vec![RunType::Tests];\n\n let restore_dir = env::current_dir().unwrap();\n\n let test_dir = get_test_path(\"all_test_types\");\n\n env::set_current_dir(&test_dir).unwrap();\n\n config.manifest = test_dir.clone();\n\n config.manifest.push(\"Cargo.toml\");\n\n\n\n let (res, ret) = launch_tarpaulin(&config).unwrap();\n\n assert_eq!(ret, 0);\n\n env::set_current_dir(restore_dir).unwrap();\n\n\n\n for f in res.files() {\n\n let f_name = f.file_name().unwrap().to_str().unwrap();\n\n if f_name.contains(\"test\") && !f_name.contains(\"doc\") {\n\n assert!(res.covered_in_path(f) > 0);\n\n } else {\n\n 
assert_eq!(res.covered_in_path(f), 0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/test_types.rs", "rank": 88, "score": 145690.182438231 }, { "content": "fn disable_aslr() -> nix::Result<i32> {\n\n match personality(GET_PERSONA) {\n\n Ok(p) => match personality(i64::from(p) | ADDR_NO_RANDOMIZE) {\n\n ok @ Ok(_) => ok,\n\n err @ Err(..) => err,\n\n },\n\n err @ Err(..) => err,\n\n }\n\n}\n\n\n", "file_path": "src/process_handling/linux.rs", "rank": 89, "score": 144532.73149925112 }, { "content": "use all_test_types::only_bench::*;\n\nuse criterion::{criterion_group, criterion_main, Criterion};\n\n\n", "file_path": "tests/data/all_test_types/benches/bench_fn.rs", "rank": 90, "score": 143635.688610098 }, { "content": "#[test]\n\n#[ignore]\n\nfn only_bench_coverage() {\n\n let mut config = Config::default();\n\n config.verbose = true;\n\n config.test_timeout = Duration::from_secs(60);\n\n config.run_types = vec![RunType::Benchmarks];\n\n let restore_dir = env::current_dir().unwrap();\n\n let test_dir = get_test_path(\"all_test_types\");\n\n env::set_current_dir(&test_dir).unwrap();\n\n config.manifest = test_dir.clone();\n\n config.manifest.push(\"Cargo.toml\");\n\n\n\n let (res, ret) = launch_tarpaulin(&config).unwrap();\n\n assert_eq!(ret, 0);\n\n env::set_current_dir(restore_dir).unwrap();\n\n\n\n for f in res.files() {\n\n let f_name = f.file_name().unwrap().to_str().unwrap();\n\n if f_name.contains(\"bench\") {\n\n assert!(res.covered_in_path(f) > 0);\n\n } else {\n\n assert_eq!(res.covered_in_path(f), 0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/test_types.rs", "rank": 91, "score": 141548.09930079384 }, { "content": "#[test]\n\n#[ignore]\n\nfn only_doctest_coverage() {\n\n let mut config = Config::default();\n\n config.verbose = true;\n\n config.test_timeout = Duration::from_secs(60);\n\n config.run_types = vec![RunType::Doctests];\n\n let restore_dir = env::current_dir().unwrap();\n\n let test_dir = get_test_path(\"all_test_types\");\n\n 
env::set_current_dir(&test_dir).unwrap();\n\n config.manifest = test_dir.clone();\n\n config.manifest.push(\"Cargo.toml\");\n\n\n\n let (res, ret) = launch_tarpaulin(&config).unwrap();\n\n assert_eq!(ret, 0);\n\n env::set_current_dir(restore_dir).unwrap();\n\n\n\n for f in res.files() {\n\n let f_name = f.file_name().unwrap().to_str().unwrap();\n\n if f_name.contains(\"doc\") {\n\n assert!(res.covered_in_path(f) > 0);\n\n } else {\n\n assert_eq!(res.covered_in_path(f), 0);\n\n }\n\n }\n\n}\n", "file_path": "tests/test_types.rs", "rank": 92, "score": 141548.09930079384 }, { "content": "#[test]\n\nfn only_example_coverage() {\n\n let mut config = Config::default();\n\n config.verbose = true;\n\n config.test_timeout = Duration::from_secs(60);\n\n config.run_types = vec![RunType::Examples];\n\n let restore_dir = env::current_dir().unwrap();\n\n let test_dir = get_test_path(\"all_test_types\");\n\n env::set_current_dir(&test_dir).unwrap();\n\n config.manifest = test_dir.clone();\n\n config.manifest.push(\"Cargo.toml\");\n\n\n\n let (res, ret) = launch_tarpaulin(&config).unwrap();\n\n assert_eq!(ret, 0);\n\n env::set_current_dir(restore_dir).unwrap();\n\n\n\n for f in res.files() {\n\n let f_name = f.file_name().unwrap().to_str().unwrap();\n\n if f_name.contains(\"example\") {\n\n assert!(res.covered_in_path(f) > 0);\n\n } else {\n\n assert_eq!(res.covered_in_path(f), 0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/test_types.rs", "rank": 93, "score": 141543.88550217779 }, { "content": "/// Analyse the crates lib.rs for some common false positives\n\nfn analyse_lib_rs(file: &Path, result: &mut HashMap<PathBuf, LineAnalysis>) {\n\n if let Ok(f) = File::open(file) {\n\n let read_file = BufReader::new(f);\n\n if let Some(Ok(first)) = read_file.lines().nth(0) {\n\n if !(first.starts_with(\"pub\") || first.starts_with(\"fn\")) {\n\n let file = file.to_path_buf();\n\n if result.contains_key(&file) {\n\n let l = result.get_mut(&file).unwrap();\n\n l.add_to_ignore(&[1]);\n\n } 
else {\n\n let mut l = LineAnalysis::new();\n\n l.add_to_ignore(&[1]);\n\n result.insert(file, l);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/source_analysis.rs", "rank": 94, "score": 141429.90283197392 }, { "content": "fn setup_environment(config: &Config) {\n\n env::set_var(\"TARPAULIN\", \"1\");\n\n let common_opts =\n\n \" -C relocation-model=dynamic-no-pic -C link-dead-code -C opt-level=0 -C debuginfo=2 \";\n\n let rustflags = \"RUSTFLAGS\";\n\n let mut value = common_opts.to_string();\n\n if config.release {\n\n value = format!(\"{}-C debug-assertions=off \", value);\n\n }\n\n if let Ok(vtemp) = env::var(rustflags) {\n\n value.push_str(vtemp.as_ref());\n\n }\n\n env::set_var(rustflags, value);\n\n // doesn't matter if we don't use it\n\n let rustdoc = \"RUSTDOCFLAGS\";\n\n let mut value = format!(\n\n \"{} --persist-doctests {} -Z unstable-options \",\n\n common_opts, DOCTEST_FOLDER\n\n );\n\n if let Ok(vtemp) = env::var(rustdoc) {\n\n if !vtemp.contains(\"--persist-doctests\") {\n\n value.push_str(vtemp.as_ref());\n\n }\n\n }\n\n env::set_var(rustdoc, value);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 95, "score": 140198.65197489737 }, { "content": "fn visit_impl(impl_blk: &ItemImpl, analysis: &mut LineAnalysis, ctx: &Context) {\n\n let check_cover = check_attr_list(&impl_blk.attrs, ctx, analysis);\n\n if check_cover {\n\n for item in &impl_blk.items {\n\n if let ImplItem::Method(ref i) = *item {\n\n if check_attr_list(&i.attrs, ctx, analysis) {\n\n analysis.cover_token_stream(i.into_token_stream(), Some(ctx.file_contents));\n\n if let SubResult::Unreachable =\n\n process_statements(&i.block.stmts, ctx, analysis)\n\n {\n\n // if the body of this method is unreachable, this means that the method\n\n // cannot be called, and is unreachable\n\n analysis.ignore_tokens(i);\n\n return;\n\n }\n\n\n\n visit_generics(&i.sig.generics, analysis);\n\n analysis.ignore.remove(&Lines::Line(i.span().start().line));\n\n\n\n // Ignore multiple lines of fn 
decl\n", "file_path": "src/source_analysis.rs", "rank": 96, "score": 138000.5305867058 }, { "content": "#[test]\n\nfn if_test() {\n\n let x = 5;\n\n if x%2 == 1 {\n\n println!(\"foo\");\n\n }\n\n if x%2 == x%4 {\n\n }\n\n\n\n if x > 0 &&\n\n x - 1 > 0 {\n\n println!(\"bar\");\n\n }\n\n}\n\n\n", "file_path": "tests/data/ifelse/src/lib.rs", "rank": 97, "score": 137558.56748351213 }, { "content": "#[test]\n\nfn lets_coverage() {\n\n check_percentage(\"lets\", 1.0f64, true);\n\n}\n\n\n", "file_path": "tests/mod.rs", "rank": 98, "score": 136445.86349140725 }, { "content": " /// Current Pid to process\n\n current: Pid,\n\n /// Parent PID of test process\n\n parent: Pid,\n\n /// Map of addresses to breakpoints\n\n breakpoints: HashMap<u64, Breakpoint>,\n\n /// Instrumentation points in code with associated coverage data\n\n traces: &'a mut TraceMap,\n\n /// Program config\n\n config: &'a Config,\n\n /// Thread count. Hopefully getting rid of in future\n\n thread_count: isize,\n\n}\n\n\n\nimpl<'a> StateData for LinuxData<'a> {\n\n fn start(&mut self) -> Result<Option<TestState>, RunError> {\n\n match waitpid(self.current, Some(WaitPidFlag::WNOHANG)) {\n\n Ok(WaitStatus::StillAlive) => Ok(None),\n\n Ok(sig @ WaitStatus::Stopped(_, Signal::SIGTRAP)) => {\n\n if let WaitStatus::Stopped(child, _) = sig {\n", "file_path": "src/statemachine/linux.rs", "rank": 99, "score": 71.69518257087195 } ]
Rust
tests/read_write_pdbs.rs
douweschulte/pdbtbx
4495b3af56d88b72f27e5d9f17848bcbdec34381
use pdbtbx::*; use std::path::Path; use std::time::Instant; use std::{env, fs}; #[test] fn run_pdbs() { let current_dir = env::current_dir().unwrap(); let pdb_dir = current_dir.as_path().join(Path::new("example-pdbs")); let dump_dir = current_dir .as_path() .join(Path::new("dump")) .into_os_string() .into_string() .unwrap() + &String::from(std::path::MAIN_SEPARATOR); let _ = fs::create_dir(dump_dir.clone()); println!("{:?}", pdb_dir); save_invalid_name(); save_pdb_strict(); save_mmcif_strict(); for entry in fs::read_dir(pdb_dir).unwrap() { let entry = entry.unwrap(); let path = entry.path(); let metadata = fs::metadata(&path).unwrap(); if metadata.is_file() { do_something( &path.clone().into_os_string().into_string().unwrap(), &dump_dir, &path .file_stem() .unwrap() .to_os_string() .into_string() .unwrap(), ); } } } fn do_something(file: &str, folder: &str, name: &str) { println!("Working on file: {}", file); let now = Instant::now(); let (pdb, errors) = open(file, StrictnessLevel::Loose).unwrap(); let time = now.elapsed(); for error in errors { println!("{}", error); } println!( "Found {} atoms, in {} residues, in {} chains, in {} models it all took {} ms", pdb.total_atom_count(), pdb.total_residue_count(), pdb.total_chain_count(), pdb.model_count(), time.as_millis() ); assert!(pdb.total_atom_count() != 0, "No atoms found"); println!("PDB parsed"); let mut avg = 0.0; let mut total_back = 0; let mut avg_back = 0.0; let mut total_side = 0; let mut avg_side = 0.0; println!("Set values"); for hierarchy in pdb.atoms_with_hierarchy() { avg += hierarchy.atom().b_factor(); if hierarchy.is_backbone() { total_back += 1; avg_back += hierarchy.atom().b_factor(); } else { total_side += 1; avg_side += hierarchy.atom().b_factor(); } } println!("Counted for averages"); avg /= (total_back + total_side) as f64; avg_back /= total_back as f64; avg_side /= total_side as f64; println!("Found averages"); println!( "Average B factor: Total: {:.3}, Backbone: {:.3}, Sidechains: {:.3}", 
avg, avg_back, avg_side ); if validate_pdb(&pdb) .iter() .all(|a| !a.fails(StrictnessLevel::Medium)) { save( &pdb, &(folder.to_string() + name + ".pdb"), StrictnessLevel::Loose, ) .expect("PDB resave not successful"); let (_saved_pdb, _) = open( &(folder.to_string() + name + ".pdb"), StrictnessLevel::Loose, ) .expect("PDB reparse not successful"); } save( &pdb, &(folder.to_string() + name + ".cif"), StrictnessLevel::Loose, ) .expect("mmCIF resave not successful"); let (_saved_mmcif, _) = open( &(folder.to_string() + name + ".cif"), StrictnessLevel::Loose, ) .expect("mmCIF reparse not successful"); } fn save_invalid_name() { let name = env::current_dir() .unwrap() .as_path() .join(Path::new("dump")) .join(Path::new("save_test.name")) .into_os_string() .into_string() .unwrap(); let res = save(&PDB::new(), &name, StrictnessLevel::Loose); assert!(res.is_err()); let err = res.unwrap_err(); assert_eq!(err.len(), 1); assert_eq!(err[0].short_description(), "Incorrect extension") } fn save_pdb_strict() { let name = env::current_dir() .unwrap() .as_path() .join(Path::new("dump")) .join(Path::new("save_test.pdb")) .into_os_string() .into_string() .unwrap(); let atom = Atom::new(false, 0, "H", 0.0, 0.0, 0.0, 0.0, 0.0, "H", 0).unwrap(); let mut model = Model::new(0); model.add_atom(atom, "A", (0, None), ("LYS", None)); let mut pdb = PDB::new(); pdb.add_model(model); let res = save(&pdb, &name, StrictnessLevel::Strict); assert!(res.is_ok()); let (_pdb, errors) = crate::open(&name, StrictnessLevel::Strict).unwrap(); assert_eq!(errors.len(), 0); } fn save_mmcif_strict() { let name = env::current_dir() .unwrap() .as_path() .join(Path::new("dump")) .join(Path::new("save_test.cif")) .into_os_string() .into_string() .unwrap(); let atom = Atom::new(false, 0, "H", 0.0, 0.0, 0.0, 0.0, 0.0, "H", 0).unwrap(); let mut model = Model::new(0); model.add_atom(atom, "A", (0, None), ("LYS", None)); let mut pdb = PDB::new(); pdb.add_model(model); let res = save(&pdb, &name, 
StrictnessLevel::Strict); println!("{:?}", res); assert!(res.is_ok()); let (_pdb, errors) = crate::open(&name, StrictnessLevel::Strict).unwrap(); assert_eq!(errors.len(), 0); }
use pdbtbx::*; use std::path::Path; use std::time::Instant; use std::{env, fs}; #[test] fn run_pdbs() { let current_dir = env::current_dir().unwrap(); let pdb_dir = current_dir.as_path().join(Path::new("example-pdbs")); let dump_dir = current_dir .as_path() .join(Path::new("dump")) .into_os_string() .into_string() .unwrap() + &String::from(std::path::MAIN_SEPARATOR); let _ = fs::create_dir(dump_dir.clone()); println!("{:?}", pdb_dir); save_invalid_name(); save_pdb_strict(); save_mmcif_strict(); for entry in fs::read_dir(pdb_dir).unwrap() { let entry = entry.unwrap(); let path = entry.path(); let metadata = fs::metadata(&path).unwrap(); if metadata.is_file() { do_something( &path.clone().into_os_string().into_string().unwrap(), &dump_dir, &path .file_stem() .unwrap() .to_os_string() .into_string() .unwrap(), ); } } } fn do_something(file: &str, folder: &str, name: &str) { println!("Working on file: {}", file); let now = Instant::now(); let (pdb, errors) = open(file, StrictnessLevel::Loose).unwrap(); let time = now.elapsed(); for error in errors { println!("{}", error); } println!( "Found {} atoms, in {} residues, in {} chains, in {} models it all took {} ms", pdb.total_atom_count(), pdb.total_residue_count(), pdb.total_chain_count(), pdb.model_count(), time.as_millis() ); assert!(pdb.total_atom_count() != 0, "No atoms found"); println!("PDB parsed"); let mut avg = 0.0; let mut total_back = 0; let mut avg_back = 0.0; let mut total_side = 0; let mut avg_side = 0.0; println!("Set values"); for hierarchy in pdb.atoms_with_hierarchy() { avg += hierarchy.atom().b_factor(); if hierarchy.is_backbone() { total_back += 1; avg_back += hierarchy.atom().b_factor(); } else { total_side += 1; avg_side += hierarchy.atom().b_factor(); } } println!("Counted for averages"); avg /= (total_back + total_side) as f64; avg_back /= total_back as f64; avg_side /= total_side as f64; println!("Found averages"); println!( "Average B factor: Total: {:.3}, Backbone: {:.3}, Sidechains: {:.3}", 
avg, avg_back, avg_side ); if validate_pdb(&pdb) .iter() .all(|a| !a.fails(StrictnessLevel::Medium)) { save( &pdb, &(folder.to_string() + name + ".pdb"), StrictnessLevel::Loose, ) .expect("PDB resave not successful"); let (_saved_pdb, _) = open( &(folder.to_string() + name + ".pdb"), StrictnessLevel::Loose, ) .expect("PDB reparse not successful"); } save( &pdb, &(folder.to_string() + name + ".cif"), StrictnessLevel::Loose, ) .expect("mmCIF resave not successful"); let (_saved_mmcif, _) = open( &(folder.to_string() + name + ".cif"), StrictnessLevel::Loose, ) .expect("mmCIF reparse not successful"); } fn save_invalid_name() { let name = env::current_dir() .unwrap() .as_path() .join(Path::new("dump")) .join(Path::new("save_test.name")) .into_os_string() .into_string() .unwrap(); let res = save(&PDB::new(), &name, StrictnessLevel::Loose); assert!(res.is_err()); let err = res.unwrap_err(); assert_eq!(err.len(), 1); assert_eq!(err[0].short_description(), "Incorrect extension") } fn save_pdb_strict() { let name = env::current_dir() .unwrap() .as_path() .join(Path::new("dump")) .join(Path::new("save_test.pdb")) .into_os_string() .into_string() .unwrap(); let atom = Atom::new(false, 0, "H", 0.0, 0.0, 0.0, 0.0, 0.0, "H", 0).unwrap(); let mut model = Model::new(0); model.add_atom(atom, "A", (0, None), ("LYS", None)); let mut pdb = PDB::new(); pdb.add_model(model); let res = save(&pdb, &name, StrictnessLevel::Strict); assert!(res.is_ok()); let (_pdb, errors) = crate::open(&name, StrictnessLevel::Strict).unwrap(); assert_eq!(errors.len(), 0); } fn save_mmcif_strict() { let name = env::current_dir() .unwrap() .
as_path() .join(Path::new("dump")) .join(Path::new("save_test.cif")) .into_os_string() .into_string() .unwrap(); let atom = Atom::new(false, 0, "H", 0.0, 0.0, 0.0, 0.0, 0.0, "H", 0).unwrap(); let mut model = Model::new(0); model.add_atom(atom, "A", (0, None), ("LYS", None)); let mut pdb = PDB::new(); pdb.add_model(model); let res = save(&pdb, &name, StrictnessLevel::Strict); println!("{:?}", res); assert!(res.is_ok()); let (_pdb, errors) = crate::open(&name, StrictnessLevel::Strict).unwrap(); assert_eq!(errors.len(), 0); }
function_block-function_prefix_line
[ { "content": "/// Parse a loop containing atomic data\n\nfn parse_atoms(input: &Loop, pdb: &mut PDB) -> Option<Vec<PDBError>> {\n\n /// These are the columns needed to fill out the PDB correctly\n\n const COLUMNS: &[&str] = &[\n\n \"atom_site.group_PDB\",\n\n \"atom_site.label_atom_id\",\n\n \"atom_site.id\",\n\n \"atom_site.type_symbol\",\n\n \"atom_site.label_comp_id\",\n\n \"atom_site.label_seq_id\",\n\n \"atom_site.label_asym_id\",\n\n \"atom_site.Cartn_x\",\n\n \"atom_site.Cartn_y\",\n\n \"atom_site.Cartn_z\",\n\n \"atom_site.occupancy\",\n\n \"atom_site.B_iso_or_equiv\",\n\n \"atom_site.pdbx_formal_charge\",\n\n ];\n\n /// These are some optional columns with data that will be used but is not required to be present\n\n const OPTIONAL_COLUMNS: &[&str] = &[\n\n \"atom_site.pdbx_PDB_model_num\",\n", "file_path": "src/read/mmcif/parser.rs", "rank": 1, "score": 223864.863657028 }, { "content": "/// Parse a value for a data item or inside a loop\n\nfn parse_value(input: &mut Position<'_>) -> Result<Value, PDBError> {\n\n let start = *input;\n\n trim_comments_and_whitespace(input);\n\n if input.text.is_empty() {\n\n Err(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Empty value\",\n\n \"No text left\",\n\n Context::position(input),\n\n ))\n\n // The following are reserved words, and need to be checked with a branching position as otherwise it would consume the keyword if matched\n\n } else if start_with(&mut input.clone(), \"data_\").is_some()\n\n || start_with(&mut input.clone(), \"global_\").is_some()\n\n || start_with(&mut input.clone(), \"loop_\").is_some()\n\n || start_with(&mut input.clone(), \"save_\").is_some()\n\n || start_with(&mut input.clone(), \"stop_\").is_some()\n\n {\n\n Err(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Use of reserved word\",\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 2, "score": 221177.21956945313 }, { "content": "/// Lexes the general structure of a transformation record (ORIGXn, SCALEn, MTRIXn)\n\nfn 
lex_transformation(linenumber: usize, line: &str) -> ([f64; 4], Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0.0\n\n }\n\n };\n\n let a = check(parse_number(\n\n Context::line(linenumber, line, 10, 10),\n\n &chars[10..20],\n\n ));\n\n let b = check(parse_number(\n\n Context::line(linenumber, line, 20, 10),\n\n &chars[20..30],\n\n ));\n\n let c = check(parse_number(\n\n Context::line(linenumber, line, 30, 10),\n\n &chars[30..40],\n\n ));\n\n let d = check(parse_number(\n\n Context::line(linenumber, line, 45, 10),\n\n &chars[45..55],\n\n ));\n\n\n\n ([a, b, c, d], errors)\n\n}\n\n\n", "file_path": "src/read/pdb/lexer.rs", "rank": 3, "score": 213468.68806576735 }, { "content": "/// Get the Textual content of the value, if available\n\nfn get_text<'a, 'b>(value: &'a Value, context: &'b Context) -> Result<Option<&'a str>, PDBError> {\n\n match value {\n\n Value::Text(t) => Ok(Some(t)),\n\n Value::Inapplicable => Ok(None),\n\n Value::Unknown => Ok(None),\n\n _ => Err(PDBError::new(\n\n ErrorLevel::InvalidatingError,\n\n \"Not text\",\n\n \"\",\n\n context.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/read/mmcif/parser.rs", "rank": 4, "score": 208907.73864181398 }, { "content": "#[allow(clippy::unwrap_used)]\n\nfn validate_models(pdb: &PDB) -> Vec<PDBError> {\n\n let mut errors = Vec::new();\n\n let total_atoms = pdb.model(0).unwrap().atom_count();\n\n let normal_atoms = pdb\n\n .model(0)\n\n .unwrap()\n\n .atoms()\n\n .filter(|a| !a.hetero())\n\n .count();\n\n for model in pdb.models().skip(1) {\n\n if model.atom_count() != total_atoms {\n\n errors.push(PDBError::new(\n\n ErrorLevel::LooseWarning,\n\n \"Invalid Model\",\n\n &format!(\n\n \"Model {} does not have the same amount of atoms (Normal + Hetero) ({}) as the first model ({}).\",\n\n model.serial_number(),\n\n model.atom_count(),\n\n total_atoms\n\n ),\n", 
"file_path": "src/validate.rs", "rank": 5, "score": 200373.43379007338 }, { "content": "/// Get the Numeric content of the value, if available, it also fails on NumericWithUncertainty\n\nfn get_f64(value: &Value, context: &Context) -> Result<Option<f64>, PDBError> {\n\n match value {\n\n Value::Numeric(num) => Ok(Some(*num)),\n\n Value::Inapplicable => Ok(None),\n\n Value::Unknown => Ok(None),\n\n _ => Err(PDBError::new(\n\n ErrorLevel::InvalidatingError,\n\n \"Not a number\",\n\n \"\",\n\n context.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/read/mmcif/parser.rs", "rank": 6, "score": 199998.85743795388 }, { "content": "/// Parse a multiline string <eol>; ...(text)... <eol>;, it assumes the first position is ';'\n\nfn parse_multiline_string<'a>(input: &mut Position<'a>) -> Result<&'a str, PDBError> {\n\n let mut chars_to_remove = 1; //Assume the first position is ';'\n\n let mut eol = false;\n\n let mut iter = input.text.chars().skip(1).peekable();\n\n\n\n while let Some(c) = (&mut iter).next() {\n\n if eol && c == ';' {\n\n let trimmed = &input.text[1..chars_to_remove];\n\n input.text = &input.text[(chars_to_remove + 1)..];\n\n input.column += 1;\n\n return Ok(trimmed);\n\n } else if c == '\\n' {\n\n if let Some('\\r') = (&mut iter).peek() {\n\n chars_to_remove += 1;\n\n let _ = (&mut iter).next();\n\n }\n\n input.line += 1;\n\n input.column = 1;\n\n chars_to_remove += 1;\n\n eol = true;\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 7, "score": 199679.2956777281 }, { "content": "fn bench_remove(mut pdb: PDB) {\n\n pdb.remove_atoms_by(|atom| atom.serial_number() % 2 == 0);\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 8, "score": 198655.0487094259 }, { "content": "fn bench_renumber(mut pdb: PDB) {\n\n pdb.renumber();\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 9, "score": 198655.0487094259 }, { "content": "fn bench_transformation(mut pdb: PDB) {\n\n let transformation = TransformationMatrix::rotation_x(90.0);\n\n 
pdb.apply_transformation(&transformation);\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 10, "score": 198655.04870942593 }, { "content": "/// Parse a main loop item, a data item or a save frame\n\nfn parse_data_item_or_save_frame(input: &mut Position<'_>) -> Result<Item, PDBError> {\n\n let start = *input;\n\n if let Some(()) = start_with(input, \"save_\") {\n\n let mut frame = SaveFrame {\n\n name: parse_identifier(input).to_string(),\n\n items: Vec::new(),\n\n };\n\n while let Ok(item) = parse_data_item(input) {\n\n frame.items.push(item);\n\n }\n\n if let Some(()) = start_with(input, \"save_\") {\n\n Ok(Item::SaveFrame(frame))\n\n } else {\n\n Err(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"No matching \\'save_\\' found\",\n\n \"A save frame was instantiated but not closed (correctly)\",\n\n Context::range(&start, input),\n\n ))\n\n }\n\n } else {\n\n let item = parse_data_item(input)?;\n\n Ok(Item::DataItem(item))\n\n }\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 11, "score": 198245.91921261133 }, { "content": "fn create_waterbox(size: (f64, f64, f64)) -> PDB {\n\n let now = Instant::now();\n\n\n\n let (mut liquid, _errors) =\n\n open_pdb(\"example-pdbs/liquid.pdb\", StrictnessLevel::Loose).unwrap();\n\n\n\n let time = now.elapsed();\n\n\n\n liquid.remove_atoms_by(|a| a.name() != \"O\");\n\n liquid.atoms_mut().for_each(|a| {\n\n a.set_b_factor(50.0).unwrap();\n\n a.set_element(\"O\").unwrap();\n\n });\n\n\n\n println!(\"Time to parse liquid.pdb {}ms\", time.as_millis());\n\n println!(\"The PDB: {}\", liquid);\n\n\n\n let cell = liquid.unit_cell.as_ref().unwrap().size();\n\n let fa = (size.0 / cell.0).ceil() as usize;\n\n let fb = (size.1 / cell.1).ceil() as usize;\n", "file_path": "examples/waterbox.rs", "rank": 13, "score": 196764.5947163836 }, { "content": "#[allow(clippy::unwrap_used)]\n\npub fn save_mmcif_raw<T: Write>(pdb: &PDB, mut sink: BufWriter<T>) {\n\n /// Write a piece of text to the file, has the same structure 
as format!\n\n macro_rules! write {\n\n ($($arg:tt)*) => {\n\n sink.write_fmt(format_args!($($arg)*)).unwrap();\n\n sink.write_all(b\"\\n\").unwrap();\n\n }\n\n }\n\n\n\n let empty = \"?\".to_string();\n\n let name = pdb.identifier.as_ref().unwrap_or(&empty);\n\n\n\n // Header\n\n write!(\n\n \"data_{}\n\n# \n\n_entry.id {} \n\n# \n\n_audit_conform.dict_name mmcif_pdbx.dic \n\n_audit_conform.dict_version 5.338 \n", "file_path": "src/save/mmcif.rs", "rank": 14, "score": 196613.70967034803 }, { "content": "/// Returns if the given atom name is a name for an atom in the backbone of a protein\n\npub fn is_backbone(name: impl AsRef<str>) -> bool {\n\n BACKBONE_NAMES.contains(&name.as_ref())\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 15, "score": 195312.07428224356 }, { "content": "#[allow(clippy::unwrap_used)]\n\npub fn save_pdb_raw<T: Write>(pdb: &PDB, mut sink: BufWriter<T>, level: StrictnessLevel) {\n\n let get_line = |fields: Vec<(usize, &str)>| {\n\n let mut line = String::with_capacity(70);\n\n for (length, text) in fields {\n\n if length > 0 {\n\n let cell = &text[text.len() - cmp::min(length, text.len())..];\n\n let trimmed = cell.trim_start_matches('0');\n\n if !cell.is_empty() && trimmed.is_empty() {\n\n line.push_str(&format!(\"{0:1$}\", \"0\", length));\n\n } else {\n\n line.push_str(&format!(\"{0:1$}\", trimmed, length));\n\n }\n\n } else {\n\n line += text;\n\n }\n\n }\n\n line\n\n };\n\n let mut print_line = |fields: Vec<(usize, &str)>| {\n\n let mut line = get_line(fields);\n", "file_path": "src/save/pdb.rs", "rank": 16, "score": 194750.61628276692 }, { "content": "#[allow(clippy::unwrap_used)]\n\nfn add_bonds(pdb: &mut PDB, bonds: Vec<(Context, LexItem)>) -> Vec<PDBError> {\n\n let mut errors = Vec::new();\n\n for (context, bond) in bonds {\n\n match bond {\n\n LexItem::SSBond(atom1, atom2, ..) 
=> {\n\n let find = |atom: (String, isize, Option<String>, String)| {\n\n pdb.chains()\n\n .find(|c| c.id() == atom.3)\n\n .and_then(|c| {\n\n c.residues()\n\n .find(|r| {\n\n r.serial_number() == atom.1\n\n && r.insertion_code() == atom.2.as_deref()\n\n })\n\n .map(|r| {\n\n r.conformers().find(|c| c.name() == atom.0).map(|c| {\n\n c.atoms().find(|a| a.name() == \"SG\").map(Atom::counter)\n\n })\n\n })\n\n })\n", "file_path": "src/read/pdb/parser.rs", "rank": 17, "score": 192366.5077562771 }, { "content": "/// Adds all MODRES records to the Atoms\n\nfn add_modifications(pdb: &mut PDB, modifications: Vec<(Context, LexItem)>) -> Vec<PDBError> {\n\n let mut errors = Vec::new();\n\n for (context, item) in modifications {\n\n match item {\n\n LexItem::Modres(_, res_name, chain_id, seq_num, insertion_code, std_name, comment) => {\n\n if let Some(chain) = pdb.chains_mut().find(|c| c.id() == chain_id) {\n\n if let Some(residue) = chain\n\n .residues_mut()\n\n .find(|r| r.id() == (seq_num, insertion_code.as_deref()))\n\n {\n\n if let Some(conformer) =\n\n residue.conformers_mut().find(|c| c.name() == res_name)\n\n {\n\n if let Err(e) = conformer.set_modification((std_name, comment)) {\n\n errors.push(PDBError::new(\n\n ErrorLevel::InvalidatingError,\n\n \"Invalid characters\",\n\n &e,\n\n context,\n\n ));\n", "file_path": "src/read/pdb/parser.rs", "rank": 18, "score": 192361.64422028675 }, { "content": "/// Copy all atoms in blank alternative conformers into the other conformers.\n\n/// So if there is a A and B conformer with one atom different, based on the\n\n/// PDB file the generated structs will contain a blank, an A, and a B Conformer\n\n/// so the atoms in the blank constructs will have to be copied to the A and B\n\n/// Conformers.\n\npub fn reshuffle_conformers(pdb: &mut PDB) {\n\n for residue in pdb.residues_mut() {\n\n let count = residue.conformer_count();\n\n if count > 1 {\n\n let mut blank = None;\n\n for (index, conformer) in 
residue.conformers().enumerate() {\n\n if conformer.alternative_location().is_none() {\n\n blank = Some(index);\n\n }\n\n }\n\n #[allow(clippy::unwrap_used, clippy::cast_precision_loss)]\n\n if let Some(index) = blank {\n\n let mut shared = residue.conformer(index).unwrap().clone();\n\n shared\n\n .atoms_mut()\n\n .for_each(|a| a.set_occupancy(a.occupancy() / (count as f64)).unwrap());\n\n residue.remove_conformer(index);\n\n for conformer in residue.conformers_mut() {\n\n conformer.join(shared.clone());\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/validate.rs", "rank": 19, "score": 192085.34504035686 }, { "content": "/// Parse a piece of text enclosed by a char, it assumes the first position also matches the char.\n\n/// It will fail if it finds a newline in the text. SO it can be used for single or double quoted strings.\n\nfn parse_enclosed<'a>(input: &mut Position<'a>, pat: char) -> Result<&'a str, PDBError> {\n\n let mut chars_to_remove = 1; //Assume the first position is 'pat'\n\n\n\n for c in input.text.chars().skip(1) {\n\n if c == pat {\n\n let trimmed = &input.text[1..chars_to_remove];\n\n input.text = &input.text[(chars_to_remove + 1)..];\n\n input.column += chars_to_remove + 1;\n\n return Ok(trimmed);\n\n } else if c == '\\n' || c == '\\r' {\n\n let mut end = *input;\n\n end.text = &input.text[(chars_to_remove + 1)..];\n\n end.column += chars_to_remove + 1;\n\n return Err(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Invalid enclosing\",\n\n &format!(\n\n \"This element was enclosed by \\'{}\\' but the closing delimiter was not found.\",\n\n pat\n\n ),\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 20, "score": 191800.88735726383 }, { "content": "/// Lex a MODEL\n\n/// ## Fails\n\n/// It fails on incorrect numbers for the serial number\n\nfn lex_model(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let number = match parse_number(\n\n Context::line(linenumber, line, 6, line.len() - 
6),\n\n &line.chars().collect::<Vec<char>>()[6..]\n\n .iter()\n\n .collect::<String>()\n\n .trim()\n\n .chars()\n\n .collect::<Vec<char>>()[..],\n\n ) {\n\n Ok(n) => n,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n (LexItem::Model(number), errors)\n\n}\n\n\n", "file_path": "src/read/pdb/lexer.rs", "rank": 21, "score": 188101.00456445082 }, { "content": "/// A trait which defines all functions on a mutable hierarchy which contains Atoms, Conformers, Residues, Chains, and Models.\n\npub trait ContainsAtomConformerResidueChainModelMut:\n\n ContainsAtomConformerResidueChainModel + ContainsAtomConformerResidueChainMut\n\n{\n\n /// Get a mutable reference to the model\n\n fn model_mut(&mut self) -> &mut Model;\n\n}\n\n\n\n#[cfg(feature = \"rstar\")]\n\nuse rstar::{PointDistance, RTreeObject, AABB};\n\n\n\nmacro_rules! impl_hierarchy {\n\n ($($type:ty,)*) => {\n\n $(#[cfg(feature = \"rstar\")]\n\n impl<'a> RTreeObject for $type {\n\n type Envelope = AABB<(f64, f64, f64)>;\n\n\n\n fn envelope(&self) -> Self::Envelope {\n\n self.atom().envelope()\n\n }\n\n }\n", "file_path": "src/structs/hierarchy.rs", "rank": 22, "score": 186470.81231939534 }, { "content": "/// Merge all warnings about long REMARK definitions into a single warning\n\nfn merge_long_remark_warnings(errors: &mut Vec<PDBError>) {\n\n // Weed out all remark too long warnings\n\n let mut remark_too_long = Vec::new();\n\n errors.retain(|error| {\n\n if error.short_description() == \"Remark too long\" {\n\n remark_too_long.push(error.context().clone());\n\n false\n\n } else {\n\n true\n\n }\n\n });\n\n // Merge consecutive warnings into a single context to take up less vertical space\n\n let mut contexts = Vec::new();\n\n let mut lines = Vec::new();\n\n let mut highlights = Vec::new();\n\n let mut last = usize::MAX;\n\n let mut index = 0;\n\n for context in remark_too_long {\n\n if let Context::Line {\n\n linenumber,\n", "file_path": "src/read/pdb/parser.rs", "rank": 23, "score": 184073.95189237007 }, 
{ "content": "/// Parse/lex a CIF file into CIF intermediate structure\n\npub fn lex_cif(text: &str) -> Result<DataBlock, PDBError> {\n\n parse_main(&mut Position {\n\n text,\n\n line: 1,\n\n column: 1,\n\n })\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 24, "score": 182939.90143452404 }, { "content": "/// Check if the input starts with the given pattern, it is case insensitive by\n\n/// lowercasing the input string, so the pattern should be lowercase otherwise\n\n/// it can never match.\n\nfn start_with(input: &mut Position<'_>, pattern: &str) -> Option<()> {\n\n if input.text.len() < pattern.len() {\n\n None\n\n } else {\n\n for (p, c) in pattern.chars().zip(input.text.chars()) {\n\n if p != c.to_ascii_lowercase() {\n\n return None;\n\n }\n\n }\n\n input.text = &input.text[pattern.len()..];\n\n input.column += pattern.len();\n\n Some(())\n\n }\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 25, "score": 180412.5607551869 }, { "content": "fn bench_save_pdb(pdb: PDB) {\n\n save(&pdb, \"dump/dump.pdb\", StrictnessLevel::Loose).unwrap();\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 26, "score": 177665.13212789607 }, { "content": "/// Parse a CIF file\n\nfn parse_main(input: &mut Position<'_>) -> Result<DataBlock, PDBError> {\n\n trim_comments_and_whitespace(input);\n\n parse_data_block(input)\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 27, "score": 177125.41884058883 }, { "content": "fn bench_iteration(pdb: PDB) {\n\n let mut _average = 0.0;\n\n for atom in pdb.atoms() {\n\n _average += atom.b_factor();\n\n }\n\n _average /= pdb.atom_count() as f64;\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 28, "score": 176531.32819481718 }, { "content": "/// Parse a data block, the main item of a CIF file\n\nfn parse_data_block(input: &mut Position<'_>) -> Result<DataBlock, PDBError> {\n\n if start_with(input, \"data_\").is_none() {\n\n return Err(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Data Block 
not opened\",\n\n \"The data block should be opened with \\\"data_\\\".\",\n\n Context::position(input),\n\n ));\n\n }\n\n let identifier = parse_identifier(input);\n\n let mut block = DataBlock {\n\n name: identifier.to_string(),\n\n items: Vec::new(),\n\n };\n\n loop {\n\n trim_comments_and_whitespace(input);\n\n if input.text.is_empty() {\n\n return Ok(block);\n\n }\n\n let item = parse_data_item_or_save_frame(input)?;\n\n block.items.push(item);\n\n }\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 29, "score": 174126.71721394887 }, { "content": "/// Parse a data item, a loop or a single item\n\nfn parse_data_item(input: &mut Position<'_>) -> Result<DataItem, PDBError> {\n\n let start = *input;\n\n trim_comments_and_whitespace(input);\n\n if let Some(()) = start_with(input, \"loop_\") {\n\n let mut loop_value = Loop {\n\n header: Vec::new(),\n\n data: Vec::new(),\n\n };\n\n let mut values = Vec::new();\n\n trim_comments_and_whitespace(input);\n\n\n\n while let Some(()) = start_with(input, \"_\") {\n\n let inner_name = parse_identifier(input);\n\n loop_value.header.push(inner_name.to_string());\n\n trim_comments_and_whitespace(input);\n\n }\n\n\n\n while let Ok(value) = parse_value(input) {\n\n values.push(value);\n\n }\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 30, "score": 174117.51795029885 }, { "content": "fn bench_save_mmcif(pdb: PDB) {\n\n save(&pdb, \"dump/dump.cif\", StrictnessLevel::Loose).unwrap();\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 31, "score": 172708.10978291166 }, { "content": "/// A trait which defines all functions on a hierarchy which contains Atoms, Conformers, Residues, Chains, and Models.\n\npub trait ContainsAtomConformerResidueChainModel: ContainsAtomConformerResidueChain {\n\n /// Get a reference to the model\n\n fn model(&self) -> &Model;\n\n}\n\n\n", "file_path": "src/structs/hierarchy.rs", "rank": 32, "score": 172698.56747536163 }, { "content": "/// Lex a REMARK\n\n/// ## Fails\n\n/// It fails 
on incorrect numbers for the remark-type-number\n\nfn lex_remark(linenumber: usize, line: &str) -> Result<(LexItem, Vec<PDBError>), PDBError> {\n\n let mut errors = Vec::new();\n\n let number = match parse_number(\n\n Context::line(linenumber, line, 7, 3),\n\n &line.chars().collect::<Vec<char>>()[7..10],\n\n ) {\n\n Ok(n) => n,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n if !reference_tables::valid_remark_type_number(number) {\n\n errors.push(PDBError::new(\n\n ErrorLevel::StrictWarning,\n\n \"Remark type number invalid\",\n\n \"The remark-type-number is not valid, see wwPDB v3.30 for all valid numbers.\",\n\n Context::line(linenumber, line, 7, 3),\n\n ));\n\n }\n", "file_path": "src/read/pdb/lexer.rs", "rank": 33, "score": 166488.23803206923 }, { "content": "/// Lex a HEADER\n\n/// ## Fails\n\n/// Fails if the header is too short (below 66 lines)\n\nfn lex_header(linenumber: usize, line: &str) -> Result<(LexItem, Vec<PDBError>), PDBError> {\n\n if line.len() < 66 {\n\n Err(PDBError::new(\n\n ErrorLevel::LooseWarning,\n\n \"Header too short\",\n\n \"The HEADER is too short, the min is 66 characters.\",\n\n Context::line(linenumber, line, 11, line.len() - 11),\n\n ))\n\n } else {\n\n Ok((\n\n LexItem::Header(\n\n line.chars().collect::<Vec<char>>()[10..50]\n\n .iter()\n\n .collect::<String>(),\n\n line.chars().collect::<Vec<char>>()[50..59]\n\n .iter()\n\n .collect::<String>(),\n\n line.chars().collect::<Vec<char>>()[62..66]\n\n .iter()\n\n .collect::<String>(),\n\n ),\n\n Vec::new(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/read/pdb/lexer.rs", "rank": 34, "score": 166483.73403860355 }, { "content": "/// Validates this models specifically for the PDB format\n\npub fn validate_pdb(pdb: &PDB) -> Vec<PDBError> {\n\n let mut errors = Vec::new();\n\n for model in pdb.models() {\n\n if model.serial_number() > 9999 {\n\n errors.push(PDBError::new(\n\n ErrorLevel::LooseWarning,\n\n \"Model serial number too high\",\n\n &format!(\n\n \"Model {} has a serial 
number which is too high, max 9999.\",\n\n model.serial_number()\n\n ),\n\n Context::None,\n\n ));\n\n }\n\n for chain in model.chains() {\n\n if chain.id().len() > 1 {\n\n errors.push(PDBError::new(\n\n ErrorLevel::LooseWarning,\n\n \"Chain id too long\",\n\n &format!(\n", "file_path": "src/validate.rs", "rank": 35, "score": 165937.15986930765 }, { "content": "/// Validate a given PDB file in terms of invariants that should be held up.\n\n/// It returns PDBErrors with the warning messages.\n\n///\n\n/// ## Invariants Tested\n\n/// * With multiple models the models should all contain atoms that correspond.\n\n///\n\n/// ## Invariants Not Tested\n\n/// * Numbering of all structs, serial numbers should be unique. To enforce this the `renumber()` function should be called on the PDB struct.\n\npub fn validate(pdb: &PDB) -> Vec<PDBError> {\n\n // Print warnings/errors and return a bool for success\n\n let mut errors = Vec::new();\n\n if pdb.model_count() > 1 {\n\n errors.append(&mut validate_models(pdb));\n\n }\n\n\n\n if pdb.atoms().next().is_none() {\n\n errors.push(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"No Atoms parsed\", \n\n \"No Atoms could be parsed from the given file. 
Please make sure it is a valid PDB/mmCIF file.\", \n\n Context::None)\n\n )\n\n }\n\n errors\n\n}\n\n\n", "file_path": "src/validate.rs", "rank": 36, "score": 165442.12865226727 }, { "content": "#[test]\n\nfn wrapping_residue_number() {\n\n let (pdb, errors) = pdbtbx::open(\"example-pdbs/large.pdb\", StrictnessLevel::Strict).unwrap();\n\n let pdb_errors = save(&pdb, \"dump/large.pdb\", StrictnessLevel::Loose);\n\n let (pdb2, _) = pdbtbx::open(\"dump/large.pdb\", StrictnessLevel::Strict).unwrap();\n\n print!(\"{:?}\", errors);\n\n print!(\"{:?}\", pdb_errors);\n\n // See that the original file is the same as saved and reopened\n\n assert_eq!(pdb, pdb2);\n\n // See that it is possible to select atom with 'impossible' atom serial numbers according to the PDB definition\n\n // These are made by adding 100000 to the atom serial number every time a wrap is detected (99999 followed by 0)\n\n assert_eq!(\n\n pdb.atoms()\n\n .find(|a| a.serial_number() == 100005)\n\n .unwrap()\n\n .pos(),\n\n (28.212, 27.833, 14.033)\n\n );\n\n assert_eq!(\n\n pdb.atoms()\n\n .find(|a| a.serial_number() == 120830)\n\n .unwrap()\n\n .pos(),\n\n (14.041, 8.886, 15.800)\n\n );\n\n}\n", "file_path": "tests/wrapping_atom_number.rs", "rank": 38, "score": 165091.453655763 }, { "content": "/// Lex a full line. 
It returns a lexed item with errors if it can lex something, otherwise it will only return an error.\n\npub fn lex_line(line: &str, linenumber: usize) -> Result<(LexItem, Vec<PDBError>), PDBError> {\n\n if line.len() > 6 {\n\n match &line[..6] {\n\n \"HEADER\" => lex_header(linenumber, line),\n\n \"REMARK\" => lex_remark(linenumber, line),\n\n \"ATOM \" => lex_atom(linenumber, line, false),\n\n \"ANISOU\" => Ok(lex_anisou(linenumber, line)),\n\n \"HETATM\" => lex_atom(linenumber, line, true),\n\n \"CRYST1\" => Ok(lex_cryst(linenumber, line)),\n\n \"SCALE1\" => Ok(lex_scale(linenumber, line, 0)),\n\n \"SCALE2\" => Ok(lex_scale(linenumber, line, 1)),\n\n \"SCALE3\" => Ok(lex_scale(linenumber, line, 2)),\n\n \"ORIGX1\" => Ok(lex_origx(linenumber, line, 0)),\n\n \"ORIGX2\" => Ok(lex_origx(linenumber, line, 1)),\n\n \"ORIGX3\" => Ok(lex_origx(linenumber, line, 2)),\n\n \"MTRIX1\" => Ok(lex_mtrix(linenumber, line, 0)),\n\n \"MTRIX2\" => Ok(lex_mtrix(linenumber, line, 1)),\n\n \"MTRIX3\" => Ok(lex_mtrix(linenumber, line, 2)),\n\n \"MODEL \" => Ok(lex_model(linenumber, line)),\n\n \"MASTER\" => Ok(lex_master(linenumber, line)),\n", "file_path": "src/read/pdb/lexer.rs", "rank": 40, "score": 162696.1339715881 }, { "content": "/// Lex an ANISOU\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_anisou(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let chars: Vec<char> = line.chars().collect();\n\n let ai: isize = check(parse_number(\n\n Context::line(linenumber, line, 28, 7),\n\n &chars[28..35],\n\n ));\n\n let bi: isize = check(parse_number(\n\n Context::line(linenumber, line, 35, 7),\n\n &chars[35..42],\n\n ));\n\n let ci: isize = check(parse_number(\n\n Context::line(linenumber, line, 42, 7),\n", "file_path": "src/read/pdb/lexer.rs", "rank": 41, "score": 160330.5315887158 }, { "content": 
"/// Lex a CRYST1\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_cryst(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0.0\n\n }\n\n };\n\n let a = check(parse_number(\n\n Context::line(linenumber, line, 6, 9),\n\n &chars[6..15],\n\n ));\n\n let b = check(parse_number(\n\n Context::line(linenumber, line, 15, 9),\n\n &chars[15..24],\n\n ));\n\n let c = check(parse_number(\n\n Context::line(linenumber, line, 24, 9),\n", "file_path": "src/read/pdb/lexer.rs", "rank": 42, "score": 160330.5315887158 }, { "content": "/// Lex a MASTER\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_master(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let num_remark = check(parse_number(\n\n Context::line(linenumber, line, 10, 5),\n\n &chars[10..15],\n\n ));\n\n let num_empty = check(parse_number(\n\n Context::line(linenumber, line, 15, 5),\n\n &chars[15..20],\n\n ));\n\n let num_het = check(parse_number(\n\n Context::line(linenumber, line, 20, 5),\n", "file_path": "src/read/pdb/lexer.rs", "rank": 43, "score": 160330.5315887158 }, { "content": "/// Lexes a DBREF2 record\n\nfn lex_dbref2(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let id_code = [chars[7], chars[8], chars[9], chars[10]];\n\n let chain_id = chars[12];\n\n let database_accession = chars[18..40].iter().collect::<String>().trim().to_string();\n\n let 
database_seq_begin = check(parse_number(\n\n Context::line(linenumber, line, 55, 5),\n\n &chars[45..55],\n\n ));\n\n let database_seq_end = check(parse_number(\n\n Context::line(linenumber, line, 62, 5),\n\n &chars[57..67],\n", "file_path": "src/read/pdb/lexer.rs", "rank": 44, "score": 160325.91291340726 }, { "content": "/// Lexes a DBREF1 record\n\nfn lex_dbref1(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let id_code = [chars[7], chars[8], chars[9], chars[10]];\n\n let chain_id = chars[12];\n\n let seq_begin = check(parse_number(\n\n Context::line(linenumber, line, 14, 4),\n\n &chars[14..18],\n\n ));\n\n let insert_begin = chars[18];\n\n let seq_end = check(parse_number(\n\n Context::line(linenumber, line, 21, 4),\n\n &chars[21..24],\n", "file_path": "src/read/pdb/lexer.rs", "rank": 45, "score": 160325.91291340726 }, { "content": "/// Parse a SSBond line into the corresponding LexItem\n\nfn lex_ssbond(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n // The Serial number field is ignored\n\n let res_1 = chars[11..14].iter().collect::<String>();\n\n let chain_1 = chars[15];\n\n let res_seq_1: isize = parse_number(Context::line(linenumber, line, 17, 4), &chars[17..21])\n\n .unwrap_or_else(|err| {\n\n errors.push(err);\n\n 0\n\n });\n\n let icode_1 = if chars[21] == ' ' {\n\n None\n\n } else {\n\n Some(chars[21].to_string())\n\n };\n\n let res_2 = chars[25..28].iter().collect::<String>();\n\n let chain_2 = chars[29];\n\n let res_seq_2 = parse_number(Context::line(linenumber, line, 31, 4), &chars[31..35])\n\n .unwrap_or_else(|err| {\n", "file_path": "src/read/pdb/lexer.rs", "rank": 46, "score": 160325.91291340726 }, { "content": "/// Lexes a MODRES 
record\n\nfn lex_modres(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let id = [chars[7], chars[8], chars[9], chars[10]];\n\n let res_name = chars[12..15].iter().collect::<String>();\n\n let chain_id = chars[16];\n\n let seq_num = check(parse_number(\n\n Context::line(linenumber, line, 18, 4),\n\n &chars[18..22],\n\n ));\n\n let insert = chars[22];\n\n let std_res = chars[24..27].iter().collect::<String>();\n\n let comment = chars[29..].iter().collect::<String>().trim().to_string();\n", "file_path": "src/read/pdb/lexer.rs", "rank": 47, "score": 160325.91291340726 }, { "content": "/// Lexes a SEQADV record\n\nfn lex_seqadv(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let id_code = [chars[7], chars[8], chars[9], chars[10]];\n\n let res_name = chars[12..15].iter().collect::<String>().trim().to_string();\n\n let chain_id = chars[16];\n\n let seq_num = check(parse_number(\n\n Context::line(linenumber, line, 18, 4),\n\n &chars[18..22],\n\n ));\n\n let insert = chars[22];\n\n let database = chars[24..28].iter().collect::<String>().trim().to_string();\n\n let database_accession = chars[29..38].iter().collect::<String>().trim().to_string();\n", "file_path": "src/read/pdb/lexer.rs", "rank": 48, "score": 160325.91291340726 }, { "content": "/// Lexes a DBREF record\n\nfn lex_dbref(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let id_code = 
[chars[7], chars[8], chars[9], chars[10]];\n\n let chain_id = chars[12];\n\n let seq_begin = check(parse_number(\n\n Context::line(linenumber, line, 14, 4),\n\n &chars[14..18],\n\n ));\n\n let insert_begin = chars[18];\n\n let seq_end = check(parse_number(\n\n Context::line(linenumber, line, 21, 4),\n\n &chars[20..24],\n", "file_path": "src/read/pdb/lexer.rs", "rank": 49, "score": 160325.91291340726 }, { "content": "/// Lexes a SEQRES record\n\nfn lex_seqres(linenumber: usize, line: &str) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let ser_num = check(parse_number(\n\n Context::line(linenumber, line, 7, 3),\n\n &chars[7..10],\n\n ));\n\n let chain_id = chars[11];\n\n let num_res = check(parse_number(\n\n Context::line(linenumber, line, 13, 4),\n\n &chars[13..17],\n\n ));\n\n let mut values = Vec::new();\n", "file_path": "src/read/pdb/lexer.rs", "rank": 50, "score": 160325.91291340726 }, { "content": "/// A trait which defines all functions on a mutable hierarchy which contains Atoms, Conformers, Residues, and Chains.\n\npub trait ContainsAtomConformerResidueChainMut:\n\n ContainsAtomConformerResidueChain + ContainsAtomConformerResidueMut\n\n{\n\n /// Get a mutable reference to the chain\n\n fn chain_mut(&mut self) -> &mut Chain;\n\n}\n\n\n", "file_path": "src/structs/hierarchy.rs", "rank": 51, "score": 160167.42754322782 }, { "content": "fn bench_open(filename: &str) {\n\n let (_pdb, _errors) = open(filename, StrictnessLevel::Loose).unwrap();\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 52, "score": 159822.76677925247 }, { "content": "/// Parse an identifier, basically all chars until the next whitespace\n\nfn parse_identifier<'a>(input: &mut Position<'a>) -> &'a str {\n\n let mut chars_to_remove = 0;\n\n\n\n for c in input.text.chars() {\n\n if c.is_ascii_whitespace() 
{\n\n let identifier = &input.text[..chars_to_remove];\n\n input.text = &input.text[chars_to_remove..];\n\n input.column += chars_to_remove;\n\n return identifier;\n\n }\n\n chars_to_remove += 1;\n\n }\n\n\n\n let identifier = input.text;\n\n input.text = \"\";\n\n input.column += chars_to_remove;\n\n identifier\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 53, "score": 159341.9554406149 }, { "content": "/// Lex an ORIGXn (where `n` is given)\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_origx(linenumber: usize, line: &str, row: usize) -> (LexItem, Vec<PDBError>) {\n\n let (data, errors) = lex_transformation(linenumber, line);\n\n\n\n (LexItem::OrigX(row, data), errors)\n\n}\n\n\n", "file_path": "src/read/pdb/lexer.rs", "rank": 54, "score": 151753.29677039135 }, { "content": "/// Lex an SCALEn (where `n` is given)\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_scale(linenumber: usize, line: &str, row: usize) -> (LexItem, Vec<PDBError>) {\n\n let (data, errors) = lex_transformation(linenumber, line);\n\n\n\n (LexItem::Scale(row, data), errors)\n\n}\n\n\n", "file_path": "src/read/pdb/lexer.rs", "rank": 55, "score": 151753.29677039135 }, { "content": "/// Lex an MTRIXn (where `n` is given)\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_mtrix(linenumber: usize, line: &str, row: usize) -> (LexItem, Vec<PDBError>) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0\n\n }\n\n };\n\n let ser = check(parse_number(\n\n Context::line(linenumber, line, 7, 4),\n\n &chars[7..10],\n\n ));\n\n let (data, transformation_errors) = lex_transformation(linenumber, line);\n\n errors.extend(transformation_errors);\n\n\n\n let mut given = false;\n\n if chars.len() >= 60 {\n\n given = chars[59] == '1';\n\n }\n\n\n\n (LexItem::MtriX(row, ser, data, given), 
errors)\n\n}\n\n\n", "file_path": "src/read/pdb/lexer.rs", "rank": 56, "score": 151753.29677039132 }, { "content": "#[allow(clippy::cast_possible_truncation)]\n\nfn print_float(num: f64) -> String {\n\n let rounded = (num * 100000.).round() / 100000.;\n\n if (rounded.round() - rounded).abs() < std::f64::EPSILON {\n\n format!(\"{}.0\", rounded.trunc() as isize)\n\n } else {\n\n format!(\"{}\", rounded)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[allow(clippy::excessive_precision, clippy::print_literal)]\n\n fn test_print_float() {\n\n assert_eq!(print_float(1.), \"1.0\".to_string());\n\n assert_eq!(print_float(128734.), \"128734.0\".to_string());\n\n assert_eq!(print_float(0.1), \"0.1\".to_string());\n\n assert_eq!(print_float(1.015), \"1.015\".to_string());\n", "file_path": "src/save/mmcif.rs", "rank": 57, "score": 151528.66400838378 }, { "content": "/// Save the given PDB struct to the given file, validating it beforehand.\n\n///\n\n/// # Errors\n\n/// It fails if the validation fails with the given `level`.\n\n/// If validation gives rise to problems, use the `save_raw` function.\n\n///\n\n/// # Known Problems\n\n/// Saving SEQRES lines is experimental, as there are many nitpicky things to consider\n\n/// when generating SEQRES records, which are not all implemented (yet).\n\npub fn save_pdb(\n\n pdb: &PDB,\n\n filename: impl AsRef<str>,\n\n level: StrictnessLevel,\n\n) -> Result<(), Vec<PDBError>> {\n\n let filename = filename.as_ref();\n\n let mut errors = validate(pdb);\n\n errors.extend(validate_pdb(pdb));\n\n for error in &errors {\n\n if error.fails(level) {\n\n return Err(errors);\n\n }\n\n }\n\n\n\n let file = match File::create(filename) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n errors.push(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Could not open file\",\n", "file_path": "src/save/pdb.rs", "rank": 58, "score": 151075.9204031477 }, { "content": "/// Gets the covalent bond radii for the given atomic number 
(defined for all elements (<=118)).\n\n/// The result is the radius for a single, double and triple bond, where the last two are optional.\n\n/// All values are given in Å.\n\n/// Sources:\n\n/// * P. Pyykkö; M. Atsumi (2009). \"Molecular Single-Bond Covalent Radii for Elements 1-118\". Chemistry: A European Journal. 15 (1): 186–197. doi:10.1002/chem.200800987\n\n/// * P. Pyykkö; M. Atsumi (2009). \"Molecular Double-Bond Covalent Radii for Elements Li–E112\". Chemistry: A European Journal. 15 (46): 12770–12779. doi:10.1002/chem.200901472\n\n/// * P. Pyykkö; S. Riedel; M. Patzschke (2005). \"Triple-Bond Covalent Radii\". Chemistry: A European Journal. 11 (12): 3511–3520. doi:10.1002/chem.200401299\n\npub fn get_covalent_bond_radii(atomic_number: usize) -> (f64, Option<f64>, Option<f64>) {\n\n *ELEMENT_BOND_RADII\n\n .get(atomic_number - 1)\n\n .expect(\"Invalid atomic number provided for element bond radius lookup. The number should be less than or equal to 118.\")\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 59, "score": 150917.21300249276 }, { "content": "/// Gets the atomic radius for the given atomic number (defined up until 'Cm' 96) in Å.\n\n/// Source: Martin Rahm, Roald Hoffmann, and N. W. Ashcroft. Atomic and Ionic Radii of Elements 1-96. Chemistry - A European Journal, 22(41):14625–14632, oct 2016. 
<http://doi.org/10.1002/chem.201602949>\n\n/// Updated to the corrigendum: <https://doi.org/10.1002/chem.201700610>\n\npub fn get_atomic_radius(atomic_number: usize) -> Option<f64> {\n\n ELEMENT_ATOMIC_RADII.get(atomic_number - 1).copied()\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 60, "score": 146461.31132081832 }, { "content": "/// Parse a numeric value from a string which is expected to be of non zero length and not containing whitespace\n\nfn parse_numeric(text: &str) -> Option<Value> {\n\n let mut chars_to_remove = 0;\n\n let first_char = text.chars().next().unwrap();\n\n // Parse a possible sign\n\n let mut minus = false;\n\n if first_char == '-' {\n\n minus = true;\n\n chars_to_remove += 1;\n\n } else if first_char == '+' {\n\n chars_to_remove += 1;\n\n }\n\n\n\n // Parse the integer part\n\n let mut integer_set = false;\n\n let mut value = 0;\n\n for c in text.chars().skip(chars_to_remove) {\n\n if let Some(num) = c.to_digit(10) {\n\n integer_set = true;\n\n value *= 10;\n\n value += num;\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 61, "score": 141674.77645944455 }, { "content": "#[test]\n\nfn wrapping_residue_number() {\n\n let (pdb, errors) = pdbtbx::open(\"example-pdbs/eq.pdb\", StrictnessLevel::Strict).unwrap();\n\n let pdb_errors = save(&pdb, \"dump/eq.pdb\", StrictnessLevel::Loose);\n\n let (pdb2, _) = pdbtbx::open(\"dump/eq.pdb\", StrictnessLevel::Strict).unwrap();\n\n print!(\"{:?}\", errors);\n\n print!(\"{:?}\", pdb_errors);\n\n // See that the original file is the same as saved and reopened\n\n assert_eq!(pdb, pdb2);\n\n // See that it is possible to select atom with 'impossible' residue serial numbers according to the PDB definition\n\n // These are made by adding 10000 to the residue serial number every time a wrap is detected (9999 followed by 0)\n\n assert_eq!(\n\n pdb.residues()\n\n .find(|r| r.serial_number() == 10005)\n\n .unwrap()\n\n .name()\n\n .unwrap(),\n\n \"HOH\"\n\n );\n\n assert_eq!(\n\n 
pdb.residues()\n\n .find(|r| r.serial_number() == 20250)\n\n .unwrap()\n\n .name()\n\n .unwrap(),\n\n \"HOH\"\n\n );\n\n}\n", "file_path": "tests/wrapping_residue_number.rs", "rank": 62, "score": 140657.02461116834 }, { "content": "/// Save the given PDB struct to the given file, validating it beforehand.\n\n/// If validation gives rise to problems, use the `save_raw` function. The correct file\n\n/// type (pdb or mmCIF/PDBx) will be determined based on the given file extension.\n\n/// # Errors\n\n/// Fails if the validation fails with the given `level`.\n\npub fn save(\n\n pdb: &PDB,\n\n filename: impl AsRef<str>,\n\n level: StrictnessLevel,\n\n) -> Result<(), Vec<PDBError>> {\n\n if check_extension(&filename, \"pdb\") {\n\n save_pdb(pdb, filename, level)\n\n } else if check_extension(&filename, \"cif\") {\n\n save_mmcif(pdb, filename, level)\n\n } else {\n\n Err(vec![PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Incorrect extension\",\n\n \"Could not determine the type of the given file, make it .pdb or .cif\",\n\n Context::show(filename.as_ref()),\n\n )])\n\n }\n\n}\n", "file_path": "src/save/general.rs", "rank": 63, "score": 140455.30560267981 }, { "content": "/// A trait which defines all functions on a hierarchy which contains Atoms, Conformers, Residues, and Chains.\n\npub trait ContainsAtomConformerResidueChain: ContainsAtomConformerResidue {\n\n /// Get a reference to the chain\n\n fn chain(&self) -> &Chain;\n\n}\n\n\n", "file_path": "src/structs/hierarchy.rs", "rank": 64, "score": 139762.6082444247 }, { "content": "fn bench_validate(pdb: PDB) {\n\n validate(&pdb);\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 65, "score": 139722.9440300417 }, { "content": "fn bench_clone(pdb: PDB) {\n\n let _copy = pdb;\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 66, "score": 139722.9440300417 }, { "content": "/// Trim all whitespace (<space, \\t, <eol>) from the start of the string\n\nfn trim_whitespace(input: &mut Position<'_>) {\n\n let 
mut chars_to_remove = 0;\n\n let mut iter = input.text.chars().peekable();\n\n\n\n while let Some(c) = (&mut iter).next() {\n\n if c == ' ' || c == '\\t' {\n\n input.column += 1;\n\n chars_to_remove += 1;\n\n } else if c == '\\n' {\n\n if let Some('\\r') = (&mut iter).peek() {\n\n chars_to_remove += 1;\n\n let _ = (&mut iter).next();\n\n }\n\n input.line += 1;\n\n input.column = 1;\n\n chars_to_remove += 1;\n\n } else if c == '\\r' {\n\n if let Some('\\n') = (&mut iter).peek() {\n\n chars_to_remove += 1;\n\n let _ = (&mut iter).next();\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 67, "score": 139517.13211233402 }, { "content": "/// Skip forward until the next eol, \\r\\n and \\n\\r are but consumed in full\n\nfn skip_to_eol(input: &mut Position<'_>) {\n\n let mut chars_to_remove = 1;\n\n let mut iter = input.text.chars().skip(1).peekable();\n\n\n\n while let Some(c) = (&mut iter).next() {\n\n if c == '\\n' {\n\n if let Some('\\r') = (&mut iter).peek() {\n\n chars_to_remove += 1\n\n }\n\n input.line += 1;\n\n input.column = 1;\n\n chars_to_remove += 1;\n\n input.text = &input.text[chars_to_remove..];\n\n return;\n\n } else if c == '\\r' {\n\n if let Some('\\n') = (&mut iter).peek() {\n\n chars_to_remove += 1\n\n }\n\n input.line += 1;\n\n input.column = 1;\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 68, "score": 139517.13211233402 }, { "content": "#[test]\n\nfn low_b_factor_messages() {\n\n let filename = env::current_dir()\n\n .unwrap()\n\n .as_path()\n\n .join(Path::new(\"example-pdbs\"))\n\n .join(Path::new(\"low_b.pdb\"))\n\n .into_os_string()\n\n .into_string()\n\n .unwrap();\n\n\n\n let (pdb, errors) = pdbtbx::open(&filename, StrictnessLevel::Strict).unwrap();\n\n let pdb_errors = validate_pdb(&pdb);\n\n print!(\"{:?}\", errors);\n\n print!(\"{:?}\", pdb_errors);\n\n assert_eq!(errors.len(), 0);\n\n assert_eq!(pdb_errors.len(), 0);\n\n assert_eq!(pdb.atom(0).unwrap().b_factor(), 0.00);\n\n assert_eq!(pdb.atom(1).unwrap().b_factor(), 0.01);\n\n 
assert_eq!(pdb.atom(2).unwrap().b_factor(), 999.99);\n\n assert_eq!(pdb.atom(3).unwrap().occupancy(), 0.00);\n\n assert_eq!(pdb.atom(4).unwrap().occupancy(), 0.01);\n\n assert_eq!(pdb.atom(5).unwrap().occupancy(), 999.99);\n\n}\n", "file_path": "tests/low_b.rs", "rank": 69, "score": 137912.65188238875 }, { "content": "/// Get the Numeric content of the value, if available, as an isize\n\nfn get_isize(value: &Value, context: &Context) -> Result<Option<isize>, PDBError> {\n\n flatten_result(get_f64(value, context).map(|result| {\n\n if let Some(num) = result {\n\n #[allow(\n\n clippy::cast_precision_loss,\n\n clippy::cast_possible_truncation,\n\n clippy::float_cmp\n\n )]\n\n if (std::isize::MIN as f64..std::isize::MAX as f64).contains(&num) && num.trunc() == num\n\n {\n\n Ok(Some(num as isize))\n\n } else {\n\n Err(PDBError::new(\n\n ErrorLevel::InvalidatingError,\n\n \"Not an integer\",\n\n \"\",\n\n context.clone(),\n\n ))\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n }))\n\n}\n", "file_path": "src/read/mmcif/parser.rs", "rank": 70, "score": 137625.72816383865 }, { "content": "/// Get the Numeric content of the value, if available, as a usize\n\nfn get_usize(value: &Value, context: &Context) -> Result<Option<usize>, PDBError> {\n\n flatten_result(get_f64(value, context).map(|result| {\n\n if let Some(num) = result {\n\n #[allow(\n\n clippy::cast_precision_loss,\n\n clippy::cast_possible_truncation,\n\n clippy::cast_sign_loss,\n\n clippy::float_cmp\n\n )]\n\n if (0.0..std::usize::MAX as f64).contains(&num) && num.trunc() == num {\n\n Ok(Some(num as usize))\n\n } else {\n\n Err(PDBError::new(\n\n ErrorLevel::InvalidatingError,\n\n \"Not an unsigned integer\",\n\n \"\",\n\n context.clone(),\n\n ))\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n }))\n\n}\n\n\n", "file_path": "src/read/mmcif/parser.rs", "rank": 71, "score": 137625.72816383865 }, { "content": "/// Trim all allowed whitespace (including comments)\n\nfn trim_comments_and_whitespace(input: &mut Position<'_>) {\n\n 
loop {\n\n trim_whitespace(input);\n\n if input.text.is_empty() {\n\n return;\n\n }\n\n if input.text.starts_with('#') {\n\n skip_to_eol(input);\n\n } else {\n\n return;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/read/mmcif/lexer.rs", "rank": 72, "score": 137493.2556871976 }, { "content": "/// Parse the given file into a PDB struct.\n\n/// Returns a PDBError if a BreakingError is found. Otherwise it returns the PDB with all errors/warnings found while parsing it.\n\n///\n\n/// # Related\n\n/// If you want to open a file from memory see [`open_pdb_raw`]. There is also a function to open a mmCIF file directly\n\n/// see [`crate::open_mmcif`]. If you want to open a general file with no knowledge about the file type see [`crate::open`].\n\npub fn open_pdb(\n\n filename: impl AsRef<str>,\n\n level: StrictnessLevel,\n\n) -> Result<(PDB, Vec<PDBError>), Vec<PDBError>> {\n\n let filename = filename.as_ref();\n\n // Open a file a use a buffered reader to minimise memory use while immediately lexing the line followed by adding it to the current PDB\n\n let file = if let Ok(f) = File::open(filename) {\n\n f\n\n } else {\n\n return Err(vec![PDBError::new(ErrorLevel::BreakingError, \"Could not open file\", \"Could not open the specified file, make sure the path is correct, you have permission, and that it is not open in another program.\", Context::show(filename))]);\n\n };\n\n let reader = BufReader::new(file);\n\n open_pdb_raw(reader, Context::show(filename), level)\n\n}\n\n\n", "file_path": "src/read/pdb/parser.rs", "rank": 73, "score": 137465.04347872356 }, { "content": "/// Gets the van der Waals radius for the given atomic number (defined up until 'Es' 99) in Å.\n\n/// Source: Alvarez, S. (2013). A cartography of the van der Waals territories. Dalton Transactions, 42(24), 8617. 
<https://doi.org/10.1039/c3dt50599e>\n\npub fn get_vanderwaals_radius(atomic_number: usize) -> Option<f64> {\n\n ELEMENT_VANDERWAALS_RADII.get(atomic_number - 1).copied()\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 74, "score": 135253.67579581964 }, { "content": "/// Parse a number, generic for anything that can be parsed using FromStr\n\nfn parse_number<T: FromStr>(context: Context, input: &[char]) -> Result<T, PDBError> {\n\n let string = input\n\n .iter()\n\n .collect::<String>()\n\n .split_whitespace()\n\n .collect::<String>();\n\n match string.parse::<T>() {\n\n Ok(v) => Ok(v),\n\n Err(_) => Err(PDBError::new(\n\n ErrorLevel::InvalidatingError,\n\n \"Not a number\",\n\n \"The text presented is not a number of the right kind.\",\n\n context,\n\n )),\n\n }\n\n}\n", "file_path": "src/read/pdb/lexer.rs", "rank": 75, "score": 133417.6732638216 }, { "content": "/// A trait which defines all functions on a mutable hierarchy which contains Atoms, Conformers, and Residues.\n\npub trait ContainsAtomConformerResidueMut:\n\n ContainsAtomConformerResidue + ContainsAtomConformerMut\n\n{\n\n /// Get a mutable reference to the residue\n\n fn residue_mut(&mut self) -> &mut Residue;\n\n}\n\n\n", "file_path": "src/structs/hierarchy.rs", "rank": 76, "score": 132165.39751171292 }, { "content": "/// Lex an ATOM\n\n/// ## Fails\n\n/// It fails on incorrect numbers in the line\n\nfn lex_atom(\n\n linenumber: usize,\n\n line: &str,\n\n hetero: bool,\n\n) -> Result<(LexItem, Vec<PDBError>), PDBError> {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n\n if chars.len() < 54 {\n\n return Err(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Atom line too short\",\n\n \"This line is too short to contain all necessary elements (up to `z` at least).\",\n\n Context::full_line(linenumber, line),\n\n ));\n\n };\n\n let mut check = |item| match item {\n\n Ok(t) => t,\n\n Err(e) => {\n\n errors.push(e);\n\n 0.0\n", "file_path": 
"src/read/pdb/lexer.rs", "rank": 77, "score": 131780.51239354996 }, { "content": "/// Open an atomic data file, either PDB or mmCIF/PDBx. The correct type will be\n\n/// determined based on the file extension.\n\n///\n\n/// # Errors\n\n/// Returns a `PDBError` if a `BreakingError` is found. Otherwise it returns the PDB with all errors/warnings found while parsing it.\n\n///\n\n/// # Related\n\n/// If you want to open a file from memory see [`open_raw`]. There are also function to open a specified file type directly\n\n/// see [`crate::open_pdb`] and [`crate::open_mmcif`] respectively.\n\npub fn open(\n\n filename: impl AsRef<str>,\n\n level: StrictnessLevel,\n\n) -> Result<(PDB, Vec<PDBError>), Vec<PDBError>> {\n\n if check_extension(&filename, \"pdb\") {\n\n open_pdb(filename, level)\n\n } else if check_extension(&filename, \"cif\") {\n\n open_mmcif(filename, level)\n\n } else {\n\n Err(vec![PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Incorrect extension\",\n\n \"Could not determine the type of the given file, make it .pdb or .cif\",\n\n Context::show(filename.as_ref()),\n\n )])\n\n }\n\n}\n\n\n", "file_path": "src/read/general.rs", "rank": 78, "score": 130610.24516550644 }, { "content": "/// Parse the input stream into a PDB struct. To allow for direct streaming from sources, like from RCSB.org.\n\n/// Returns a PDBError if a BreakingError is found. Otherwise it returns the PDB with all errors/warnings found while parsing it.\n\n///\n\n/// ## Arguments\n\n/// * `input` - the input stream\n\n/// * `context` - the context of the full stream, to place error messages correctly, for files this is `Context::show(filename)`.\n\n/// * `level` - the strictness level to operate in. If errors are generated which are breaking in the given level the parsing will fail.\n\n///\n\n/// # Related\n\n/// If you want to open a file see [`open_pdb`]. There is also a function to open a mmCIF file directly\n\n/// see [`crate::open_mmcif`] and [`crate::open_mmcif_raw`]. 
If you want to open a general file\n\n/// with no knowledge about the file type see [`crate::open`] and [`crate::open_raw`].\n\npub fn open_pdb_raw<T>(\n\n input: std::io::BufReader<T>,\n\n context: Context,\n\n level: StrictnessLevel,\n\n) -> Result<(PDB, Vec<PDBError>), Vec<PDBError>>\n\nwhere\n\n T: std::io::Read,\n\n{\n\n let mut errors = Vec::new();\n\n let mut pdb = PDB::new();\n\n let mut current_model_number = 0;\n\n let mut current_model: IndexMap<String, IndexMap<(isize, Option<String>), Residue>> =\n\n IndexMap::new();\n\n let mut sequence: HashMap<String, Vec<(usize, usize, Vec<String>)>> = HashMap::new();\n\n let mut seqres_lines = Vec::new();\n\n let mut seqres_start_linenumber = usize::MAX;\n\n let mut database_references = Vec::new();\n\n let mut modifications = Vec::new();\n\n let mut bonds = Vec::new();\n\n let mut temp_scale = BuildUpMatrix::empty();\n", "file_path": "src/read/pdb/parser.rs", "rank": 79, "score": 129945.72580679889 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn lex_atom_basics(\n\n linenumber: usize,\n\n line: &str,\n\n) -> (\n\n (\n\n usize,\n\n String,\n\n Option<String>,\n\n String,\n\n String,\n\n isize,\n\n Option<String>,\n\n String,\n\n String,\n\n isize,\n\n ),\n\n Vec<PDBError>,\n\n) {\n\n let mut errors = Vec::new();\n\n let chars: Vec<char> = line.chars().collect();\n", "file_path": "src/read/pdb/lexer.rs", "rank": 80, "score": 128128.36774156275 }, { "content": "/// Gets the atomic number for the given element. 
It is case insensitive for the element name.\n\npub fn get_atomic_number(element: impl AsRef<str>) -> Option<usize> {\n\n let mut counter = 1;\n\n let element = element.as_ref().to_ascii_uppercase();\n\n for item in ELEMENT_SYMBOLS {\n\n if item == &element {\n\n return Some(counter);\n\n }\n\n counter += 1;\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 81, "score": 127653.3989405584 }, { "content": "#[cfg(feature = \"rayon\")]\n\npub fn par_get_index_for_symbol(symbol: impl AsRef<str>) -> Option<usize> {\n\n let symbol = symbol.as_ref();\n\n if let Some(index) = HERMANN_MAUGUIN_SYMBOL\n\n .par_iter()\n\n .position_any(|i| *i == symbol)\n\n {\n\n Some(index + 1)\n\n } else {\n\n HALL_SYMBOL\n\n .par_iter()\n\n .position_any(|i| *i == symbol)\n\n .map(|n| n + 1)\n\n }\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 82, "score": 125589.08657004485 }, { "content": "/// Gets the transformations given an index (into Int. Crys. Handbook Vol A 2016) for the given space group\n\npub fn get_transformation(index: usize) -> Option<&'static [[[f64; 4]; 3]]> {\n\n SYMBOL_TRANSFORMATION.get(index - 1).copied()\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 83, "score": 124967.06182971553 }, { "content": "/// Helper function to check extensions in filenames\n\nfn check_extension(filename: impl AsRef<str>, extension: impl AsRef<str>) -> bool {\n\n filename\n\n .as_ref()\n\n .rsplit('.')\n\n .next()\n\n .map(|ext| ext.eq_ignore_ascii_case(extension.as_ref()))\n\n == Some(true)\n\n}\n", "file_path": "src/lib.rs", "rank": 84, "score": 114682.25269973115 }, { "content": "def save(pdb):\n\n f = open(\"dump.pdb\", \"w\")\n\n f.write(pdb.hierarchy.as_pdb_string())\n", "file_path": "benches/benchmark.py", "rank": 85, "score": 108884.47978033886 }, { "content": "#[allow(clippy::unwrap_used)]\n\npub fn number_to_base26(mut num: usize) -> String {\n\n let mut output = vec![ALPHABET.chars().nth(num % 26).unwrap()];\n\n num /= 26;\n\n 
while num != 0 {\n\n output.push(ALPHABET.chars().nth(num % 26).unwrap());\n\n num /= 26;\n\n }\n\n output.iter().rev().collect::<String>()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn correct_examples() {\n\n assert!(check_char('a'));\n\n assert!(check_char('9'));\n\n assert!(check_char('*'));\n\n assert!(check_char('@'));\n\n assert!(check_char('O'));\n", "file_path": "src/structs/helper.rs", "rank": 86, "score": 108172.90188544852 }, { "content": "/// Save the given PDB struct to the given file as mmCIF or PDBx.\n\n/// # Errors\n\n/// It validates the PDB. It fails if the validation fails with the given `level`, or if the file could not be opened.\n\n/// If validation gives rise to problems, use the `save_raw` function.\n\npub fn save_mmcif(\n\n pdb: &PDB,\n\n filename: impl AsRef<str>,\n\n level: StrictnessLevel,\n\n) -> Result<(), Vec<PDBError>> {\n\n let filename = filename.as_ref();\n\n let mut errors = validate(pdb);\n\n for error in &errors {\n\n if error.fails(level) {\n\n return Err(errors);\n\n }\n\n }\n\n\n\n let file = match File::create(filename) {\n\n Ok(f) => f,\n\n Err(_e) => {\n\n errors.push(PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Could not open file\",\n\n \"Could not open the file for writing, make sure you have permission for this file and no other program is currently using it.\",\n", "file_path": "src/save/mmcif.rs", "rank": 87, "score": 106056.4709515158 }, { "content": "#[test]\n\nfn clipped() {\n\n let (pdb, errors) = pdbtbx::open(\"example-pdbs/large.pdb\", StrictnessLevel::Strict).unwrap();\n\n let pdb_errors = save(&pdb, \"dump/large.pdb\", StrictnessLevel::Loose);\n\n print!(\"{:?}\", errors);\n\n print!(\"{:?}\", pdb_errors);\n\n let file = File::open(\"dump/large.pdb\").unwrap();\n\n let mut buffer = BufReader::new(file).lines();\n\n let target = \"ATOM 8662 H2 WAT C5372 7.739 79.053 26.313 1.00 0.00 H\";\n\n let target_line = buffer.find(|l| {\n\n if let Ok(line) = l {\n\n line.trim() == 
target\n\n } else {\n\n false\n\n }\n\n });\n\n assert!(target_line.is_some())\n\n}\n", "file_path": "tests/clipped.rs", "rank": 88, "score": 105663.50650090826 }, { "content": "fn main() {\n\n let errors = vec![\n\n PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Error01:None\",\n\n \"General text with how the error came to be.\",\n\n Context::None,\n\n ),\n\n PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Error02:Show\",\n\n \"General text with how the error came to be.\",\n\n Context::Show {\n\n line: \"line with erroer\".to_string(),\n\n },\n\n ),\n\n PDBError::new(\n\n ErrorLevel::BreakingError,\n\n \"Error03:FullLine\",\n\n \"General text with how the error came to be.\",\n", "file_path": "examples/errors.rs", "rank": 89, "score": 105237.20927392151 }, { "content": "def open_pdb(filename):\n", "file_path": "benches/benchmark.py", "rank": 90, "score": 104630.25482685403 }, { "content": "/// A trait which defines all functions on a mutable hierarchy which contains Atoms and Conformers.\n\npub trait ContainsAtomConformerMut: ContainsAtomConformer {\n\n /// Get a mutable reference to the atom\n\n fn atom_mut(&mut self) -> &mut Atom;\n\n /// Get a mutable reference to the conformer\n\n fn conformer_mut(&mut self) -> &mut Conformer;\n\n}\n\n\n", "file_path": "src/structs/hierarchy.rs", "rank": 91, "score": 104325.2805045325 }, { "content": "/// A trait which defines all functions on a hierarchy which contains Atoms, Conformers, and Residues.\n\npub trait ContainsAtomConformerResidue: ContainsAtomConformer {\n\n /// Get a reference to the residue\n\n fn residue(&self) -> &Residue;\n\n}\n\n\n", "file_path": "src/structs/hierarchy.rs", "rank": 92, "score": 104254.02523202643 }, { "content": "/// Checks a string using `check_char`.\n\n/// Returns `true` if the text is valid.\n\npub fn valid_identifier(text: impl AsRef<str>) -> bool {\n\n for c in text.as_ref().chars() {\n\n if !check_char(c) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": 
"src/structs/helper.rs", "rank": 93, "score": 103633.14998977925 }, { "content": "/// Checks a string using `check_char`.\n\n/// Returns `true` if the text is valid.\n\npub fn valid_text(text: impl AsRef<str>) -> bool {\n\n for c in text.as_ref().chars() {\n\n if !check_char(c) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/structs/helper.rs", "rank": 94, "score": 103633.14998977925 }, { "content": "/// Find all Atoms belonging to a Residue that has at least one Atom within a sphere of\n\n/// user-defined origin and radius.\n\n/// This is using the features `rstar` and `rayon`.\n\nfn residue_sphere() {\n\n let (pdb, _errors) = open_pdb(\"example-pdbs/1ubq.pdb\", StrictnessLevel::Loose).unwrap();\n\n let (origin_id, radius): (usize, f64) = (12, 3.5);\n\n\n\n let sphere_origin = pdb\n\n .atoms_with_hierarchy()\n\n .find(|a| a.atom().serial_number() == origin_id)\n\n .unwrap();\n\n // Create a tree of atoms containing their respective hierarchies.\n\n let tree = pdb.create_hierarchy_rtree();\n\n let mut sphere_atoms: Vec<&Atom> = tree\n\n // This finds all Atoms with their hierarchy within the given sphere.\n\n .locate_within_distance(sphere_origin.atom().pos(), radius.powf(2.0))\n\n // Find the Residues each found Atom belongs to and return all the Atoms they contain.\n\n .flat_map(|atom_hier| atom_hier.residue().atoms())\n\n // Collect the flattened iterator into a Vec\n\n .collect();\n\n\n\n // The resulting Vec contains duplicates because each hierarchical Atom found was queried for\n\n // all Atoms within the same Residue so sorting and deduplicating is necessary.\n", "file_path": "examples/sphere.rs", "rank": 95, "score": 102458.76114981923 }, { "content": "/// Find all Atoms in a sphere around a single origin Atom with a user-defined radius\n\n/// This is using the features `rstar` and `rayon`.\n\nfn atom_sphere() {\n\n let (pdb, _errors) = open_pdb(\"example-pdbs/1ubq.pdb\", StrictnessLevel::Loose).unwrap();\n\n let (origin_id, 
radius): (usize, f64) = (12, 3.5);\n\n\n\n // Leverage parallel searching\n\n let origin_atom = pdb\n\n .par_atoms()\n\n .find_first(|atom| atom.serial_number() == origin_id)\n\n .unwrap();\n\n let tree = pdb.create_atom_rtree();\n\n let mut sphere_atoms: Vec<&&Atom> = tree\n\n .locate_within_distance(origin_atom.pos(), radius.powf(2.0))\n\n .collect();\n\n\n\n // Since the rtree is not ordered, the resulting Vec won't be either.\n\n sphere_atoms.sort_unstable();\n\n assert_eq!(sphere_atoms.len(), 16)\n\n}\n\n\n", "file_path": "examples/sphere.rs", "rank": 96, "score": 102284.0484853944 }, { "content": "/// Gets the Hall symbol for the given index (into Int. Crys. Handbook Vol A 2016)\n\npub fn get_hall_symbol_for_index(index: usize) -> Option<&'static str> {\n\n HALL_SYMBOL.get(index - 1).copied()\n\n}\n\n\n", "file_path": "src/reference_tables.rs", "rank": 97, "score": 100149.1644390532 }, { "content": "pub fn prepare_identifier(text: impl AsRef<str>) -> Option<String> {\n\n let text = text.as_ref();\n\n if valid_identifier(text) && !text.trim().is_empty() {\n\n Some(text.trim().to_ascii_uppercase())\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nconst ALPHABET: &str = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\";\n\n\n\n/// Converts a number into a base26 with only the alphabet as possible chars\n", "file_path": "src/structs/helper.rs", "rank": 98, "score": 100129.88502411803 }, { "content": "#[test]\n\nfn insertion_codes() {\n\n let (pdb, errors) =\n\n pdbtbx::open(\"example-pdbs/insertion_codes.pdb\", StrictnessLevel::Strict).unwrap();\n\n let pdb_errors = save(&pdb, \"dump/insertion_codes.pdb\", StrictnessLevel::Loose);\n\n let (pdb2, _) = pdbtbx::open(\"dump/insertion_codes.pdb\", StrictnessLevel::Strict).unwrap();\n\n print!(\"{:?}\", errors);\n\n print!(\"{:?}\", pdb_errors);\n\n // See that the original file is the same as saved and reopened\n\n assert_eq!(pdb, pdb2);\n\n assert_eq!(pdb.residues().count(), 2);\n\n assert_eq!(pdb.residue(0).unwrap().insertion_code().unwrap(), 
\"A\");\n\n assert_eq!(pdb.residue(1).unwrap().insertion_code().unwrap(), \"B\");\n\n assert_eq!(pdb2.residues().count(), 2);\n\n assert_eq!(pdb2.residue(0).unwrap().insertion_code().unwrap(), \"A\");\n\n assert_eq!(pdb2.residue(1).unwrap().insertion_code().unwrap(), \"B\");\n\n}\n", "file_path": "tests/insertion_codes.rs", "rank": 99, "score": 99747.9484555888 } ]
Rust
lib/llvm-backend/src/platform/unix.rs
spacemeshos/wasmer
31365510edd73878916e1aba2dd79f415b8c6891
use super::common::round_up_to_page_size; use crate::structs::{LLVMResult, MemProtect}; use libc::{ c_void, mmap, mprotect, munmap, siginfo_t, MAP_ANON, MAP_PRIVATE, PROT_EXEC, PROT_NONE, PROT_READ, PROT_WRITE, }; use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, SIGBUS, SIGSEGV}; use std::ptr; #[allow(clippy::cast_ptr_alignment)] #[cfg(target_os = "macos")] pub unsafe fn visit_fde(addr: *mut u8, size: usize, visitor: extern "C" fn(*mut u8)) { unsafe fn process_fde(entry: *mut u8, visitor: extern "C" fn(*mut u8)) -> *mut u8 { let mut p = entry; let length = (p as *const u32).read_unaligned(); p = p.add(4); let offset = (p as *const u32).read_unaligned(); if offset != 0 { visitor(entry); } p.add(length as usize) } let mut p = addr; let end = p.add(size); loop { if p >= end { break; } p = process_fde(p, visitor); } } #[cfg(not(target_os = "macos"))] pub unsafe fn visit_fde(addr: *mut u8, _size: usize, visitor: extern "C" fn(*mut u8)) { visitor(addr); } extern "C" { #[cfg_attr(nightly, unwind(allowed))] fn throw_trap(ty: i32) -> !; } pub unsafe fn install_signal_handler() { let sa = SigAction::new( SigHandler::SigAction(signal_trap_handler), SaFlags::SA_ONSTACK | SaFlags::SA_SIGINFO, SigSet::empty(), ); sigaction(SIGSEGV, &sa).unwrap(); sigaction(SIGBUS, &sa).unwrap(); } #[cfg_attr(nightly, unwind(allowed))] extern "C" fn signal_trap_handler( _signum: ::nix::libc::c_int, _siginfo: *mut siginfo_t, _ucontext: *mut c_void, ) { unsafe { throw_trap(2); } } pub unsafe fn alloc_memory( size: usize, protect: MemProtect, ptr_out: &mut *mut u8, size_out: &mut usize, ) -> LLVMResult { let size = round_up_to_page_size(size); let ptr = mmap( ptr::null_mut(), size, match protect { MemProtect::NONE => PROT_NONE, MemProtect::READ => PROT_READ, MemProtect::READ_WRITE => PROT_READ | PROT_WRITE, MemProtect::READ_EXECUTE => PROT_READ | PROT_EXEC, }, MAP_PRIVATE | MAP_ANON, -1, 0, ); if ptr as isize == -1 { return LLVMResult::ALLOCATE_FAILURE; } *ptr_out = ptr as 
_; *size_out = size; LLVMResult::OK } pub unsafe fn protect_memory(ptr: *mut u8, size: usize, protect: MemProtect) -> LLVMResult { let res = mprotect( ptr as _, round_up_to_page_size(size), match protect { MemProtect::NONE => PROT_NONE, MemProtect::READ => PROT_READ, MemProtect::READ_WRITE => PROT_READ | PROT_WRITE, MemProtect::READ_EXECUTE => PROT_READ | PROT_EXEC, }, ); if res == 0 { LLVMResult::OK } else { LLVMResult::PROTECT_FAILURE } } pub unsafe fn dealloc_memory(ptr: *mut u8, size: usize) -> LLVMResult { let res = munmap(ptr as _, round_up_to_page_size(size)); if res == 0 { LLVMResult::OK } else { LLVMResult::DEALLOC_FAILURE } }
use super::common::round_up_to_page_size; use crate::structs::{LLVMResult, MemProtect}; use libc::{ c_void, mmap, mprotect, munmap, siginfo_t, MAP_ANON, MAP_PRIVATE, PROT_EXEC, PROT_NONE, PROT_READ, PROT_WRITE, }; use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, SIGBUS, SIGSEGV}; use std::ptr; #[allow(clippy::cast_ptr_alignment)] #[cfg(target_os = "macos")] pub unsafe fn visit_fde(addr: *mut u8, size: usize, visitor: extern "C" fn(*mut u8)) { unsafe fn process_fde(entry: *mut u8, visitor: extern "C" fn(*mut u8)) -> *mut u8 { let mut p = entry; let length = (p as *const u32).read_unaligned(); p = p.add(4); let offset = (p as *const u32).read_unaligned(); if offset != 0 { visitor(entry); } p.add(length as usize) } let
res == 0 { LLVMResult::OK } else { LLVMResult::PROTECT_FAILURE } } pub unsafe fn dealloc_memory(ptr: *mut u8, size: usize) -> LLVMResult { let res = munmap(ptr as _, round_up_to_page_size(size)); if res == 0 { LLVMResult::OK } else { LLVMResult::DEALLOC_FAILURE } }
mut p = addr; let end = p.add(size); loop { if p >= end { break; } p = process_fde(p, visitor); } } #[cfg(not(target_os = "macos"))] pub unsafe fn visit_fde(addr: *mut u8, _size: usize, visitor: extern "C" fn(*mut u8)) { visitor(addr); } extern "C" { #[cfg_attr(nightly, unwind(allowed))] fn throw_trap(ty: i32) -> !; } pub unsafe fn install_signal_handler() { let sa = SigAction::new( SigHandler::SigAction(signal_trap_handler), SaFlags::SA_ONSTACK | SaFlags::SA_SIGINFO, SigSet::empty(), ); sigaction(SIGSEGV, &sa).unwrap(); sigaction(SIGBUS, &sa).unwrap(); } #[cfg_attr(nightly, unwind(allowed))] extern "C" fn signal_trap_handler( _signum: ::nix::libc::c_int, _siginfo: *mut siginfo_t, _ucontext: *mut c_void, ) { unsafe { throw_trap(2); } } pub unsafe fn alloc_memory( size: usize, protect: MemProtect, ptr_out: &mut *mut u8, size_out: &mut usize, ) -> LLVMResult { let size = round_up_to_page_size(size); let ptr = mmap( ptr::null_mut(), size, match protect { MemProtect::NONE => PROT_NONE, MemProtect::READ => PROT_READ, MemProtect::READ_WRITE => PROT_READ | PROT_WRITE, MemProtect::READ_EXECUTE => PROT_READ | PROT_EXEC, }, MAP_PRIVATE | MAP_ANON, -1, 0, ); if ptr as isize == -1 { return LLVMResult::ALLOCATE_FAILURE; } *ptr_out = ptr as _; *size_out = size; LLVMResult::OK } pub unsafe fn protect_memory(ptr: *mut u8, size: usize, protect: MemProtect) -> LLVMResult { let res = mprotect( ptr as _, round_up_to_page_size(size), match protect { MemProtect::NONE => PROT_NONE, MemProtect::READ => PROT_READ, MemProtect::READ_WRITE => PROT_READ | PROT_WRITE, MemProtect::READ_EXECUTE => PROT_READ | PROT_EXEC, }, ); if
random
[ { "content": "type Trampoline = unsafe extern \"C\" fn(*mut Ctx, NonNull<Func>, *const u64, *mut u64);\n", "file_path": "lib/win-exception-handler/src/exception_handling.rs", "rank": 0, "score": 336851.1212825576 }, { "content": "pub fn round_up_to_page_size(size: usize) -> usize {\n\n (size + (4096 - 1)) & !(4096 - 1)\n\n}\n", "file_path": "lib/llvm-backend/src/platform/common.rs", "rank": 1, "score": 286346.96887368476 }, { "content": "pub fn endpwent(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::endpwent\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 2, "score": 273384.4866140868 }, { "content": "pub fn setpwent(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::setpwent\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 3, "score": 273384.4866140868 }, { "content": "/// This function trys to find an entry in mapdir\n\n/// translating paths into their correct value\n\npub fn get_cstr_path(ctx: &mut Ctx, path: *const i8) -> Option<std::ffi::CString> {\n\n use std::collections::VecDeque;\n\n\n\n let path_str =\n\n unsafe { std::ffi::CStr::from_ptr(path as *const _).to_str().unwrap() }.to_string();\n\n let data = get_emscripten_data(ctx);\n\n let path = PathBuf::from(path_str);\n\n let mut prefix_added = false;\n\n let mut components = path.components().collect::<VecDeque<_>>();\n\n // TODO(mark): handle absolute/non-canonical/non-relative paths too (this\n\n // functionality should be shared among the abis)\n\n if components.len() == 1 {\n\n components.push_front(std::path::Component::CurDir);\n\n prefix_added = true;\n\n }\n\n let mut cumulative_path = PathBuf::new();\n\n for c in components.into_iter() {\n\n cumulative_path.push(c);\n\n if let Some(val) = data\n\n .mapped_dirs\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 4, "score": 265030.4718460224 }, { "content": "pub fn getpwent(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::getpwent\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 5, 
"score": 262784.2106857542 }, { "content": "/// emscripten: ___cxa_allocate_exception\n\npub fn ___cxa_allocate_exception(ctx: &mut Ctx, size: u32) -> u32 {\n\n debug!(\"emscripten::___cxa_allocate_exception\");\n\n env::call_malloc(ctx, size as _)\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 6, "score": 262263.6280755891 }, { "content": "pub fn call_malloc(ctx: &mut Ctx, size: u32) -> u32 {\n\n get_emscripten_data(ctx)\n\n .malloc\n\n .as_ref()\n\n .unwrap()\n\n .call(size)\n\n .unwrap()\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 7, "score": 262263.6280755891 }, { "content": "pub fn static_alloc(static_top: &mut u32, size: u32) -> u32 {\n\n let old_static_top = *static_top;\n\n // NOTE: The `4294967280` is a u32 conversion of -16 as gotten from emscripten.\n\n *static_top = (*static_top + size + 15) & 4294967280;\n\n old_static_top\n\n}\n", "file_path": "lib/emscripten/src/storage.rs", "rank": 8, "score": 262263.6280755891 }, { "content": "pub fn current_sigrtmin(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::current_sigrtmin\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 9, "score": 258614.5814750648 }, { "content": "pub fn current_sigrtmax(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::current_sigrtmax\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 10, "score": 258614.5814750648 }, { "content": "#[cfg(target_os = \"wasi\")]\n\nfn pread(fd: u32, iovs: &[&mut [u8]], offset: u64) -> u32 {\n\n let mut nread = 0;\n\n let mut processed_iovs = vec![];\n\n\n\n for iov in iovs {\n\n processed_iovs.push(WasiIovec {\n\n buf: iov.as_ptr() as usize as u32,\n\n buf_len: iov.len() as u32,\n\n })\n\n }\n\n\n\n unsafe {\n\n fd_pread(\n\n fd,\n\n processed_iovs.as_ptr() as usize as u32,\n\n processed_iovs.len() as u32,\n\n offset,\n\n &mut nread as *mut u32 as usize as u32,\n\n );\n\n }\n\n nread\n\n}\n\n\n", "file_path": "lib/wasi-tests/wasitests/fd_pread.rs", "rank": 11, 
"score": 255068.87686992704 }, { "content": "/// emscripten: _emscripten_get_heap_size\n\npub fn _emscripten_get_heap_size(ctx: &mut Ctx) -> u32 {\n\n trace!(\"emscripten::_emscripten_get_heap_size\");\n\n let result = ctx.memory(0).size().bytes().0 as u32;\n\n trace!(\"=> {}\", result);\n\n\n\n result\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 12, "score": 250823.9020224922 }, { "content": "pub fn sigpending(_ctx: &mut Ctx, _a: i32) -> i32 {\n\n debug!(\"emscripten::sigpending\");\n\n 0\n\n}\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 13, "score": 248487.3674278147 }, { "content": "fn offset_to_index(offset: u8) -> u32 {\n\n (offset as usize / ::std::mem::size_of::<usize>()) as u32\n\n}\n\n\n\nimpl<'a> CtxType<'a> {\n\n pub fn new(\n\n info: &'a ModuleInfo,\n\n func_value: &'a FunctionValue,\n\n cache_builder: Builder,\n\n ) -> CtxType<'a> {\n\n CtxType {\n\n ctx_ptr_value: func_value.get_nth_param(0).unwrap().into_pointer_value(),\n\n\n\n info,\n\n cache_builder,\n\n\n\n cached_memories: HashMap::new(),\n\n cached_tables: HashMap::new(),\n\n cached_sigindices: HashMap::new(),\n\n cached_globals: HashMap::new(),\n", "file_path": "lib/llvm-backend/src/intrinsics.rs", "rank": 14, "score": 247962.9991334029 }, { "content": "pub fn call_memalign(ctx: &mut Ctx, alignment: u32, size: u32) -> u32 {\n\n if let Some(memalign) = &get_emscripten_data(ctx).memalign {\n\n memalign.call(alignment, size).unwrap()\n\n } else {\n\n panic!(\"Memalign is set to None\");\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 15, "score": 244747.96300273988 }, { "content": "/// Sets the number of points used by an Instance.\n\npub fn set_points_used(instance: &mut Instance, value: u64) {\n\n instance.set_internal(&INTERNAL_FIELD, value);\n\n}\n\n\n", "file_path": "lib/middleware-common/src/metering.rs", "rank": 16, "score": 241799.70857780435 }, { "content": "/// Sets the number of points used in a Ctx.\n\npub fn 
set_points_used_ctx(ctx: &mut Ctx, value: u64) {\n\n ctx.set_internal(&INTERNAL_FIELD, value);\n\n}\n\n\n\n#[cfg(all(test, any(feature = \"singlepass\", feature = \"llvm\")))]\n\nmod tests {\n\n use super::*;\n\n use wabt::wat2wasm;\n\n\n\n use wasmer_runtime_core::codegen::{MiddlewareChain, StreamingCompiler};\n\n use wasmer_runtime_core::{backend::Compiler, compile_with, imports, Func};\n\n\n\n #[cfg(feature = \"llvm\")]\n\n fn get_compiler(limit: u64) -> impl Compiler {\n\n use wasmer_llvm_backend::ModuleCodeGenerator as LLVMMCG;\n\n let c: StreamingCompiler<LLVMMCG, _, _, _, _> = StreamingCompiler::new(move || {\n\n let mut chain = MiddlewareChain::new();\n\n chain.push(Metering::new(limit));\n\n chain\n\n });\n", "file_path": "lib/middleware-common/src/metering.rs", "rank": 17, "score": 238308.48835424107 }, { "content": "#[cfg(not(all(unix, target_arch = \"x86_64\")))]\n\nfn get_interrupt_signal_mem() -> *mut u8 {\n\n static mut REGION: u64 = 0;\n\n unsafe { &mut REGION as *mut u64 as *mut u8 }\n\n}\n\n\n\nimpl Ctx {\n\n #[doc(hidden)]\n\n pub unsafe fn new(\n\n local_backing: &mut LocalBacking,\n\n import_backing: &mut ImportBacking,\n\n module: &ModuleInner,\n\n ) -> Self {\n\n let (mem_base, mem_bound): (*mut u8, usize) =\n\n if module.info.memories.len() == 0 && module.info.imported_memories.len() == 0 {\n\n (::std::ptr::null_mut(), 0)\n\n } else {\n\n let mem = match MemoryIndex::new(0).local_or_import(&module.info) {\n\n LocalOrImport::Local(index) => local_backing.vm_memories[index],\n\n LocalOrImport::Import(index) => import_backing.vm_memories[index],\n\n };\n", "file_path": "lib/runtime-core/src/vm.rs", "rank": 18, "score": 238302.1715279921 }, { "content": "pub fn asm_const_i(_ctx: &mut Ctx, _val: i32) -> i32 {\n\n debug!(\"emscripten::asm_const_i: {}\", _val);\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/emscripten_target.rs", "rank": 19, "score": 237063.8568177364 }, { "content": "pub fn killpg(_ctx: &mut Ctx, _a: i32, _b: i32) -> i32 
{\n\n debug!(\"emscripten::killpg\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 20, "score": 236625.19430205852 }, { "content": "pub fn execv(_ctx: &mut Ctx, _a: i32, _b: i32) -> i32 {\n\n debug!(\"emscripten::execv\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 21, "score": 236625.19430205852 }, { "content": "pub fn fpathconf(_ctx: &mut Ctx, _a: i32, _b: i32) -> i32 {\n\n debug!(\"emscripten::fpathconf\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 22, "score": 236625.19430205852 }, { "content": "pub fn getitimer(_ctx: &mut Ctx, _a: i32, _b: i32) -> i32 {\n\n debug!(\"emscripten::getitimer\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 23, "score": 236625.19430205852 }, { "content": "pub fn sigismember(_ctx: &mut Ctx, _a: i32, _b: i32) -> i32 {\n\n debug!(\"emscripten::sigismember\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 24, "score": 236625.19430205852 }, { "content": "pub fn allocate_and_run<R, F: FnOnce() -> R>(size: usize, f: F) -> R {\n\n struct Context<F: FnOnce() -> R, R> {\n\n f: Option<F>,\n\n ret: Option<R>,\n\n }\n\n\n\n extern \"C\" fn invoke<F: FnOnce() -> R, R>(ctx: &mut Context<F, R>) {\n\n let f = ctx.f.take().unwrap();\n\n ctx.ret = Some(f());\n\n }\n\n\n\n unsafe {\n\n let mut ctx = Context {\n\n f: Some(f),\n\n ret: None,\n\n };\n\n assert!(size % 16 == 0);\n\n assert!(size >= 4096);\n\n\n\n let mut stack: Vec<u64> = vec![0; size / 8];\n", "file_path": "lib/runtime-core/src/fault.rs", "rank": 25, "score": 234040.75702366407 }, { "content": "/// emscripten: _emscripten_resize_heap\n\n/// Note: this function only allows growing the size of heap\n\npub fn _emscripten_resize_heap(ctx: &mut Ctx, requested_size: u32) -> u32 {\n\n debug!(\"emscripten::_emscripten_resize_heap {}\", requested_size);\n\n let current_memory_pages = ctx.memory(0).size();\n\n let current_memory = current_memory_pages.bytes().0 as 
u32;\n\n\n\n // implementation from emscripten\n\n let mut new_size = usize::max(current_memory as usize, WASM_MIN_PAGES * WASM_PAGE_SIZE);\n\n while new_size < requested_size as usize {\n\n if new_size <= 0x2000_0000 {\n\n new_size = align_up(new_size * 2, WASM_PAGE_SIZE);\n\n } else {\n\n new_size = usize::min(\n\n align_up((3 * new_size + 0x8000_0000) / 4, WASM_PAGE_SIZE),\n\n WASM_PAGE_SIZE * WASM_MAX_PAGES,\n\n );\n\n }\n\n }\n\n\n\n let amount_to_grow = (new_size - current_memory as usize) / WASM_PAGE_SIZE;\n\n if let Ok(_pages_allocated) = ctx.memory(0).grow(Pages(amount_to_grow as u32)) {\n\n debug!(\"{} pages allocated\", _pages_allocated.0);\n\n 1\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 26, "score": 233561.54562375537 }, { "content": "/// emscripten: abortOnCannotGrowMemory\n\npub fn abort_on_cannot_grow_memory(ctx: &mut Ctx, _requested_size: u32) -> u32 {\n\n debug!(\n\n \"emscripten::abort_on_cannot_grow_memory {}\",\n\n _requested_size\n\n );\n\n abort_with_message(ctx, \"Cannot enlarge memory arrays!\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 27, "score": 233556.2247651604 }, { "content": "pub fn execvp(ctx: &mut Ctx, command_name_offset: u32, argv_offset: u32) -> i32 {\n\n // a single reference to re-use\n\n let emscripten_memory = ctx.memory(0);\n\n\n\n // read command name as string\n\n let command_name_string_vec: Vec<u8> = emscripten_memory.view()\n\n [(command_name_offset as usize)..]\n\n .iter()\n\n .map(|cell| cell.get())\n\n .take_while(|&byte| byte != 0)\n\n .collect();\n\n let command_name_string = CString::new(command_name_string_vec).unwrap();\n\n\n\n // get the array of args\n\n let mut argv: Vec<*const i8> = emscripten_memory.view()[((argv_offset / 4) as usize)..]\n\n .iter()\n\n .map(|cell: &Cell<u32>| cell.get())\n\n .take_while(|&byte| byte != 0)\n\n .map(|offset| {\n\n let p: *const i8 = (emscripten_memory.view::<u8>()[(offset as usize)..])\n", 
"file_path": "lib/emscripten/src/exec.rs", "rank": 28, "score": 233248.81438340395 }, { "content": "/// Detect if a provided binary is a Wasm file\n\npub fn is_wasm_binary(binary: &[u8]) -> bool {\n\n binary.starts_with(&[b'\\0', b'a', b's', b'm'])\n\n}\n", "file_path": "src/utils.rs", "rank": 29, "score": 231457.7919997263 }, { "content": "pub fn call_memset(ctx: &mut Ctx, pointer: u32, value: u32, size: u32) -> u32 {\n\n get_emscripten_data(ctx)\n\n .memset\n\n .as_ref()\n\n .unwrap()\n\n .call(pointer, value, size)\n\n .unwrap()\n\n}\n\n\n\npub(crate) fn get_emscripten_data(ctx: &mut Ctx) -> &mut EmscriptenData {\n\n unsafe { &mut *(ctx.data as *mut EmscriptenData) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 30, "score": 229906.2677283355 }, { "content": "pub fn fexecve(_ctx: &mut Ctx, _a: i32, _b: i32, _c: i32) -> i32 {\n\n debug!(\"emscripten::fexecve\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 31, "score": 226511.90389951243 }, { "content": "/// emscripten: alignfault\n\npub fn alignfault(ctx: &mut Ctx) {\n\n debug!(\"emscripten::alignfault\");\n\n abort_with_message(ctx, \"alignment fault\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 32, "score": 226184.8049918518 }, { "content": "/// emscripten: ftfault\n\npub fn ftfault(ctx: &mut Ctx) {\n\n debug!(\"emscripten::ftfault\");\n\n abort_with_message(ctx, \"Function table mask error\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 33, "score": 226184.8049918518 }, { "content": "/// emscripten: _tvset\n\npub fn _tvset(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::_tvset UNIMPLEMENTED\");\n\n}\n\n\n\n/// formats time as a C string\n\n#[allow(clippy::cast_ptr_alignment)]\n\nunsafe fn fmt_time(ctx: &mut Ctx, time: u32) -> *const c_char {\n\n let date = &*(emscripten_memory_pointer!(ctx.memory(0), time) as *mut guest_tm);\n\n\n\n let days = vec![\"Sun\", \"Mon\", \"Tue\", \"Wed\", \"Thu\", \"Fri\", \"Sat\"];\n\n let 
months = vec![\n\n \"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\",\n\n ];\n\n let year = 1900 + date.tm_year;\n\n\n\n let time_str = format!(\n\n // NOTE: TODO: Hack! The 14 accompanying chars are needed for some reason\n\n \"{} {} {:2} {:02}:{:02}:{:02} {:4}\\n\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\",\n\n days[date.tm_wday as usize],\n\n months[date.tm_mon as usize],\n\n date.tm_mday,\n\n date.tm_hour,\n\n date.tm_min,\n\n date.tm_sec,\n\n year\n\n );\n\n\n\n time_str[0..26].as_ptr() as _\n\n}\n\n\n", "file_path": "lib/emscripten/src/time.rs", "rank": 34, "score": 226184.8049918518 }, { "content": "pub fn _endgrent(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::_endgrent\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 35, "score": 226184.8049918518 }, { "content": "/// emscripten: segfault\n\npub fn segfault(ctx: &mut Ctx) {\n\n debug!(\"emscripten::segfault\");\n\n abort_with_message(ctx, \"segmentation fault\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 36, "score": 226184.8049918518 }, { "content": "pub fn _setgrent(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::_setgrent\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 37, "score": 226184.8049918518 }, { "content": "pub fn _abort(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::_abort\");\n\n unsafe {\n\n abort();\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 38, "score": 226184.8049918518 }, { "content": "/// Calls `GET_CONTEXT` and returns the current context.\n\npub fn get_context() -> *const CallContext {\n\n GET_CONTEXT()\n\n}\n\n\n\nimpl TrampolineBufferBuilder {\n\n pub fn new() -> TrampolineBufferBuilder {\n\n TrampolineBufferBuilder {\n\n code: vec![],\n\n offsets: vec![],\n\n }\n\n }\n\n\n\n /// Adds a context trampoline.\n\n ///\n\n /// This generates a transparent trampoline function that forwards any call to `target` with\n\n /// unmodified params/returns. 
When called from the trampoline, `target` will have access to\n\n /// the `context` specified here through `get_context()`.\n\n ///\n\n /// Note that since `rax` is overwritten internally, variadic functions are not supported as `target`.\n\n pub fn add_context_trampoline(\n", "file_path": "lib/runtime-core/src/trampoline_x64.rs", "rank": 39, "score": 226049.74048522394 }, { "content": "#[cfg(unix)]\n\npub fn pathconf(ctx: &mut Ctx, path_ptr: i32, name: i32) -> i32 {\n\n debug!(\"emscripten::pathconf\");\n\n let path = emscripten_memory_pointer!(ctx.memory(0), path_ptr) as *const i8;\n\n unsafe { libc::pathconf(path as *const _, name).try_into().unwrap() }\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 40, "score": 225682.43907911822 }, { "content": "#[cfg(not(unix))]\n\npub fn pathconf(_ctx: &mut Ctx, _path_ptr: i32, _name: i32) -> i32 {\n\n debug!(\"emscripten::pathconf\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/libc.rs", "rank": 41, "score": 225682.43907911822 }, { "content": "/// Perform validation as defined by the\n\n/// WebAssembly specification. 
Returns `true` if validation\n\n/// succeeded, `false` if validation failed.\n\npub fn validate(wasm: &[u8]) -> bool {\n\n validate_and_report_errors(wasm).is_ok()\n\n}\n\n\n", "file_path": "lib/runtime-core/src/lib.rs", "rank": 42, "score": 224743.86405828127 }, { "content": "/// tzset\n\npub fn tzset(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::tzset - stub\");\n\n //unimplemented!()\n\n}\n\n\n", "file_path": "lib/emscripten/src/io/mod.rs", "rank": 43, "score": 222923.44440785696 }, { "content": "pub fn _llvm_trap(ctx: &mut Ctx) {\n\n debug!(\"emscripten::_llvm_trap\");\n\n abort_with_message(ctx, \"abort!\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 44, "score": 222923.44440785696 }, { "content": "/// printf\n\npub fn printf(ctx: &mut Ctx, memory_offset: i32, extra: i32) -> i32 {\n\n debug!(\"emscripten::printf {}, {}\", memory_offset, extra);\n\n unsafe {\n\n let addr = emscripten_memory_pointer!(ctx.memory(0), memory_offset) as _;\n\n _printf(addr, extra)\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/io/unix.rs", "rank": 45, "score": 222345.67049176118 }, { "content": "/// printf\n\npub fn printf(_ctx: &mut Ctx, memory_offset: i32, extra: i32) -> i32 {\n\n debug!(\"emscripten::printf {}, {}\", memory_offset, extra);\n\n #[cfg(not(feature = \"debug\"))]\n\n {\n\n let _ = memory_offset;\n\n let _ = extra;\n\n }\n\n // unsafe {\n\n // let addr = emscripten_memory_pointer!(ctx.memory(0), memory_offset) as _;\n\n // _printf(addr, extra)\n\n // }\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/io/windows.rs", "rank": 46, "score": 222345.67049176118 }, { "content": "/// The webassembly::compile() function compiles a webassembly::Module\n\n/// from WebAssembly binary code. 
This function is useful if it\n\n/// is necessary to a compile a module before it can be instantiated\n\n/// (otherwise, the webassembly::instantiate() function should be used).\n\n/// Params:\n\n/// * `buffer_source`: A `Vec<u8>` containing the\n\n/// binary code of the .wasm module you want to compile.\n\n/// Errors:\n\n/// If the operation fails, the Result rejects with a\n\n/// webassembly::CompileError.\n\npub fn compile(buffer_source: &[u8]) -> Result<Module> {\n\n let module = runtime::compile(buffer_source)?;\n\n Ok(module)\n\n}\n", "file_path": "src/webassembly.rs", "rank": 47, "score": 221713.280878281 }, { "content": "/// ### `random_get()`\n\n/// Fill buffer with high-quality random data. This function may be slow and block\n\n/// Inputs:\n\n/// - `void *buf`\n\n/// A pointer to a buffer where the random bytes will be written\n\n/// - `size_t buf_len`\n\n/// The number of bytes that will be written\n\npub fn random_get(ctx: &mut Ctx, buf: WasmPtr<u8, Array>, buf_len: u32) -> __wasi_errno_t {\n\n debug!(\"wasi::random_get buf_len: {}\", buf_len);\n\n let mut rng = thread_rng();\n\n let memory = ctx.memory(0);\n\n\n\n let buf = wasi_try!(buf.deref(memory, 0, buf_len));\n\n\n\n unsafe {\n\n let u8_buffer = &mut *(buf as *const [_] as *mut [_] as *mut [u8]);\n\n thread_rng().fill(u8_buffer);\n\n }\n\n\n\n __WASI_ESUCCESS\n\n}\n\n\n", "file_path": "lib/wasi/src/syscalls/mod.rs", "rank": 48, "score": 221338.3628654674 }, { "content": "pub fn ___cxa_end_catch(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::___cxa_end_catch\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 49, "score": 219811.8102323614 }, { "content": "pub fn ___cxa_pure_virtual(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::___cxa_pure_virtual\");\n\n // ABORT = true\n\n panic!(\"Pure virtual function called!\");\n\n}\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 50, "score": 219811.8102323614 }, { "content": "pub fn ensure_sighandler() {\n\n 
INSTALL_SIGHANDLER.call_once(|| unsafe {\n\n install_sighandler();\n\n });\n\n}\n\n\n\nstatic INSTALL_SIGHANDLER: Once = Once::new();\n\n\n\nunsafe fn install_sighandler() {\n\n let sa_trap = SigAction::new(\n\n SigHandler::SigAction(signal_trap_handler),\n\n SaFlags::SA_ONSTACK,\n\n SigSet::empty(),\n\n );\n\n sigaction(SIGFPE, &sa_trap).unwrap();\n\n sigaction(SIGILL, &sa_trap).unwrap();\n\n sigaction(SIGSEGV, &sa_trap).unwrap();\n\n sigaction(SIGBUS, &sa_trap).unwrap();\n\n sigaction(SIGTRAP, &sa_trap).unwrap();\n\n\n", "file_path": "lib/runtime-core/src/fault.rs", "rank": 51, "score": 217703.52464410343 }, { "content": "#[warn(dead_code)]\n\npub fn call_malloc_with_cast<T: Copy, Ty>(ctx: &mut Ctx, size: u32) -> WasmPtr<T, Ty> {\n\n WasmPtr::new(call_malloc(ctx, size))\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 52, "score": 217679.8614944949 }, { "content": "/// Round `size` up to the nearest multiple of `page_size`.\n\nfn round_up_to_page_size(size: usize, page_size: usize) -> usize {\n\n assert!(page_size.is_power_of_two());\n\n (size + (page_size - 1)) & !(page_size - 1)\n\n}\n\n\n", "file_path": "lib/runtime-core/src/sys/unix/memory.rs", "rank": 53, "score": 217347.38260653598 }, { "content": "/// Round `size` down to the nearest multiple of `page_size`.\n\nfn round_down_to_page_size(size: usize, page_size: usize) -> usize {\n\n size & !(page_size - 1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn clone() {\n\n // these should work\n\n let _ = Memory::with_size_protect(200_000, Protect::Read)\n\n .unwrap()\n\n .clone();\n\n let _ = Memory::with_size_protect(200_000, Protect::ReadWrite)\n\n .unwrap()\n\n .clone();\n\n let _ = Memory::with_size_protect(200_000, Protect::ReadExec)\n\n .unwrap()\n\n .clone();\n\n\n\n // this would cause segmentation fault as uncommited memory with no access\n\n //let _ = Memory::with_size_protect(200_000, Protect::None).unwrap().clone();\n\n }\n\n}\n", "file_path": 
"lib/runtime-core/src/sys/windows/memory.rs", "rank": 54, "score": 217347.38260653598 }, { "content": "/// Round `size` down to the nearest multiple of `page_size`.\n\nfn round_down_to_page_size(size: usize, page_size: usize) -> usize {\n\n assert!(page_size.is_power_of_two());\n\n size & !(page_size - 1)\n\n}\n", "file_path": "lib/runtime-core/src/sys/unix/memory.rs", "rank": 55, "score": 217347.38260653598 }, { "content": "/// Round `size` up to the nearest multiple of `page_size`.\n\nfn round_up_to_page_size(size: usize, page_size: usize) -> usize {\n\n (size + (page_size - 1)) & !(page_size - 1)\n\n}\n\n\n", "file_path": "lib/runtime-core/src/sys/windows/memory.rs", "rank": 56, "score": 217347.38260653598 }, { "content": "pub fn exit_with_live_runtime(_ctx: &mut Ctx) {\n\n debug!(\"emscripten::exit_with_live_runtime\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/emscripten_target.rs", "rank": 57, "score": 216839.82310671365 }, { "content": "/// emscripten: dlerror() -> *mut c_char\n\npub fn _dlerror(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::_dlerror\");\n\n -1\n\n}\n", "file_path": "lib/emscripten/src/linking.rs", "rank": 58, "score": 216699.03547371348 }, { "content": "#[allow(clippy::cast_ptr_alignment)]\n\npub fn _getgrent(_ctx: &mut Ctx) -> c_int {\n\n debug!(\"emscripten::_getgrent\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 59, "score": 216693.43297420908 }, { "content": "/// emscripten: _clock\n\npub fn _clock(_ctx: &mut Ctx) -> c_int {\n\n debug!(\"emscripten::_clock\");\n\n 0 // TODO: unimplemented\n\n}\n\n\n", "file_path": "lib/emscripten/src/time.rs", "rank": 60, "score": 216693.43297420908 }, { "content": "pub fn _sigaction(_ctx: &mut Ctx, _signum: u32, _act: u32, _oldact: u32) -> i32 {\n\n debug!(\"emscripten::_sigaction {}, {}, {}\", _signum, _act, _oldact);\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/signal.rs", "rank": 61, "score": 216048.42037109117 }, { "content": "/// ### 
`args_sizes_get()`\n\n/// Return command-line argument data sizes.\n\n/// Outputs:\n\n/// - `size_t *argc`\n\n/// The number of arguments.\n\n/// - `size_t *argv_buf_size`\n\n/// The size of the argument string data.\n\npub fn args_sizes_get(\n\n ctx: &mut Ctx,\n\n argc: WasmPtr<u32>,\n\n argv_buf_size: WasmPtr<u32>,\n\n) -> __wasi_errno_t {\n\n debug!(\"wasi::args_sizes_get\");\n\n let memory = ctx.memory(0);\n\n\n\n let argc = wasi_try!(argc.deref(memory));\n\n let argv_buf_size = wasi_try!(argv_buf_size.deref(memory));\n\n\n\n let state = get_wasi_state(ctx);\n\n\n\n let argc_val = state.args.len() as u32;\n\n let argv_buf_size_val = state.args.iter().map(|v| v.len() as u32 + 1).sum();\n\n argc.set(argc_val);\n\n argv_buf_size.set(argv_buf_size_val);\n\n\n\n debug!(\"=> argc={}, argv_buf_size={}\", argc_val, argv_buf_size_val);\n\n\n\n __WASI_ESUCCESS\n\n}\n\n\n", "file_path": "lib/wasi/src/syscalls/mod.rs", "rank": 62, "score": 214243.5809819289 }, { "content": "/// ### `environ_sizes_get()`\n\n/// Return command-line argument data sizes.\n\n/// Outputs:\n\n/// - `size_t *environ_count`\n\n/// The number of environment variables.\n\n/// - `size_t *environ_buf_size`\n\n/// The size of the environment variable string data.\n\npub fn environ_sizes_get(\n\n ctx: &mut Ctx,\n\n environ_count: WasmPtr<u32>,\n\n environ_buf_size: WasmPtr<u32>,\n\n) -> __wasi_errno_t {\n\n debug!(\"wasi::environ_sizes_get\");\n\n let memory = ctx.memory(0);\n\n\n\n let environ_count = wasi_try!(environ_count.deref(memory));\n\n let environ_buf_size = wasi_try!(environ_buf_size.deref(memory));\n\n\n\n let state = get_wasi_state(ctx);\n\n\n\n let env_var_count = state.envs.len() as u32;\n\n let env_buf_size = state.envs.iter().map(|v| v.len() as u32 + 1).sum();\n\n environ_count.set(env_var_count);\n\n environ_buf_size.set(env_buf_size);\n\n\n\n debug!(\n\n \"env_var_count: {}, env_buf_size: {}\",\n\n env_var_count, env_buf_size\n\n );\n\n\n\n __WASI_ESUCCESS\n\n}\n\n\n", "file_path": 
"lib/wasi/src/syscalls/mod.rs", "rank": 63, "score": 214243.525688476 }, { "content": "pub fn _fork(_ctx: &mut Ctx) -> PidT {\n\n debug!(\"emscripten::_fork\");\n\n // unsafe {\n\n // fork()\n\n // }\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 64, "score": 213581.79879871354 }, { "content": "pub fn _sched_yield(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::_sched_yield\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 65, "score": 213581.79879871354 }, { "content": "pub fn _pthread_self(_ctx: &mut Ctx) -> i32 {\n\n trace!(\"emscripten::_pthread_self\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/pthread.rs", "rank": 66, "score": 213581.79879871354 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn _getdtablesize(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::getdtablesize\");\n\n -1\n\n}\n", "file_path": "lib/emscripten/src/emscripten_target.rs", "rank": 67, "score": 213581.79879871354 }, { "content": "pub fn _emscripten_random(_ctx: &mut Ctx) -> f64 {\n\n debug!(\"emscripten::_emscripten_random\");\n\n -1.0\n\n}\n\n\n", "file_path": "lib/emscripten/src/math.rs", "rank": 68, "score": 213581.79879871354 }, { "content": "/// emscripten: enlargeMemory\n\npub fn enlarge_memory(_ctx: &mut Ctx) -> u32 {\n\n debug!(\"emscripten::enlarge_memory\");\n\n // instance.memories[0].grow(100);\n\n // TODO: Fix implementation\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 69, "score": 213581.79879871354 }, { "content": "pub fn _getpagesize(_ctx: &mut Ctx) -> u32 {\n\n debug!(\"emscripten::_getpagesize\");\n\n 16384\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 70, "score": 213581.79879871354 }, { "content": "pub fn _llvm_stacksave(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::_llvm_stacksave\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 71, "score": 213581.79879871354 }, { "content": "pub fn nullfunc(ctx: &mut Ctx, 
_x: u32) {\n\n use crate::process::abort_with_message;\n\n debug!(\"emscripten::nullfunc_i {}\", _x);\n\n abort_with_message(ctx, \"Invalid function pointer. Perhaps this is an invalid value \\\n\n (e.g. caused by calling a virtual method on a NULL pointer)? Or calling a function with an \\\n\n incorrect type, which will fail? (it is worth building your source files with -Werror (\\\n\n warnings are errors), as warnings can indicate undefined behavior which can cause this)\");\n\n}\n\n\n\n/// The current version of this crate\n\npub const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n", "file_path": "lib/emscripten/src/lib.rs", "rank": 72, "score": 211292.7831346981 }, { "content": "// NOTE: Not implemented by Emscripten\n\npub fn ___lock(_ctx: &mut Ctx, _what: c_int) {\n\n debug!(\"emscripten::___lock {}\", _what);\n\n}\n\n\n", "file_path": "lib/emscripten/src/lock.rs", "rank": 73, "score": 211292.7831346981 }, { "content": "// NOTE: Not implemented by Emscripten\n\npub fn ___unlock(_ctx: &mut Ctx, _what: c_int) {\n\n debug!(\"emscripten::___unlock {}\", _what);\n\n}\n\n\n", "file_path": "lib/emscripten/src/lock.rs", "rank": 74, "score": 211292.7831346981 }, { "content": "/// ### `fd_filestat_set_size()`\n\n/// Change the size of an open file, zeroing out any new bytes\n\n/// Inputs:\n\n/// - `__wasi_fd_t fd`\n\n/// File descriptor to adjust\n\n/// - `__wasi_filesize_t st_size`\n\n/// New size that `fd` will be set to\n\npub fn fd_filestat_set_size(\n\n ctx: &mut Ctx,\n\n fd: __wasi_fd_t,\n\n st_size: __wasi_filesize_t,\n\n) -> __wasi_errno_t {\n\n debug!(\"wasi::fd_filestat_set_size\");\n\n let memory = ctx.memory(0);\n\n let state = get_wasi_state(ctx);\n\n let fd_entry = wasi_try!(state.fs.get_fd(fd)).clone();\n\n let inode = fd_entry.inode;\n\n\n\n if !has_rights(fd_entry.rights, __WASI_RIGHT_FD_FILESTAT_SET_SIZE) {\n\n return __WASI_EACCES;\n\n }\n\n\n\n match &mut state.fs.inodes[inode].kind {\n\n Kind::File { handle, .. 
} => {\n\n if let Some(handle) = handle {\n\n wasi_try!(handle.set_len(st_size).map_err(WasiFsError::into_wasi_err));\n\n } else {\n", "file_path": "lib/wasi/src/syscalls/mod.rs", "rank": 75, "score": 210966.9775707086 }, { "content": "pub fn ___cxa_uncaught_exception(_ctx: &mut Ctx) -> i32 {\n\n debug!(\"emscripten::___cxa_uncaught_exception\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 76, "score": 210609.81167306576 }, { "content": "/// emscripten: getTotalMemory\n\npub fn get_total_memory(_ctx: &mut Ctx) -> u32 {\n\n debug!(\"emscripten::get_total_memory\");\n\n // instance.memories[0].current_pages()\n\n // TODO: Fix implementation\n\n _ctx.memory(0).size().bytes().0 as u32\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 77, "score": 210609.81167306576 }, { "content": "/// Declare local variables for the signature parameters that correspond to WebAssembly locals.\n\n///\n\n/// Return the number of local variables declared.\n\nfn declare_wasm_parameters(builder: &mut FunctionBuilder, entry_block: Ebb) -> usize {\n\n let sig_len = builder.func.signature.params.len();\n\n let mut next_local = 0;\n\n for i in 0..sig_len {\n\n let param_type = builder.func.signature.params[i];\n\n // There may be additional special-purpose parameters following the normal WebAssembly\n\n // signature parameters. 
For example, a `vmctx` pointer.\n\n if param_type.purpose == ir::ArgumentPurpose::Normal {\n\n // This is a normal WebAssembly signature parameter, so create a local for it.\n\n let local = Variable::new(next_local);\n\n builder.declare_var(local, param_type.value_type);\n\n next_local += 1;\n\n\n\n let param_value = builder.ebb_params(entry_block)[i];\n\n builder.def_var(local, param_value);\n\n }\n\n if param_type.purpose == ir::ArgumentPurpose::VMContext {\n\n let param_value = builder.ebb_params(entry_block)[i];\n\n builder.set_val_label(param_value, get_vmctx_value_label());\n\n }\n\n }\n\n\n\n next_local\n\n}\n", "file_path": "lib/clif-backend/src/code.rs", "rank": 78, "score": 210151.1460085936 }, { "content": "/// exit\n\npub fn ___syscall1(ctx: &mut Ctx, _which: c_int, mut varargs: VarArgs) {\n\n debug!(\"emscripten::___syscall1 (exit) {}\", _which);\n\n let status: i32 = varargs.get(ctx);\n\n unsafe {\n\n exit(status);\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/syscalls/mod.rs", "rank": 79, "score": 209809.96229087457 }, { "content": "// __exit\n\npub fn exit(_ctx: &mut Ctx, value: i32) {\n\n debug!(\"emscripten::exit {}\", value);\n\n ::std::process::exit(value);\n\n}\n", "file_path": "lib/emscripten/src/exit.rs", "rank": 80, "score": 208181.14895920255 }, { "content": "#[allow(unreachable_code)]\n\npub fn _exit(_ctx: &mut Ctx, status: c_int) {\n\n // -> !\n\n debug!(\"emscripten::_exit {}\", status);\n\n unsafe { exit(status) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 81, "score": 208181.14895920255 }, { "content": "pub fn _pthread_exit(_ctx: &mut Ctx, _a: i32) -> () {\n\n trace!(\"emscripten::_pthread_exit\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/pthread.rs", "rank": 82, "score": 208181.14895920255 }, { "content": "pub fn ___seterrno(_ctx: &mut Ctx, _value: i32) {\n\n debug!(\"emscripten::___seterrno {}\", _value);\n\n // TODO: Incomplete impl\n\n eprintln!(\"failed to set errno!\");\n\n // 
value\n\n}\n\n\n\n// pub enum ErrnoCodes {\n\n// EPERM = 1,\n\n// ENOENT = 2,\n\n// ESRCH = 3,\n\n// EINTR = 4,\n\n// EIO = 5,\n\n// ENXIO = 6,\n\n// E2BIG = 7,\n\n// ENOEXEC = 8,\n\n// EBADF = 9,\n\n// ECHILD = 10,\n\n// EAGAIN = 11,\n\n// EWOULDBLOCK = 11,\n", "file_path": "lib/emscripten/src/errno.rs", "rank": 83, "score": 208181.14895920255 }, { "content": "/// ### `sched_yield()`\n\n/// Yields execution of the thread\n\npub fn sched_yield(ctx: &mut Ctx) -> __wasi_errno_t {\n\n debug!(\"wasi::sched_yield\");\n\n ::std::thread::yield_now();\n\n __WASI_ESUCCESS\n\n}\n\n\n", "file_path": "lib/wasi/src/syscalls/mod.rs", "rank": 84, "score": 207768.27708627394 }, { "content": "/// emscripten: abortOnCannotGrowMemory\n\npub fn abort_on_cannot_grow_memory_old(ctx: &mut Ctx) -> u32 {\n\n debug!(\"emscripten::abort_on_cannot_grow_memory\");\n\n abort_with_message(ctx, \"Cannot enlarge memory arrays!\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 85, "score": 207768.27708627394 }, { "content": "pub fn getTempRet0(ctx: &mut Ctx) -> i32 {\n\n trace!(\"emscripten::getTempRet0\");\n\n get_emscripten_data(ctx).temp_ret_0\n\n}\n\n\n", "file_path": "lib/emscripten/src/emscripten_target.rs", "rank": 86, "score": 207768.27708627394 }, { "content": "pub fn ___cxa_current_primary_exception(_ctx: &mut Ctx) -> u32 {\n\n debug!(\"emscripten::___cxa_current_primary_exception\");\n\n unimplemented!()\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 87, "score": 207768.27708627394 }, { "content": "/// Compile WebAssembly binary code into a [`Module`].\n\n/// This function is useful if it is necessary to\n\n/// compile a module before it can be instantiated\n\n/// (otherwise, the [`instantiate`] function should be used).\n\n///\n\n/// [`Module`]: struct.Module.html\n\n/// [`instantiate`]: fn.instantiate.html\n\n///\n\n/// # Params:\n\n/// * `wasm`: A `&[u8]` containing the\n\n/// binary code of the wasm module you want to compile.\n\n/// # 
Errors:\n\n/// If the operation fails, the function returns `Err(error::CompileError::...)`.\n\npub fn compile(wasm: &[u8]) -> error::CompileResult<Module> {\n\n wasmer_runtime_core::compile_with(&wasm[..], &default_compiler())\n\n}\n\n\n", "file_path": "lib/runtime/src/lib.rs", "rank": 88, "score": 206854.46061254432 }, { "content": "pub fn dirent_to_le_bytes(ent: &__wasi_dirent_t) -> Vec<u8> {\n\n use std::mem::transmute;\n\n let mut out = Vec::with_capacity(std::mem::size_of::<__wasi_dirent_t>());\n\n let bytes: [u8; 8] = unsafe { transmute(ent.d_next.to_le()) };\n\n for &b in &bytes {\n\n out.push(b);\n\n }\n\n let bytes: [u8; 8] = unsafe { transmute(ent.d_ino.to_le()) };\n\n for &b in &bytes {\n\n out.push(b);\n\n }\n\n let bytes: [u8; 4] = unsafe { transmute(ent.d_namlen.to_le()) };\n\n for &b in &bytes {\n\n out.push(b);\n\n }\n\n out.push(ent.d_type);\n\n out.push(0);\n\n out.push(0);\n\n out.push(0);\n\n assert_eq!(out.len(), std::mem::size_of::<__wasi_dirent_t>());\n", "file_path": "lib/wasi/src/syscalls/types.rs", "rank": 89, "score": 206730.39837566868 }, { "content": "pub fn _llvm_stackrestore(_ctx: &mut Ctx, _one: i32) {\n\n debug!(\"emscripten::_llvm_stackrestore\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 90, "score": 205209.1618335548 }, { "content": "pub fn ___resumeException(_ctx: &mut Ctx, _a: i32) {\n\n debug!(\"emscripten::___resumeException\");\n\n}\n", "file_path": "lib/emscripten/src/emscripten_target.rs", "rank": 91, "score": 205209.1618335548 }, { "content": "/// putchar\n\npub fn putchar(_ctx: &mut Ctx, chr: i32) {\n\n unsafe { libc::putchar(chr) };\n\n}\n\n\n", "file_path": "lib/emscripten/src/io/unix.rs", "rank": 92, "score": 205209.1618335548 }, { "content": "pub fn abort_with_message(ctx: &mut Ctx, message: &str) {\n\n debug!(\"emscripten::abort_with_message\");\n\n println!(\"{}\", message);\n\n _abort(ctx);\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 93, "score": 205209.1618335548 
}, { "content": "/// putchar\n\npub fn putchar(_ctx: &mut Ctx, chr: i32) {\n\n unsafe { libc::putchar(chr) };\n\n}\n\n\n", "file_path": "lib/emscripten/src/io/windows.rs", "rank": 94, "score": 205209.1618335548 }, { "content": "pub fn abort_stack_overflow(ctx: &mut Ctx, _what: c_int) {\n\n debug!(\"emscripten::abort_stack_overflow\");\n\n // TODO: Message incomplete. Need to finish em runtime data first\n\n abort_with_message(\n\n ctx,\n\n \"Stack overflow! Attempted to allocate some bytes on the stack\",\n\n );\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 95, "score": 205209.1618335548 }, { "content": "pub fn em_abort(ctx: &mut Ctx, message: u32) {\n\n debug!(\"emscripten::em_abort {}\", message);\n\n let message_addr = emscripten_memory_pointer!(ctx.memory(0), message) as *mut c_char;\n\n unsafe {\n\n let message = CStr::from_ptr(message_addr)\n\n .to_str()\n\n .unwrap_or(\"Unexpected abort\");\n\n\n\n abort_with_message(ctx, message);\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 96, "score": 205209.1618335548 }, { "content": "pub fn invoke_v(ctx: &mut Ctx, index: i32) {\n\n debug!(\"emscripten::invoke_v\");\n\n invoke_no_return!(ctx, dyn_call_v, index);\n\n}\n", "file_path": "lib/emscripten/src/emscripten_target.rs", "rank": 97, "score": 205209.1618335548 } ]
Rust
datafusion/src/physical_plan/file_format/json.rs
andts/arrow-datafusion
1c39f5ce865e3e1225b4895196073be560a93e82
use async_trait::async_trait; use crate::error::{DataFusionError, Result}; use crate::execution::runtime_env::RuntimeEnv; use crate::physical_plan::{ DisplayFormatType, ExecutionPlan, Partitioning, SendableRecordBatchStream, Statistics, }; use arrow::{datatypes::SchemaRef, json}; use std::any::Any; use std::sync::Arc; use super::file_stream::{BatchIter, FileStream}; use super::PhysicalPlanConfig; #[derive(Debug, Clone)] pub struct NdJsonExec { base_config: PhysicalPlanConfig, projected_statistics: Statistics, projected_schema: SchemaRef, } impl NdJsonExec { pub fn new(base_config: PhysicalPlanConfig) -> Self { let (projected_schema, projected_statistics) = base_config.project(); Self { base_config, projected_schema, projected_statistics, } } } #[async_trait] impl ExecutionPlan for NdJsonExec { fn as_any(&self) -> &dyn Any { self } fn schema(&self) -> SchemaRef { self.projected_schema.clone() } fn output_partitioning(&self) -> Partitioning { Partitioning::UnknownPartitioning(self.base_config.file_groups.len()) } fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> { Vec::new() } fn with_new_children( &self, children: Vec<Arc<dyn ExecutionPlan>>, ) -> Result<Arc<dyn ExecutionPlan>> { if children.is_empty() { Ok(Arc::new(self.clone()) as Arc<dyn ExecutionPlan>) } else { Err(DataFusionError::Internal(format!( "Children cannot be replaced in {:?}", self ))) } } async fn execute( &self, partition: usize, _runtime: Arc<RuntimeEnv>, ) -> Result<SendableRecordBatchStream> { let proj = self.base_config.projected_file_column_names(); let batch_size = self.base_config.batch_size; let file_schema = Arc::clone(&self.base_config.file_schema); let fun = move |file, _remaining: &Option<usize>| { Box::new(json::Reader::new( file, Arc::clone(&file_schema), batch_size, proj.clone(), )) as BatchIter }; Ok(Box::pin(FileStream::new( Arc::clone(&self.base_config.object_store), self.base_config.file_groups[partition].clone(), fun, Arc::clone(&self.projected_schema), self.base_config.limit, 
self.base_config.table_partition_cols.clone(), ))) } fn fmt_as( &self, t: DisplayFormatType, f: &mut std::fmt::Formatter, ) -> std::fmt::Result { match t { DisplayFormatType::Default => { write!( f, "JsonExec: batch_size={}, limit={:?}, files={}", self.base_config.batch_size, self.base_config.limit, super::FileGroupsDisplay(&self.base_config.file_groups), ) } } } fn statistics(&self) -> Statistics { self.projected_statistics.clone() } } #[cfg(test)] mod tests { use futures::StreamExt; use crate::datasource::{ file_format::{json::JsonFormat, FileFormat}, object_store::local::{ local_object_reader_stream, local_unpartitioned_file, LocalFileSystem, }, }; use super::*; const TEST_DATA_BASE: &str = "tests/jsons"; async fn infer_schema(path: String) -> Result<SchemaRef> { JsonFormat::default() .infer_schema(local_object_reader_stream(vec![path])) .await } #[tokio::test] async fn nd_json_exec_file_without_projection() -> Result<()> { let runtime = Arc::new(RuntimeEnv::default()); use arrow::datatypes::DataType; let path = format!("{}/1.json", TEST_DATA_BASE); let exec = NdJsonExec::new(PhysicalPlanConfig { object_store: Arc::new(LocalFileSystem {}), file_groups: vec![vec![local_unpartitioned_file(path.clone())]], file_schema: infer_schema(path).await?, statistics: Statistics::default(), projection: None, batch_size: 1024, limit: Some(3), table_partition_cols: vec![], }); let inferred_schema = exec.schema(); assert_eq!(inferred_schema.fields().len(), 4); inferred_schema.field_with_name("a").unwrap(); inferred_schema.field_with_name("b").unwrap(); inferred_schema.field_with_name("c").unwrap(); inferred_schema.field_with_name("d").unwrap(); assert_eq!( inferred_schema.field_with_name("a").unwrap().data_type(), &DataType::Int64 ); assert!(matches!( inferred_schema.field_with_name("b").unwrap().data_type(), DataType::List(_) )); assert_eq!( inferred_schema.field_with_name("d").unwrap().data_type(), &DataType::Utf8 ); let mut it = exec.execute(0, runtime).await?; let batch = 
it.next().await.unwrap()?; assert_eq!(batch.num_rows(), 3); let values = batch .column(0) .as_any() .downcast_ref::<arrow::array::Int64Array>() .unwrap(); assert_eq!(values.value(0), 1); assert_eq!(values.value(1), -10); assert_eq!(values.value(2), 2); Ok(()) } #[tokio::test] async fn nd_json_exec_file_projection() -> Result<()> { let runtime = Arc::new(RuntimeEnv::default()); let path = format!("{}/1.json", TEST_DATA_BASE); let exec = NdJsonExec::new(PhysicalPlanConfig { object_store: Arc::new(LocalFileSystem {}), file_groups: vec![vec![local_unpartitioned_file(path.clone())]], file_schema: infer_schema(path).await?, statistics: Statistics::default(), projection: Some(vec![0, 2]), batch_size: 1024, limit: None, table_partition_cols: vec![], }); let inferred_schema = exec.schema(); assert_eq!(inferred_schema.fields().len(), 2); inferred_schema.field_with_name("a").unwrap(); inferred_schema.field_with_name("b").unwrap_err(); inferred_schema.field_with_name("c").unwrap(); inferred_schema.field_with_name("d").unwrap_err(); let mut it = exec.execute(0, runtime).await?; let batch = it.next().await.unwrap()?; assert_eq!(batch.num_rows(), 4); let values = batch .column(0) .as_any() .downcast_ref::<arrow::array::Int64Array>() .unwrap(); assert_eq!(values.value(0), 1); assert_eq!(values.value(1), -10); assert_eq!(values.value(2), 2); Ok(()) } }
use async_trait::async_trait; use crate::error::{DataFusionError, Result}; use crate::execution::runtime_env::RuntimeEnv; use crate::physical_plan::{ DisplayFormatType, ExecutionPlan, Partitioning, SendableRecordBatchStream, Statistics, }; use arrow::{datatypes::SchemaRef, json}; use std::any::Any; use std::sync::Arc; use super::file_stream::{BatchIter, FileStream}; use super::PhysicalPlanConfig; #[derive(Debug, Clone)] pub struct NdJsonExec { base_config: PhysicalPlanConfig, projected_statistics: Statistics, projected_schema: SchemaRef, } impl NdJsonExec { pub fn new(base_config: PhysicalPlanConfig) -> Self { let (projected_schema, projected_statistics) = base_config.project(); Self { base_config, projected_schema, projected_statistics, } } } #[async_trait] impl ExecutionPlan for NdJsonExec { fn as_any(&self) -> &dyn Any { self } fn schema(&self) -> SchemaRef { self.projected_schema.clone() } fn output_partitioning(&self) -> Partitioning { Partitioning::UnknownPartitioning(self.base_config.file_groups.len()) } fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> { Vec::new() } fn with_new_children( &self, children: Vec<Arc<dyn ExecutionPlan>>, ) -> Result<Arc<dyn ExecutionPlan>> { if children.is_empty() { Ok(Arc::new(self.clone()) as Arc<dyn ExecutionPlan>) } else {
} } async fn execute( &self, partition: usize, _runtime: Arc<RuntimeEnv>, ) -> Result<SendableRecordBatchStream> { let proj = self.base_config.projected_file_column_names(); let batch_size = self.base_config.batch_size; let file_schema = Arc::clone(&self.base_config.file_schema); let fun = move |file, _remaining: &Option<usize>| { Box::new(json::Reader::new( file, Arc::clone(&file_schema), batch_size, proj.clone(), )) as BatchIter }; Ok(Box::pin(FileStream::new( Arc::clone(&self.base_config.object_store), self.base_config.file_groups[partition].clone(), fun, Arc::clone(&self.projected_schema), self.base_config.limit, self.base_config.table_partition_cols.clone(), ))) } fn fmt_as( &self, t: DisplayFormatType, f: &mut std::fmt::Formatter, ) -> std::fmt::Result { match t { DisplayFormatType::Default => { write!( f, "JsonExec: batch_size={}, limit={:?}, files={}", self.base_config.batch_size, self.base_config.limit, super::FileGroupsDisplay(&self.base_config.file_groups), ) } } } fn statistics(&self) -> Statistics { self.projected_statistics.clone() } } #[cfg(test)] mod tests { use futures::StreamExt; use crate::datasource::{ file_format::{json::JsonFormat, FileFormat}, object_store::local::{ local_object_reader_stream, local_unpartitioned_file, LocalFileSystem, }, }; use super::*; const TEST_DATA_BASE: &str = "tests/jsons"; async fn infer_schema(path: String) -> Result<SchemaRef> { JsonFormat::default() .infer_schema(local_object_reader_stream(vec![path])) .await } #[tokio::test] async fn nd_json_exec_file_without_projection() -> Result<()> { let runtime = Arc::new(RuntimeEnv::default()); use arrow::datatypes::DataType; let path = format!("{}/1.json", TEST_DATA_BASE); let exec = NdJsonExec::new(PhysicalPlanConfig { object_store: Arc::new(LocalFileSystem {}), file_groups: vec![vec![local_unpartitioned_file(path.clone())]], file_schema: infer_schema(path).await?, statistics: Statistics::default(), projection: None, batch_size: 1024, limit: Some(3), table_partition_cols: 
vec![], }); let inferred_schema = exec.schema(); assert_eq!(inferred_schema.fields().len(), 4); inferred_schema.field_with_name("a").unwrap(); inferred_schema.field_with_name("b").unwrap(); inferred_schema.field_with_name("c").unwrap(); inferred_schema.field_with_name("d").unwrap(); assert_eq!( inferred_schema.field_with_name("a").unwrap().data_type(), &DataType::Int64 ); assert!(matches!( inferred_schema.field_with_name("b").unwrap().data_type(), DataType::List(_) )); assert_eq!( inferred_schema.field_with_name("d").unwrap().data_type(), &DataType::Utf8 ); let mut it = exec.execute(0, runtime).await?; let batch = it.next().await.unwrap()?; assert_eq!(batch.num_rows(), 3); let values = batch .column(0) .as_any() .downcast_ref::<arrow::array::Int64Array>() .unwrap(); assert_eq!(values.value(0), 1); assert_eq!(values.value(1), -10); assert_eq!(values.value(2), 2); Ok(()) } #[tokio::test] async fn nd_json_exec_file_projection() -> Result<()> { let runtime = Arc::new(RuntimeEnv::default()); let path = format!("{}/1.json", TEST_DATA_BASE); let exec = NdJsonExec::new(PhysicalPlanConfig { object_store: Arc::new(LocalFileSystem {}), file_groups: vec![vec![local_unpartitioned_file(path.clone())]], file_schema: infer_schema(path).await?, statistics: Statistics::default(), projection: Some(vec![0, 2]), batch_size: 1024, limit: None, table_partition_cols: vec![], }); let inferred_schema = exec.schema(); assert_eq!(inferred_schema.fields().len(), 2); inferred_schema.field_with_name("a").unwrap(); inferred_schema.field_with_name("b").unwrap_err(); inferred_schema.field_with_name("c").unwrap(); inferred_schema.field_with_name("d").unwrap_err(); let mut it = exec.execute(0, runtime).await?; let batch = it.next().await.unwrap()?; assert_eq!(batch.num_rows(), 4); let values = batch .column(0) .as_any() .downcast_ref::<arrow::array::Int64Array>() .unwrap(); assert_eq!(values.value(0), 1); assert_eq!(values.value(1), -10); assert_eq!(values.value(2), 2); Ok(()) } }
Err(DataFusionError::Internal(format!( "Children cannot be replaced in {:?}", self )))
call_expression
[]
Rust
language/vm/vm-runtime/src/loaded_data/loaded_module.rs
end-o/libra
38d2a1ed3fde793784b2dba978ee349099cdecfe
use crate::loaded_data::function::FunctionDef; use bytecode_verifier::VerifiedModule; use libra_types::{ identifier::Identifier, vm_error::{StatusCode, VMStatus}, }; use std::{collections::HashMap, sync::RwLock}; use vm::{ access::ModuleAccess, errors::VMResult, file_format::{ CompiledModule, FieldDefinitionIndex, FunctionDefinitionIndex, StructDefinitionIndex, StructFieldInformation, TableIndex, }, internals::ModuleIndex, }; use vm_runtime_types::loaded_data::struct_def::StructDef; #[derive(Debug, Eq, PartialEq)] pub struct LoadedModule { module: VerifiedModule, #[allow(dead_code)] pub struct_defs_table: HashMap<Identifier, StructDefinitionIndex>, #[allow(dead_code)] pub field_defs_table: HashMap<Identifier, FieldDefinitionIndex>, pub function_defs_table: HashMap<Identifier, FunctionDefinitionIndex>, pub function_defs: Vec<FunctionDef>, pub field_offsets: Vec<TableIndex>, cache: LoadedModuleCache, } impl ModuleAccess for LoadedModule { fn as_module(&self) -> &CompiledModule { &self.module.as_inner() } } #[derive(Debug)] struct LoadedModuleCache { struct_defs: Vec<RwLock<Option<StructDef>>>, } impl PartialEq for LoadedModuleCache { fn eq(&self, _other: &Self) -> bool { true } } impl Eq for LoadedModuleCache {} impl LoadedModule { pub fn new(module: VerifiedModule) -> Self { let mut struct_defs_table = HashMap::new(); let mut field_defs_table = HashMap::new(); let mut function_defs_table = HashMap::new(); let mut function_defs = vec![]; let struct_defs = module .struct_defs() .iter() .map(|_| RwLock::new(None)) .collect(); let cache = LoadedModuleCache { struct_defs }; let mut field_offsets: Vec<TableIndex> = module.field_defs().iter().map(|_| 0).collect(); for (idx, struct_def) in module.struct_defs().iter().enumerate() { let name = module .identifier_at(module.struct_handle_at(struct_def.struct_handle).name) .into(); let sd_idx = StructDefinitionIndex::new(idx as TableIndex); struct_defs_table.insert(name, sd_idx); if let StructFieldInformation::Declared { 
field_count, fields, } = &struct_def.field_information { for i in 0..*field_count { let field_index = fields.into_index(); assume!(field_index <= usize::max_value() - (i as usize)); field_offsets[field_index + (i as usize)] = i; } } } for (idx, field_def) in module.field_defs().iter().enumerate() { let name = module.identifier_at(field_def.name).into(); let fd_idx = FieldDefinitionIndex::new(idx as TableIndex); field_defs_table.insert(name, fd_idx); } for (idx, function_def) in module.function_defs().iter().enumerate() { let name = module .identifier_at(module.function_handle_at(function_def.function).name) .into(); let fd_idx = FunctionDefinitionIndex::new(idx as TableIndex); function_defs_table.insert(name, fd_idx); assume!(function_defs.len() < usize::max_value()); function_defs.push(FunctionDef::new(&module, fd_idx)); } LoadedModule { module, struct_defs_table, field_defs_table, function_defs_table, function_defs, field_offsets, cache, } } pub fn cached_struct_def_at(&self, idx: StructDefinitionIndex) -> Option<StructDef> { let cached = self.cache.struct_defs[idx.into_index()] .read() .expect("lock poisoned"); cached.clone() } pub fn cache_struct_def(&self, idx: StructDefinitionIndex, def: StructDef) { let mut cached = self.cache.struct_defs[idx.into_index()] .write() .expect("lock poisoned"); cached.replace(def); } pub fn get_field_offset(&self, idx: FieldDefinitionIndex) -> VMResult<TableIndex> { self.field_offsets .get(idx.into_index()) .cloned() .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR)) } } #[test] fn assert_thread_safe() { fn assert_send<T: Send>() {}; fn assert_sync<T: Sync>() {}; assert_send::<LoadedModule>(); assert_sync::<LoadedModule>(); }
use crate::loaded_data::function::FunctionDef; use bytecode_verifier::VerifiedModule; use libra_types::{ identifier::Identifier, vm_error::{StatusCode, VMStatus}, }; use std::{collections::HashMap, sync::RwLock}; use vm::{ access::ModuleAccess, errors::VMResult, file_format::{ CompiledModule, FieldDefinitionIndex, FunctionDefinitionIndex, StructDefinitionIndex, StructFieldInformation, TableIndex, }, internals::ModuleIndex, }; use vm_runtime_types::loaded_data::struct_def::StructDef; #[derive(Debug, Eq, PartialEq)] pub struct LoadedModule { module: VerifiedModule, #[allow(dead_code)] pub struct_defs_table: HashMap<Identifier, StructDefinitionIndex>, #[allow(dead_code)] pub field_defs_table: HashMap<Identifier, FieldDefinitionIndex>, pub function_defs_table: HashMap<Identifier, FunctionDefinitionIndex>, pub function_defs: Vec<FunctionDef>, pub field_offsets: Vec<TableIndex>, cache: LoadedModuleCache, } impl ModuleAccess for LoadedModule { fn as_module(&self) -> &CompiledModule { &self.module.as_inner() } } #[derive(Debug)] struct LoadedModuleCache { struct_defs: Vec<RwLock<Option<StructDef>>>, } impl PartialEq for LoadedModuleCache { fn eq(&self, _other: &Self) -> bool { true } } impl Eq for LoadedModuleCache {} impl LoadedModule { pub fn new(module: VerifiedModule) -> Self { let mut struct_defs_table = HashMap::new(); let mut field_defs_table = HashMap::new(); let mut function_defs_table = HashMap::new(); let mut function_defs = vec![]; let struct_defs = module .struct_defs() .iter() .map(|_| RwLock::new(None)) .collect(); let cache = LoadedModuleCache { struct_defs }; let mut field_offsets: Vec<TableIndex> = module.field_defs().iter().map(|_| 0).collect(); for (idx, struct_def) in module.struct_defs().iter().enumerate() { let name = module .identifier_at(module.struct_handle_at(struct_def.struct_handle).name) .into(); let sd_idx = StructDefinitionIndex::new(idx as TableIndex); struct_defs_table.insert(name, sd_idx);
} for (idx, field_def) in module.field_defs().iter().enumerate() { let name = module.identifier_at(field_def.name).into(); let fd_idx = FieldDefinitionIndex::new(idx as TableIndex); field_defs_table.insert(name, fd_idx); } for (idx, function_def) in module.function_defs().iter().enumerate() { let name = module .identifier_at(module.function_handle_at(function_def.function).name) .into(); let fd_idx = FunctionDefinitionIndex::new(idx as TableIndex); function_defs_table.insert(name, fd_idx); assume!(function_defs.len() < usize::max_value()); function_defs.push(FunctionDef::new(&module, fd_idx)); } LoadedModule { module, struct_defs_table, field_defs_table, function_defs_table, function_defs, field_offsets, cache, } } pub fn cached_struct_def_at(&self, idx: StructDefinitionIndex) -> Option<StructDef> { let cached = self.cache.struct_defs[idx.into_index()] .read() .expect("lock poisoned"); cached.clone() } pub fn cache_struct_def(&self, idx: StructDefinitionIndex, def: StructDef) { let mut cached = self.cache.struct_defs[idx.into_index()] .write() .expect("lock poisoned"); cached.replace(def); } pub fn get_field_offset(&self, idx: FieldDefinitionIndex) -> VMResult<TableIndex> { self.field_offsets .get(idx.into_index()) .cloned() .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR)) } } #[test] fn assert_thread_safe() { fn assert_send<T: Send>() {}; fn assert_sync<T: Sync>() {}; assert_send::<LoadedModule>(); assert_sync::<LoadedModule>(); }
if let StructFieldInformation::Declared { field_count, fields, } = &struct_def.field_information { for i in 0..*field_count { let field_index = fields.into_index(); assume!(field_index <= usize::max_value() - (i as usize)); field_offsets[field_index + (i as usize)] = i; } }
if_condition
[ { "content": "/// Get the StructTag for a StructDefinition defined in a published module.\n\npub fn resource_storage_key(module: &impl ModuleAccess, idx: StructDefinitionIndex) -> StructTag {\n\n let resource = module.struct_def_at(idx);\n\n let res_handle = module.struct_handle_at(resource.struct_handle);\n\n let res_module = module.module_handle_at(res_handle.module);\n\n let res_name = module.identifier_at(res_handle.name);\n\n let res_mod_addr = module.address_at(res_module.address);\n\n let res_mod_name = module.identifier_at(res_module.name);\n\n StructTag {\n\n module: res_mod_name.into(),\n\n address: *res_mod_addr,\n\n name: res_name.into(),\n\n type_params: vec![],\n\n }\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/src/identifier.rs", "rank": 0, "score": 516877.8906725382 }, { "content": "fn parse_and_compile_modules(s: impl AsRef<str>) -> Vec<CompiledModule> {\n\n let compiler = Compiler {\n\n skip_stdlib_deps: true,\n\n ..Compiler::default()\n\n };\n\n compiler\n\n .into_compiled_program(s.as_ref())\n\n .expect(\"Failed to compile program\")\n\n .modules\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 1, "score": 381971.84020833985 }, { "content": "pub fn struct_name_from_handle_index(module: &VerifiedModule, idx: StructHandleIndex) -> String {\n\n let struct_handle = module.struct_handle_at(idx);\n\n let struct_handle_view = StructHandleView::new(module, struct_handle);\n\n let module_name = module.identifier_at(struct_handle_view.module_handle().name);\n\n let struct_name = struct_handle_view.name();\n\n format!(\"{}_{}\", module_name, struct_name)\n\n}\n\n\n", "file_path": "language/stackless-bytecode/bytecode-to-boogie/src/translator.rs", "rank": 2, "score": 369594.79298051936 }, { "content": "pub fn struct_name_from_handle_index(module: &VerifiedModule, idx: StructHandleIndex) -> String {\n\n let struct_handle = module.struct_handle_at(idx);\n\n let struct_handle_view = 
StructHandleView::new(module, struct_handle);\n\n let module_name = module.identifier_at(struct_handle_view.module_handle().name);\n\n let struct_name = struct_handle_view.name();\n\n format!(\"{}_{}\", module_name, struct_name)\n\n}\n\n\n", "file_path": "language/stackless-bytecode/tree_heap/src/translator.rs", "rank": 3, "score": 369594.79298051936 }, { "content": "fn idents(names: impl IntoIterator<Item = &'static str>) -> Vec<Identifier> {\n\n names.into_iter().map(ident).collect()\n\n}\n", "file_path": "language/vm/vm-runtime/src/unit_tests/mod.rs", "rank": 4, "score": 359789.7001648424 }, { "content": "pub fn self_module_name() -> &'static IdentStr {\n\n &*SELF_MODULE_NAME\n\n}\n\n\n\n/// Index 0 into the LocalsSignaturePool, which is guaranteed to be an empty list.\n\n/// Used to represent function/struct instantiation with no type actuals -- effectively\n\n/// non-generic functions and structs.\n\npub const NO_TYPE_ACTUALS: LocalsSignatureIndex = LocalsSignatureIndex(0);\n\n\n\n// HANDLES:\n\n// Handles are structs that accompany opcodes that need references: a type reference,\n\n// or a function reference (a field reference being available only within the module that\n\n// defrines the field can be a definition).\n\n// Handles refer to both internal and external \"entities\" and are embedded as indexes\n\n// in the instruction stream.\n\n// Handles define resolution. Resolution is assumed to be by (name, signature)\n\n\n\n/// A `ModuleHandle` is a reference to a MOVE module. 
It is composed by an `address` and a `name`.\n\n///\n\n/// A `ModuleHandle` uniquely identifies a code resource in the blockchain.\n", "file_path": "language/vm/src/file_format.rs", "rank": 5, "score": 357808.4363244566 }, { "content": "/// Create a dummy module to wrap the bytecode program in local@code\n\npub fn dummy_procedure_module(code: Vec<Bytecode>) -> CompiledModule {\n\n let mut module = empty_module();\n\n let mut code_unit = CodeUnit::default();\n\n code_unit.code = code;\n\n let mut fun_def = FunctionDefinition::default();\n\n fun_def.code = code_unit;\n\n\n\n module.function_signatures.push(FunctionSignature {\n\n arg_types: vec![],\n\n return_types: vec![],\n\n type_formals: vec![],\n\n });\n\n let fun_handle = FunctionHandle {\n\n module: ModuleHandleIndex(0),\n\n name: IdentifierIndex(0),\n\n signature: FunctionSignatureIndex(0),\n\n };\n\n\n\n module.function_handles.push(fun_handle);\n\n module.function_defs.push(fun_def);\n\n module.freeze().unwrap()\n\n}\n", "file_path": "language/vm/src/file_format.rs", "rank": 6, "score": 341841.5201648878 }, { "content": "/// Return the simplest module that will pass the bounds checker\n\npub fn empty_module() -> CompiledModuleMut {\n\n CompiledModuleMut {\n\n module_handles: vec![ModuleHandle {\n\n address: AddressPoolIndex::new(0),\n\n name: IdentifierIndex::new(0),\n\n }],\n\n address_pool: vec![AccountAddress::default()],\n\n identifiers: vec![self_module_name().to_owned()],\n\n user_strings: vec![],\n\n function_defs: vec![],\n\n struct_defs: vec![],\n\n field_defs: vec![],\n\n struct_handles: vec![],\n\n function_handles: vec![],\n\n type_signatures: vec![],\n\n function_signatures: vec![],\n\n locals_signatures: vec![LocalsSignature(vec![])],\n\n byte_array_pool: vec![],\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/file_format.rs", "rank": 7, "score": 333428.9009288688 }, { "content": "/// Create the following module which is convenient in tests:\n\n/// // module <SELF> {\n\n/// // struct Bar { x: 
u64 }\n\n/// //\n\n/// // foo() {\n\n/// // }\n\n/// // }\n\npub fn basic_test_module() -> CompiledModuleMut {\n\n let mut m = empty_module();\n\n\n\n m.function_signatures.push(FunctionSignature {\n\n return_types: vec![],\n\n arg_types: vec![],\n\n type_formals: vec![],\n\n });\n\n\n\n m.function_handles.push(FunctionHandle {\n\n module: ModuleHandleIndex::new(0),\n\n name: IdentifierIndex::new(m.identifiers.len() as u16),\n\n signature: FunctionSignatureIndex::new(0),\n\n });\n\n m.identifiers\n\n .push(Identifier::new(\"foo\".to_string()).unwrap());\n\n\n\n m.function_defs.push(FunctionDefinition {\n\n function: FunctionHandleIndex::new(0),\n\n flags: 0,\n", "file_path": "language/vm/src/file_format.rs", "rank": 8, "score": 328401.24190658785 }, { "content": "pub fn assert_status_eq(s1: &VMStatus, s2: &VMStatus) -> bool {\n\n assert_eq!(s1.major_status, s2.major_status);\n\n assert_eq!(s1.sub_status, s2.sub_status);\n\n true\n\n}\n\n\n", "file_path": "language/e2e-tests/src/lib.rs", "rank": 9, "score": 327948.0700677445 }, { "content": "fn generate_module_with_struct(resource: bool) -> CompiledModuleMut {\n\n let mut module: CompiledModuleMut = empty_module();\n\n module.type_signatures = vec![\n\n SignatureToken::Bool,\n\n SignatureToken::U64,\n\n SignatureToken::String,\n\n SignatureToken::ByteArray,\n\n SignatureToken::Address,\n\n ]\n\n .into_iter()\n\n .map(TypeSignature)\n\n .collect();\n\n\n\n let struct_index = 0;\n\n let num_fields = 5;\n\n let offset = module.identifiers.len() as TableIndex;\n\n module.identifiers.push(Identifier::new(\"struct0\").unwrap());\n\n\n\n let field_information = StructFieldInformation::Declared {\n\n field_count: num_fields as MemberCount,\n", "file_path": "language/tools/test-generation/tests/struct_instructions.rs", "rank": 10, "score": 326585.75396306184 }, { "content": "#[inline]\n\npub fn pick_slice_idxs(max: usize, indexes: &[impl AsRef<PropIndex>]) -> Vec<usize> {\n\n pick_idxs(max, indexes, 
indexes.len())\n\n}\n\n\n\n/// Wrapper for `proptest`'s [`Index`][proptest::sample::Index] that allows `AsRef` to work.\n\n///\n\n/// There is no blanket `impl<T> AsRef<T> for T`, so `&[PropIndex]` doesn't work with\n\n/// `&[impl AsRef<PropIndex>]` (unless an impl gets added upstream). `Index` does.\n\n#[derive(Arbitrary, Clone, Copy, Debug)]\n\npub struct Index(PropIndex);\n\n\n\nimpl AsRef<PropIndex> for Index {\n\n fn as_ref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Deref for Index {\n\n type Target = PropIndex;\n\n\n\n fn deref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 11, "score": 321899.89229617955 }, { "content": "fn test_module(name: &'static str) -> VerifiedModule {\n\n let compiled_module = CompiledModuleMut {\n\n module_handles: vec![ModuleHandle {\n\n name: IdentifierIndex::new(0),\n\n address: AddressPoolIndex::new(0),\n\n }],\n\n struct_handles: vec![],\n\n function_handles: vec![\n\n FunctionHandle {\n\n module: ModuleHandleIndex::new(0),\n\n name: IdentifierIndex::new(1),\n\n signature: FunctionSignatureIndex::new(0),\n\n },\n\n FunctionHandle {\n\n module: ModuleHandleIndex::new(0),\n\n name: IdentifierIndex::new(2),\n\n signature: FunctionSignatureIndex::new(1),\n\n },\n\n ],\n\n\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 12, "score": 320927.9230707136 }, { "content": "/// Determine if a struct at the given index is a resource\n\npub fn struct_is_resource(state: &AbstractState, struct_index: StructDefinitionIndex) -> bool {\n\n let struct_def = state.module.struct_def_at(struct_index);\n\n StructDefinitionView::new(&state.module, struct_def).is_nominal_resource()\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 13, "score": 319053.4163692651 }, { "content": "pub fn native_sha3_256(mut arguments: VecDeque<Value>) -> VMResult<NativeResult> {\n\n if arguments.len() != 1 {\n\n let msg = 
format!(\n\n \"wrong number of arguments for sha3_256 expected 1 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let hash_arg = pop_arg!(arguments, ByteArray);\n\n let cost = SHA3_COST * hash_arg.len() as u64;\n\n\n\n let hash_vec = HashValue::from_sha3_256(hash_arg.as_bytes()).to_vec();\n\n let return_values = vec![Value::byte_array(ByteArray::new(hash_vec))];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/hash.rs", "rank": 14, "score": 309924.1516759888 }, { "content": "pub fn native_sha2_256(mut arguments: VecDeque<Value>) -> VMResult<NativeResult> {\n\n if arguments.len() != 1 {\n\n let msg = format!(\n\n \"wrong number of arguments for sha2_256 expected 1 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let hash_arg = pop_arg!(arguments, ByteArray);\n\n let cost = SHA2_COST * hash_arg.len() as u64;\n\n\n\n let hash_vec = Sha256::digest(hash_arg.as_bytes()).to_vec();\n\n let return_values = vec![Value::byte_array(ByteArray::new(hash_vec))];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/hash.rs", "rank": 15, "score": 309924.1516759888 }, { "content": "// Generate some random, well-formed, unsigned-varint length-prefixed byte arrays\n\n// for our fuzzer corpus to act as serialized inbound rpc calls.\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n let small_data_strat = vec(any::<u8>(), 0..MAX_SMALL_MSG_BYTES);\n\n let medium_data_strat = vec(any::<u8>(), 0..MAX_MEDIUM_MSG_BYTES);\n\n\n\n // bias corpus generation to prefer small message sizes\n\n let data_strat = prop_oneof![small_data_strat, medium_data_strat];\n\n\n\n let length_prefixed_data_strat = data_strat.prop_map(|data| {\n\n let max_len = data.len() + 
MAX_UVI_PREFIX_BYTES;\n\n let mut buf = BytesMut::with_capacity(max_len);\n\n let mut codec = LengthDelimitedCodec::new();\n\n codec\n\n .encode(Bytes::from(data), &mut buf)\n\n .expect(\"Failed to create uvi-prefixed data for corpus\");\n\n buf.freeze().to_vec()\n\n });\n\n\n\n gen.generate(length_prefixed_data_strat)\n\n}\n\n\n", "file_path": "network/src/protocols/rpc/fuzzing.rs", "rank": 16, "score": 309905.99609448126 }, { "content": "fn collect_values(iter: SchemaIterator<TestSchema>) -> Vec<u32> {\n\n iter.map(|row| (row.unwrap().1).0).collect()\n\n}\n\n\n", "file_path": "storage/schemadb/tests/iterator.rs", "rank": 17, "score": 308500.0243726812 }, { "content": "pub fn native_bytearray_concat(mut arguments: VecDeque<Value>) -> VMResult<NativeResult> {\n\n if arguments.len() != 2 {\n\n let msg = format!(\n\n \"wrong number of arguments for bytearray_concat expected 2 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let arg2 = pop_arg!(arguments, ByteArray);\n\n let arg1 = pop_arg!(arguments, ByteArray);\n\n let mut return_val = arg1.as_bytes().to_vec();\n\n return_val.extend_from_slice(arg2.as_bytes());\n\n\n\n // TODO: Figure out the gas cost for concatenation.\n\n let cost = return_val.len() as u64;\n\n let return_values = vec![Value::byte_array(ByteArray::new(return_val))];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/primitive_helpers.rs", "rank": 18, "score": 303752.00434726296 }, { "content": "pub fn native_address_to_bytes(mut arguments: VecDeque<Value>) -> VMResult<NativeResult> {\n\n if arguments.len() != 1 {\n\n let msg = format!(\n\n \"wrong number of arguments for address_to_bytes expected 1 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let arg = pop_arg!(arguments, AccountAddress);\n\n let return_val = 
arg.to_vec();\n\n\n\n // TODO: Figure out the gas cost for conversion.\n\n let cost = return_val.len() as u64;\n\n let return_values = vec![Value::byte_array(ByteArray::new(return_val))];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/primitive_helpers.rs", "rank": 19, "score": 303752.004347263 }, { "content": "pub fn native_u64_to_bytes(mut arguments: VecDeque<Value>) -> VMResult<NativeResult> {\n\n if arguments.len() != 1 {\n\n let msg = format!(\n\n \"wrong number of arguments for u64_to_bytes expected 1 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let arg = pop_arg!(arguments, u64);\n\n let return_val: Vec<u8> = arg.to_le_bytes().to_vec();\n\n\n\n // TODO: Figure out the gas cost for conversion.\n\n let cost = return_val.len() as u64;\n\n let return_values = vec![Value::byte_array(ByteArray::new(return_val))];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/primitive_helpers.rs", "rank": 20, "score": 303752.00434726296 }, { "content": "fn verify_native_structs(module_view: &ModuleView<VerifiedModule>) -> Vec<VMStatus> {\n\n let mut errors = vec![];\n\n\n\n let module_id = module_view.id();\n\n for (idx, native_struct_definition_view) in module_view\n\n .structs()\n\n .enumerate()\n\n .filter(|sdv| sdv.1.is_native())\n\n {\n\n let struct_name = native_struct_definition_view.name();\n\n\n\n match resolve_native_struct(&module_id, struct_name) {\n\n None => errors.push(verification_error(\n\n IndexKind::StructHandle,\n\n idx,\n\n StatusCode::MISSING_DEPENDENCY,\n\n )),\n\n Some(vm_native_struct) => {\n\n let declared_index = idx as u16;\n\n let declared_is_nominal_resource =\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 21, "score": 302091.2188406455 }, { "content": "#[test]\n\nfn 
test_same_module_struct_resolution() {\n\n let allocator = Arena::new();\n\n let vm_cache = VMModuleCache::new(&allocator);\n\n\n\n let code = \"\n\n modules:\n\n module M1 {\n\n struct X {}\n\n struct T { i: u64, x: Self.X }\n\n }\n\n script:\n\n main() {\n\n return;\n\n }\n\n \";\n\n\n\n let module = parse_and_compile_modules(code);\n\n let fetcher = FakeFetcher::new(module);\n\n let block_cache = BlockModuleCache::new(&vm_cache, fetcher);\n\n {\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 22, "score": 297033.8688675536 }, { "content": "#[inline]\n\nfn check_code_unit_bounds_impl<T, I>(pool: &[T], bytecode_offset: usize, idx: I) -> Vec<VMStatus>\n\nwhere\n\n I: ModuleIndex,\n\n{\n\n let idx = idx.into_index();\n\n let len = pool.len();\n\n if idx >= len {\n\n let status = bytecode_offset_err(\n\n I::KIND,\n\n idx,\n\n len,\n\n bytecode_offset,\n\n StatusCode::INDEX_OUT_OF_BOUNDS,\n\n );\n\n vec![status]\n\n } else {\n\n vec![]\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/check_bounds.rs", "rank": 23, "score": 297026.83739584126 }, { "content": "/// This produces the genesis block\n\npub fn genesis_strategy() -> impl Strategy<Value = Block<Vec<usize>>> {\n\n Just(Block::make_genesis_block())\n\n}\n\n\n\nprop_compose! {\n\n /// This produces an unmoored block, with arbitrary parent & QC ancestor\n\n pub fn unmoored_block(ancestor_id_strategy: impl Strategy<Value = HashValue>)(\n\n ancestor_id in ancestor_id_strategy,\n\n )(\n\n block in new_proposal(\n\n ancestor_id,\n\n Round::arbitrary(),\n\n proptests::arb_signer(),\n\n QuorumCert::certificate_for_genesis(),\n\n )\n\n ) -> Block<Vec<usize>> {\n\n block\n\n }\n\n}\n\n\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 24, "score": 296497.8936346347 }, { "content": "/// Offers the genesis block.\n\npub fn leaf_strategy() -> impl Strategy<Value = Block<Vec<usize>>> {\n\n genesis_strategy().boxed()\n\n}\n\n\n\nprop_compose! 
{\n\n /// This produces a block with an invalid id (and therefore signature)\n\n /// given a valid block\n\n pub fn fake_id(block_strategy: impl Strategy<Value = Block<Vec<usize>>>)\n\n (fake_id in HashValue::arbitrary(),\n\n block in block_strategy) -> Block<Vec<usize>> {\n\n Block {\n\n id: fake_id,\n\n block_data: BlockData::new_proposal(\n\n block.payload().unwrap().clone(),\n\n block.author().unwrap(),\n\n block.round(),\n\n get_current_timestamp().as_micros() as u64,\n\n block.quorum_cert().clone(),\n\n ),\n\n signature: Some(block.signature().unwrap().clone()),\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 25, "score": 296497.8936346347 }, { "content": "/// Looks up the expected native struct definition from the module id (address and module) and\n\n/// function name where it was expected to be declared\n\npub fn resolve_native_struct(\n\n module: &ModuleId,\n\n struct_name: &IdentStr,\n\n) -> Option<&'static NativeStruct> {\n\n NATIVE_STRUCT_MAP.get(module)?.get(struct_name)\n\n}\n\n\n\nmacro_rules! 
add {\n\n ($m:ident, $addr:expr, $module:expr, $name:expr, $resource: expr, $ty_kinds: expr, $tag: expr) => {{\n\n let ty_args = $ty_kinds\n\n .iter()\n\n .enumerate()\n\n .map(|(id, _)| Type::TypeVariable(id as u16))\n\n .collect();\n\n let id = ModuleId::new($addr, Identifier::new($module).unwrap());\n\n let struct_table = $m.entry(id).or_insert_with(HashMap::new);\n\n let expected_index = StructHandleIndex(struct_table.len() as u16);\n\n\n\n let s = NativeStruct {\n\n expected_nominal_resource: $resource,\n\n expected_type_formals: $ty_kinds,\n\n expected_index,\n\n struct_type: NativeStructType::new($tag, ty_args),\n\n };\n\n let old = struct_table.insert(Identifier::new($name).unwrap(), s);\n\n assert!(old.is_none());\n\n }};\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_structs/dispatch.rs", "rank": 26, "score": 296100.5317622542 }, { "content": "/// generate_corpus produces an arbitrary SubmitTransactionRequest for admission control\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n // use proptest to generate a SignedTransaction\n\n let signed_txn = gen.generate(proptest::arbitrary::any::<SignedTransaction>());\n\n // wrap it in a SubmitTransactionRequest\n\n let mut req = SubmitTransactionRequest::default();\n\n req.signed_txn = Some(signed_txn.into());\n\n\n\n let mut bytes = bytes::BytesMut::with_capacity(req.encoded_len());\n\n req.encode(&mut bytes).unwrap();\n\n bytes.to_vec()\n\n}\n\n\n", "file_path": "admission_control/admission-control-service/src/admission_control_fuzzing.rs", "rank": 27, "score": 295994.85253899265 }, { "content": "#[allow(dead_code)]\n\nfn get_mut_vector(v: &mut NativeStructValue) -> VMResult<&mut NativeVector> {\n\n match v {\n\n NativeStructValue::Vector(v) => Ok(v),\n\n }\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_structs/vector.rs", "rank": 28, "score": 294587.8748685931 }, { "content": "pub fn stack_struct_has_field(state: &AbstractState, 
field_index: FieldDefinitionIndex) -> bool {\n\n if let Some(struct_handle_index) = state.stack_peek(0).clone().and_then(|abstract_value| {\n\n SignatureToken::get_struct_handle_from_reference(&abstract_value.token)\n\n }) {\n\n return state\n\n .module\n\n .is_field_in_struct(field_index, struct_handle_index);\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 29, "score": 293080.8172032102 }, { "content": "#[test]\n\nfn test_multi_module_struct_resolution() {\n\n let allocator = Arena::new();\n\n let vm_cache = VMModuleCache::new(&allocator);\n\n\n\n let code = format!(\n\n \"\n\n modules:\n\n module M1 {{\n\n struct X {{}}\n\n }}\n\n module M2 {{\n\n import 0x{0}.M1;\n\n struct T {{ i: u64, x: M1.X }}\n\n }}\n\n script:\n\n main() {{\n\n return;\n\n }}\n\n \",\n\n hex::encode(AccountAddress::default())\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 30, "score": 292665.2253876379 }, { "content": "fn ident(name: impl Into<Box<str>>) -> Identifier {\n\n Identifier::new(name).unwrap()\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/src/unit_tests/mod.rs", "rank": 31, "score": 291657.3311564685 }, { "content": "/// Create a number of accounts without keypair from a wallet.\n\npub fn gen_accounts_from_wallet(wallet: &mut WalletLibrary, num_accounts: u64) -> Vec<AccountData> {\n\n (0..num_accounts)\n\n .map(|_| gen_next_account(wallet))\n\n .collect()\n\n}\n\n\n\n/// ---------------------------------------------------------------------------------- ///\n\n/// Helper functions and APIs to generate different types of transaction request(s). 
///\n\n/// ---------------------------------------------------------------------------------- ///\n\n\n", "file_path": "benchmark/src/load_generator.rs", "rank": 32, "score": 283954.7923438043 }, { "content": "/// Helper function to iterate through all the files in the given directory, skipping hidden files,\n\n/// and return an iterator of their paths.\n\npub fn iterate_directory(path: &Path) -> impl Iterator<Item = PathBuf> {\n\n walkdir::WalkDir::new(path)\n\n .into_iter()\n\n .map(::std::result::Result::unwrap)\n\n .filter(|entry| {\n\n entry.file_type().is_file()\n\n && entry\n\n .file_name()\n\n .to_str()\n\n .map_or(false, |s| !s.starts_with('.')) // Skip hidden files\n\n })\n\n .map(|entry| entry.path().to_path_buf())\n\n}\n\n\n", "file_path": "common/datatest-stable/src/utils.rs", "rank": 33, "score": 283109.32562624954 }, { "content": "pub fn is_allowed_script(publishing_option: &VMPublishingOption, program: &[u8]) -> bool {\n\n match publishing_option {\n\n VMPublishingOption::Open | VMPublishingOption::CustomScripts => true,\n\n VMPublishingOption::Locked(whitelist) => {\n\n let hash_value = HashValue::from_sha3_256(program);\n\n whitelist.contains(hash_value.as_ref())\n\n }\n\n }\n\n}\n\n\n\n/// Represents a [`SignedTransaction`] that has been *validated*. 
This includes all the steps\n\n/// required to ensure that a transaction is valid, other than verifying the submitted program.\n\npub struct ValidatedTransaction<'alloc, 'txn, P>\n\nwhere\n\n 'alloc: 'txn,\n\n P: ModuleCache<'alloc>,\n\n{\n\n txn: SignatureCheckedTransaction,\n\n txn_state: Option<ValidatedTransactionState<'alloc, 'txn, P>>,\n\n}\n", "file_path": "language/vm/vm-runtime/src/process_txn/validate.rs", "rank": 34, "score": 281385.7616332952 }, { "content": "/// Deserializes a code stream (`Bytecode`s).\n\nfn load_code(cursor: &mut Cursor<&[u8]>, code: &mut Vec<Bytecode>) -> BinaryLoaderResult<()> {\n\n let bytecode_count = read_u16_internal(cursor)?;\n\n while code.len() < bytecode_count as usize {\n\n let byte = cursor\n\n .read_u8()\n\n .map_err(|_| VMStatus::new(StatusCode::MALFORMED))?;\n\n let bytecode = match Opcodes::from_u8(byte)? {\n\n Opcodes::POP => Bytecode::Pop,\n\n Opcodes::RET => Bytecode::Ret,\n\n Opcodes::BR_TRUE => {\n\n let jump = read_u16_internal(cursor)?;\n\n Bytecode::BrTrue(jump)\n\n }\n\n Opcodes::BR_FALSE => {\n\n let jump = read_u16_internal(cursor)?;\n\n Bytecode::BrFalse(jump)\n\n }\n\n Opcodes::BRANCH => {\n\n let jump = read_u16_internal(cursor)?;\n\n Bytecode::Branch(jump)\n", "file_path": "language/vm/src/deserializer.rs", "rank": 35, "score": 281384.58020995493 }, { "content": "/// Trait that describe a cache for modules. 
The idea is that this trait will in charge of\n\n/// loading resolving all dependencies of needed module from the storage.\n\npub trait ModuleCache<'alloc> {\n\n /// Given a function handle index, resolves that handle into an internal representation of\n\n /// move function.\n\n ///\n\n /// Returns:\n\n ///\n\n /// * `Ok(Some(FunctionRef))` if such function exists.\n\n /// * `Ok(None)` if such function doesn't exists.\n\n /// * `Err(...)` for a verification issue in a resolved dependency or VM invariant violation.\n\n fn resolve_function_ref(\n\n &self,\n\n caller_module: &LoadedModule,\n\n idx: FunctionHandleIndex,\n\n ) -> VMResult<Option<FunctionRef<'alloc>>>;\n\n\n\n /// Resolve a StructDefinitionIndex into a StructDef. This process will be recursive so we may\n\n /// charge gas on each recursive step.\n\n ///\n\n /// Returns:\n\n ///\n", "file_path": "language/vm/vm-runtime/src/code_cache/module_cache.rs", "rank": 36, "score": 280742.5147485478 }, { "content": "/// This function checks the extra requirements on the signature of the main function of a script.\n\npub fn verify_main_signature(script: &CompiledScript) -> Vec<VMStatus> {\n\n let function_handle = &script.function_handle_at(script.main().function);\n\n let function_signature = &script.function_signature_at(function_handle.signature);\n\n if !function_signature.return_types.is_empty() {\n\n return vec![VMStatus::new(StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE)];\n\n }\n\n for arg_type in &function_signature.arg_types {\n\n if !arg_type.is_primitive() {\n\n return vec![VMStatus::new(StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE)];\n\n }\n\n }\n\n vec![]\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 37, "score": 277052.15431489993 }, { "content": "pub fn append_err_info(status: VMStatus, kind: IndexKind, idx: usize) -> VMStatus {\n\n let msg = format!(\"at index {} while indexing {}\", idx, kind);\n\n status.append_message_with_separator(' ', msg)\n\n}\n\n\n", 
"file_path": "language/vm/src/errors.rs", "rank": 38, "score": 275663.84898125427 }, { "content": "pub fn do_compile_module<T: ModuleAccess>(\n\n source_path: &Path,\n\n address: AccountAddress,\n\n dependencies: &[T],\n\n) -> (CompiledModule, ModuleSourceMap<Loc>) {\n\n let source = fs::read_to_string(source_path)\n\n .unwrap_or_else(|_| panic!(\"Unable to read file: {:?}\", source_path));\n\n let parsed_module = parse_module(&source).unwrap();\n\n compile_module(address, parsed_module, dependencies).unwrap()\n\n}\n", "file_path": "language/compiler/src/util.rs", "rank": 39, "score": 273770.53135826375 }, { "content": "pub fn verification_error(kind: IndexKind, idx: usize, err: StatusCode) -> VMStatus {\n\n let msg = format!(\"at index {} while indexing {}\", idx, kind);\n\n VMStatus::new(err).with_message(msg)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 40, "score": 271249.38195531594 }, { "content": "fn load_nominal_resource_flag(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<bool> {\n\n if let Ok(byte) = cursor.read_u8() {\n\n Ok(match SerializedNominalResourceFlag::from_u8(byte)? {\n\n SerializedNominalResourceFlag::NOMINAL_RESOURCE => true,\n\n SerializedNominalResourceFlag::NORMAL_STRUCT => false,\n\n })\n\n } else {\n\n Err(VMStatus::new(StatusCode::MALFORMED))\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 42, "score": 269658.52704867476 }, { "content": "pub fn transaction_status_eq(t1: &TransactionStatus, t2: &TransactionStatus) -> bool {\n\n match (t1, t2) {\n\n (TransactionStatus::Discard(s1), TransactionStatus::Discard(s2))\n\n | (TransactionStatus::Keep(s1), TransactionStatus::Keep(s2)) => assert_status_eq(s1, s2),\n\n _ => false,\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
assert_prologue_parity {\n\n ($e1:expr, $e2:expr, $e3:expr) => {\n\n assert_status_eq(&$e1.unwrap(), &$e3);\n\n assert!(transaction_status_eq($e2, &TransactionStatus::Discard($e3)));\n\n };\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! assert_prologue_disparity {\n\n ($e1:expr => $e2:expr, $e3:expr => $e4:expr) => {\n\n assert_eq!($e1, $e2);\n\n assert!(transaction_status_eq($e3, &$e4));\n\n };\n\n}\n", "file_path": "language/e2e-tests/src/lib.rs", "rank": 43, "score": 269277.6341058936 }, { "content": "/// Scan *.node.config.toml files under config_dir_name, parse them as node config\n\n/// and return libra-swarm's node addresses info as a vector.\n\npub fn parse_swarm_config_from_dir(config_dir_name: &str) -> Result<Vec<String>> {\n\n let mut validator_addresses: Vec<String> = Vec::new();\n\n let config_dir = PathBuf::from(config_dir_name);\n\n for entry in WalkDir::new(config_dir)\n\n .contents_first(true)\n\n .into_iter()\n\n .filter_map(|e| e.ok())\n\n .filter(|dir_entry| {\n\n let path = dir_entry.path();\n\n warn!(\"checking entry: {:?}\", path);\n\n path.is_file() && path.file_name() == Some(OsStr::new(\"node.config.toml\"))\n\n })\n\n {\n\n let path = entry.path();\n\n let filename = path.file_name().unwrap();\n\n debug!(\"Parsing node config file {:?}.\", filename);\n\n let config_string = fs::read_to_string(&path)\n\n .unwrap_or_else(|_| panic!(\"failed to load config file {:?}\", filename));\n\n let config = NodeConfig::parse(&config_string)\n\n .unwrap_or_else(|_| panic!(\"failed to parse NodeConfig from {:?}\", filename));\n", "file_path": "benchmark/src/cli_opt.rs", "rank": 44, "score": 269118.33015609905 }, { "content": "fn load_kinds(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<Vec<Kind>> {\n\n let len = read_uleb_u16_internal(cursor)?;\n\n let mut kinds = vec![];\n\n for _ in 0..len {\n\n kinds.push(load_kind(cursor)?);\n\n }\n\n Ok(kinds)\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 45, "score": 268364.6820182024 }, { 
"content": "fn compile_files(file_names: Vec<String>) -> Vec<VerifiedModule> {\n\n let mut verified_modules = vec![];\n\n let files_len = file_names.len();\n\n let dep_files = &file_names[0..files_len];\n\n let address = AccountAddress::default();\n\n for file_name in dep_files {\n\n let code = fs::read_to_string(file_name).unwrap();\n\n let module = parse_module(&code).unwrap();\n\n let (compiled_module, _) =\n\n compile_module(address, module, &verified_modules).expect(\"module failed to compile\");\n\n let verified_module_res = VerifiedModule::new(compiled_module);\n\n\n\n match verified_module_res {\n\n Err(e) => {\n\n panic!(\"{:?}\", e);\n\n }\n\n Ok(verified_module) => {\n\n verified_modules.push(verified_module);\n\n }\n\n }\n\n }\n\n verified_modules\n\n}\n\n\n", "file_path": "language/stackless-bytecode/tree_heap/src/main.rs", "rank": 46, "score": 267704.2434969298 }, { "content": "pub fn coin_module_name() -> &'static IdentStr {\n\n &*COIN_MODULE_NAME\n\n}\n\n\n", "file_path": "types/src/account_config.rs", "rank": 47, "score": 265806.118057483 }, { "content": "pub fn account_module_name() -> &'static IdentStr {\n\n &*ACCOUNT_MODULE_NAME\n\n}\n\n\n", "file_path": "types/src/account_config.rs", "rank": 48, "score": 265806.118057483 }, { "content": "pub fn coin_struct_name() -> &'static IdentStr {\n\n &*COIN_STRUCT_NAME\n\n}\n\n\n", "file_path": "types/src/account_config.rs", "rank": 49, "score": 265788.9556028848 }, { "content": "pub fn account_struct_name() -> &'static IdentStr {\n\n &*ACCOUNT_STRUCT_NAME\n\n}\n\n\n", "file_path": "types/src/account_config.rs", "rank": 50, "score": 265788.9556028848 }, { "content": "/// Trait that describes how the VM expects code data to be stored.\n\npub trait ModuleFetcher {\n\n /// `ModuleId` is the fully qualified name for the module we are trying to fetch.\n\n fn get_module(&self, key: &ModuleId) -> Option<CompiledModule>;\n\n}\n\n\n\n/// A wrapper around State Store database for fetching code data stored on 
chain.\n\npub struct ModuleFetcherImpl<'a>(&'a dyn StateView);\n\n\n\nimpl<'a> ModuleFetcherImpl<'a> {\n\n /// Creates a new Fetcher instance with a `StateView` reference.\n\n pub fn new(storage: &'a dyn StateView) -> Self {\n\n ModuleFetcherImpl(storage)\n\n }\n\n}\n\n\n\nimpl<'a> ModuleFetcher for ModuleFetcherImpl<'a> {\n\n fn get_module(&self, key: &ModuleId) -> Option<CompiledModule> {\n\n let access_path = key.into();\n\n match self.0.get(&access_path) {\n\n Ok(opt_module_blob) => match opt_module_blob {\n", "file_path": "language/vm/vm-runtime/src/code_cache/module_adapter.rs", "rank": 51, "score": 265762.65297891496 }, { "content": "/// Builds and returns a copy of the standard library with this address as the self address.\n\n///\n\n/// A copy of the stdlib built with the [default address](account_config::core_code_address) is\n\n/// available through [`stdlib_modules`].\n\npub fn build_stdlib(address: AccountAddress) -> (Vec<VerifiedModule>, SourceMap<Loc>) {\n\n let mut stdlib_modules = vec![];\n\n let mut stdlib_source_maps = vec![];\n\n\n\n for module_def in stdlib::module_defs() {\n\n let (compiled_module, source_map) =\n\n compile_module(address, (*module_def).clone(), &stdlib_modules)\n\n .expect(\"stdlib module failed to compile\");\n\n let verified_module =\n\n VerifiedModule::new(compiled_module).expect(\"stdlib module failed to verify\");\n\n\n\n let verification_errors = verify_module_dependencies(&verified_module, &stdlib_modules);\n\n // Fail if the module doesn't verify\n\n for e in &verification_errors {\n\n println!(\"{:?}\", e);\n\n }\n\n assert!(verification_errors.is_empty());\n\n\n\n stdlib_modules.push(verified_module);\n\n stdlib_source_maps.push(source_map)\n\n }\n\n\n\n (stdlib_modules, stdlib_source_maps)\n\n}\n", "file_path": "language/stdlib/src/lib.rs", "rank": 52, "score": 264385.34171979217 }, { "content": "// mod translator;\n\nfn compile_files(file_names: Vec<String>) -> Vec<VerifiedModule> {\n\n let mut verified_modules = 
stdlib_modules().to_vec();\n\n let files_len = file_names.len();\n\n let dep_files = &file_names[0..files_len];\n\n\n\n // assuming the last file is a program that might contain a script\n\n let address = AccountAddress::default();\n\n for file_name in dep_files {\n\n let code = fs::read_to_string(file_name).unwrap();\n\n let module = parse_module(&code).unwrap();\n\n let (compiled_module, _) =\n\n compile_module(address, module, &verified_modules).expect(\"module failed to compile\");\n\n let verified_module_res = VerifiedModule::new(compiled_module);\n\n\n\n match verified_module_res {\n\n Err(e) => {\n\n panic!(\"{:?}\", e);\n\n }\n\n Ok(verified_module) => {\n\n verified_modules.push(verified_module);\n\n }\n\n }\n\n }\n\n verified_modules\n\n}\n\n\n", "file_path": "language/stackless-bytecode/tree_heap/tests/translator_tests.rs", "rank": 53, "score": 264383.03332950623 }, { "content": "/// Reads a `u16` in ULEB128 format from a `binary`.\n\n///\n\n/// Takes a `&mut Cursor<&[u8]>` and returns a pair:\n\n///\n\n/// u16 - value read\n\n///\n\n/// Return an error on an invalid representation.\n\npub fn read_uleb128_as_u16(cursor: &mut Cursor<&[u8]>) -> Result<u16> {\n\n let mut value: u16 = 0;\n\n let mut shift: u8 = 0;\n\n while let Ok(byte) = cursor.read_u8() {\n\n let val = byte & 0x7f;\n\n value |= u16::from(val) << shift;\n\n if val == byte {\n\n return Ok(value);\n\n }\n\n shift += 7;\n\n if shift > 14 {\n\n break;\n\n }\n\n }\n\n bail!(\"invalid ULEB128 representation for u16\")\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 54, "score": 263818.92158933956 }, { "content": "/// Reads a `u32` in ULEB128 format from a `binary`.\n\n///\n\n/// Takes a `&mut Cursor<&[u8]>` and returns a pair:\n\n///\n\n/// u32 - value read\n\n///\n\n/// Return an error on an invalid representation.\n\npub fn read_uleb128_as_u32(cursor: &mut Cursor<&[u8]>) -> Result<u32> {\n\n let mut value: u32 = 0;\n\n let mut shift: u8 = 0;\n\n while let Ok(byte) = 
cursor.read_u8() {\n\n let val = byte & 0x7f;\n\n value |= u32::from(val) << shift;\n\n if val == byte {\n\n return Ok(value);\n\n }\n\n shift += 7;\n\n if shift > 28 {\n\n break;\n\n }\n\n }\n\n bail!(\"invalid ULEB128 representation for u32\")\n\n}\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 55, "score": 263818.92158933956 }, { "content": "/// Write a `u32` in Little Endian format.\n\npub fn write_u32(binary: &mut BinaryData, value: u32) -> Result<()> {\n\n binary.extend(&value.to_le_bytes())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 56, "score": 263813.2663578152 }, { "content": "/// Write a `u16` in Little Endian format.\n\npub fn write_u16(binary: &mut BinaryData, value: u16) -> Result<()> {\n\n binary.extend(&value.to_le_bytes())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 57, "score": 263813.2663578152 }, { "content": "/// Write a `u64` in Little Endian format.\n\npub fn write_u64(binary: &mut BinaryData, value: u64) -> Result<()> {\n\n binary.extend(&value.to_le_bytes())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 58, "score": 263813.2663578152 }, { "content": "/// Compiles a program with the given arguments and executes it in the VM.\n\npub fn compile_and_execute(program: &str, args: Vec<TransactionArgument>) -> VMResult<()> {\n\n let address = AccountAddress::default();\n\n println!(\"{}\", address);\n\n let compiler = Compiler {\n\n address,\n\n ..Compiler::default()\n\n };\n\n let compiled_program = compiler\n\n .into_compiled_program(program)\n\n .expect(\"Failed to compile\");\n\n let (verified_script, modules) =\n\n verify(&address, compiled_program.script, compiled_program.modules);\n\n execute(verified_script, args, modules)\n\n}\n\n\n", "file_path": "language/e2e-tests/src/lib.rs", "rank": 59, "score": 263750.35745982133 }, { "content": "pub fn validator_set_module_name() -> &'static IdentStr {\n\n 
&*LIBRA_SYSTEM_MODULE_NAME\n\n}\n\n\n", "file_path": "types/src/validator_set.rs", "rank": 60, "score": 261629.0784002701 }, { "content": "pub fn validator_set_struct_name() -> &'static IdentStr {\n\n &*VALIDATOR_SET_STRUCT_NAME\n\n}\n\n\n", "file_path": "types/src/validator_set.rs", "rank": 61, "score": 261612.28098846902 }, { "content": "fn load_signature_tokens(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<Vec<SignatureToken>> {\n\n let len = read_uleb_u16_internal(cursor)?;\n\n let mut tokens = vec![];\n\n for _ in 0..len {\n\n tokens.push(load_signature_token(cursor)?);\n\n }\n\n Ok(tokens)\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 62, "score": 261435.6588085921 }, { "content": "/// Take a `Vec<u8>` and a value to write to that vector and applies LEB128 logic to\n\n/// compress the u16.\n\npub fn write_u16_as_uleb128(binary: &mut BinaryData, value: u16) -> Result<()> {\n\n write_u32_as_uleb128(binary, u32::from(value))\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 63, "score": 260426.60615765152 }, { "content": "/// Take a `Vec<u8>` and a value to write to that vector and applies LEB128 logic to\n\n/// compress the u32.\n\npub fn write_u32_as_uleb128(binary: &mut BinaryData, value: u32) -> Result<()> {\n\n let mut val = value;\n\n loop {\n\n let v: u8 = (val & 0x7f) as u8;\n\n if u32::from(v) != val {\n\n binary.push(v | 0x80)?;\n\n val >>= 7;\n\n } else {\n\n binary.push(v)?;\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 64, "score": 260426.60615765152 }, { "content": "/// Check whether the input string is a valid libra address.\n\npub fn is_address(data: &str) -> bool {\n\n match hex::decode(data) {\n\n Ok(vec) => vec.len() == ADDRESS_LENGTH,\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "client/src/commands.rs", "rank": 65, "score": 260160.6077993955 }, { "content": "/// List all known fuzz targets.\n\npub fn 
list_targets(no_desc: bool) {\n\n for target in FuzzTarget::all_targets() {\n\n if no_desc {\n\n println!(\"{}\", target.name())\n\n } else {\n\n println!(\" * {0: <24} {1}\", target.name(), target.description())\n\n }\n\n }\n\n}\n", "file_path": "testsuite/libra-fuzzer/src/commands.rs", "rank": 66, "score": 259670.93267612846 }, { "content": "/// Return the path to the Account resource. It can be used to create an AccessPath for an\n\n/// Account resource.\n\npub fn account_resource_path() -> Vec<u8> {\n\n AccessPath::resource_access_vec(&account_struct_tag(), &Accesses::empty())\n\n}\n\n\n\nlazy_static! {\n\n /// The path to the sent event counter for an Account resource.\n\n /// It can be used to query the event DB for the given event.\n\n pub static ref ACCOUNT_SENT_EVENT_PATH: Vec<u8> = {\n\n let mut path = account_resource_path();\n\n path.extend_from_slice(b\"/sent_events_count/\");\n\n path\n\n };\n\n\n\n /// Returns the path to the received event counter for an Account resource.\n\n /// It can be used to query the event DB for the given event.\n\n pub static ref ACCOUNT_RECEIVED_EVENT_PATH: Vec<u8> = {\n\n let mut path = account_resource_path();\n\n path.extend_from_slice(b\"/received_events_count/\");\n\n path\n\n };\n", "file_path": "types/src/account_config.rs", "rank": 67, "score": 259501.22445207008 }, { "content": "pub fn with_smr_id(id: String) -> impl Fn() {\n\n move || set_simple_logger_prefix(format!(\"{}[{}]{}\", Fg(LightBlack), id, Fg(Reset)))\n\n}\n", "file_path": "consensus/src/chained_bft/test_utils/mod.rs", "rank": 68, "score": 259425.58116991454 }, { "content": "fn verify_native_functions(module_view: &ModuleView<VerifiedModule>) -> Vec<VMStatus> {\n\n let mut errors = vec![];\n\n\n\n let module_id = module_view.id();\n\n for (idx, native_function_definition_view) in module_view\n\n .functions()\n\n .enumerate()\n\n .filter(|fdv| fdv.1.is_native())\n\n {\n\n let function_name = native_function_definition_view.name();\n\n match 
resolve_native_function(&module_id, function_name) {\n\n None => errors.push(verification_error(\n\n IndexKind::FunctionHandle,\n\n idx,\n\n StatusCode::MISSING_DEPENDENCY,\n\n )),\n\n Some(vm_native_function) => {\n\n let declared_function_signature =\n\n native_function_definition_view.signature().as_inner();\n\n let expected_function_signature = &vm_native_function.expected_signature;\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 69, "score": 258394.37639174995 }, { "content": "/// Serializes a `ModuleHandle`.\n\n///\n\n/// A `ModuleHandle` gets serialized as follows:\n\n/// - `ModuleHandle.address` as a ULEB128 (index into the `AddressPool`)\n\n/// - `ModuleHandle.name` as a ULEB128 (index into the `IdentifierPool`)\n\nfn serialize_module_handle(binary: &mut BinaryData, module_handle: &ModuleHandle) -> Result<()> {\n\n write_u16_as_uleb128(binary, module_handle.address.0)?;\n\n write_u16_as_uleb128(binary, module_handle.name.0)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 70, "score": 258351.14681668833 }, { "content": "/// Serializes a `StructHandle`.\n\n///\n\n/// A `StructHandle` gets serialized as follows:\n\n/// - `StructHandle.module` as a ULEB128 (index into the `ModuleHandle` table)\n\n/// - `StructHandle.name` as a ULEB128 (index into the `IdentifierPool`)\n\n/// - `StructHandle.is_nominal_resource` as a 1 byte boolean (0 for false, 1 for true)\n\nfn serialize_struct_handle(binary: &mut BinaryData, struct_handle: &StructHandle) -> Result<()> {\n\n write_u16_as_uleb128(binary, struct_handle.module.0)?;\n\n write_u16_as_uleb128(binary, struct_handle.name.0)?;\n\n serialize_nominal_resource_flag(binary, struct_handle.is_nominal_resource)?;\n\n serialize_kinds(binary, &struct_handle.type_formals)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 71, "score": 258339.44400912194 }, { "content": "pub fn bounds_error(kind: IndexKind, idx: usize, len: usize, err: StatusCode) -> 
VMStatus {\n\n let msg = format!(\n\n \"Index {} out of bounds for {} while indexing {}\",\n\n idx, len, kind\n\n );\n\n VMStatus::new(err).with_message(msg)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 72, "score": 257685.54609905498 }, { "content": "/// Check whether a command is blocking.\n\npub fn blocking_cmd(cmd: &str) -> bool {\n\n cmd.ends_with('b')\n\n}\n\n\n", "file_path": "client/src/commands.rs", "rank": 73, "score": 256589.02392339602 }, { "content": "/// Compile a module.\n\npub fn compile_module<'a, T: 'a + ModuleAccess>(\n\n address: AccountAddress,\n\n module: ModuleDefinition,\n\n dependencies: impl IntoIterator<Item = &'a T>,\n\n) -> Result<(CompiledModule, ModuleSourceMap<Loc>)> {\n\n let current_module = QualifiedModuleIdent {\n\n address,\n\n name: module.name,\n\n };\n\n let mut context = Context::new(dependencies, current_module)?;\n\n let self_name = ModuleName::new(ModuleName::self_name().into());\n\n // Explicitly declare all imports as they will be included even if not used\n\n compile_imports(&mut context, address, module.imports)?;\n\n\n\n // Explicitly declare all structs as they will be included even if not used\n\n for s in &module.structs {\n\n let ident = QualifiedStructIdent {\n\n module: self_name.clone(),\n\n name: s.name.clone(),\n\n };\n", "file_path": "language/compiler/ir-to-bytecode/src/compiler.rs", "rank": 74, "score": 255479.63566300808 }, { "content": "#[test]\n\nfn test_cache_with_storage() {\n\n let allocator = Arena::new();\n\n\n\n let owned_entry_module = test_script().into_module();\n\n let loaded_main = LoadedModule::new(owned_entry_module);\n\n let entry_func = FunctionRef::new(&loaded_main, CompiledScript::MAIN_INDEX);\n\n let entry_module = entry_func.module();\n\n println!(\"MODULE: {}\", entry_module.as_module());\n\n\n\n let vm_cache = VMModuleCache::new(&allocator);\n\n\n\n // Function is not defined locally.\n\n assert!(vm_cache\n\n .resolve_function_ref(entry_module, 
FunctionHandleIndex::new(1))\n\n .unwrap()\n\n .is_none());\n\n\n\n {\n\n let fetcher = FakeFetcher::new(vec![test_module(\"module\").into_inner()]);\n\n let mut block_cache = BlockModuleCache::new(&vm_cache, fetcher);\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 75, "score": 254223.123272968 }, { "content": "/// Determine whether two tokens on the stack have the same type\n\npub fn stack_has_polymorphic_eq(state: &AbstractState, index1: usize, index2: usize) -> bool {\n\n if stack_has(state, index2, None) {\n\n state.stack_peek(index1) == state.stack_peek(index2)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 76, "score": 253763.78517356553 }, { "content": "/// Check whether a command is debugging command.\n\npub fn debug_format_cmd(cmd: &str) -> bool {\n\n cmd.ends_with('?')\n\n}\n\n\n", "file_path": "client/src/commands.rs", "rank": 77, "score": 253176.12452772906 }, { "content": "/// The operand stack.\n\nstruct Stack(Vec<Value>);\n\n\n\nimpl Stack {\n\n /// Create a new empty operand stack.\n\n fn new() -> Self {\n\n Stack(vec![])\n\n }\n\n\n\n /// Push a `Value` on the stack if the max stack size has not been reached. 
Abort execution\n\n /// otherwise.\n\n fn push(&mut self, value: Value) -> VMResult<()> {\n\n if self.0.len() < OPERAND_STACK_SIZE_LIMIT {\n\n self.0.push(value);\n\n Ok(())\n\n } else {\n\n Err(VMStatus::new(StatusCode::EXECUTION_STACK_OVERFLOW))\n\n }\n\n }\n\n\n\n /// Pop a `Value` off the stack or abort execution if the stack is empty.\n", "file_path": "language/vm/vm-runtime/src/interpreter.rs", "rank": 78, "score": 253039.1670247675 }, { "content": "// mod translator;\n\nfn compile_files(file_names: Vec<String>) -> (Vec<VerifiedModule>, SourceMap<Loc>) {\n\n let mut verified_modules = stdlib_modules().to_vec();\n\n let mut source_maps = stdlib_source_map().to_vec();\n\n let files_len = file_names.len();\n\n // let dep_files = &file_names[0..files_len - 1];\n\n let dep_files = &file_names[0..files_len];\n\n\n\n // let main_file = &file_names[files_len - 1];\n\n let address = AccountAddress::default();\n\n for file_name in dep_files {\n\n let code = fs::read_to_string(file_name).unwrap();\n\n let module = parse_module(&code).unwrap();\n\n let (compiled_module, source_map) =\n\n compile_module(address, module, &verified_modules).expect(\"module failed to compile\");\n\n let verified_module_res = VerifiedModule::new(compiled_module);\n\n\n\n match verified_module_res {\n\n Err(e) => {\n\n panic!(\"{:?}\", e);\n\n }\n", "file_path": "language/stackless-bytecode/bytecode-to-boogie/src/main.rs", "rank": 79, "score": 251925.43600494083 }, { "content": "#[test]\n\nfn test_loader_one_module() {\n\n // This test tests the linking of function within a single module: We have a module that defines\n\n // two functions, each with different name and signature. 
This test will make sure that we\n\n // link the function handle with the right function definition within the same module.\n\n let module = test_module(\"module\");\n\n let mod_id = module.self_id();\n\n\n\n let allocator = Arena::new();\n\n let loaded_program = VMModuleCache::new(&allocator);\n\n loaded_program.cache_module(module);\n\n let module_ref = loaded_program.get_loaded_module(&mod_id).unwrap().unwrap();\n\n\n\n // Get the function reference of the first two function handles.\n\n let func1_ref = loaded_program\n\n .resolve_function_ref(module_ref, FunctionHandleIndex::new(0))\n\n .unwrap()\n\n .unwrap();\n\n let func2_ref = loaded_program\n\n .resolve_function_ref(module_ref, FunctionHandleIndex::new(1))\n\n .unwrap()\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 80, "score": 250662.52339918076 }, { "content": "#[test]\n\nfn test_loader_cross_modules() {\n\n let script = test_script();\n\n let module = test_module(\"module\");\n\n\n\n let allocator = Arena::new();\n\n let loaded_program = VMModuleCache::new(&allocator);\n\n loaded_program.cache_module(module);\n\n\n\n let owned_entry_module = script.into_module();\n\n let loaded_main = LoadedModule::new(owned_entry_module);\n\n let entry_func = FunctionRef::new(&loaded_main, CompiledScript::MAIN_INDEX);\n\n let entry_module = entry_func.module();\n\n let func1 = loaded_program\n\n .resolve_function_ref(entry_module, FunctionHandleIndex::new(1))\n\n .unwrap()\n\n .unwrap();\n\n let func2 = loaded_program\n\n .resolve_function_ref(entry_module, FunctionHandleIndex::new(2))\n\n .unwrap()\n\n .unwrap();\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 81, "score": 250662.52339918076 }, { "content": "/// Determine whether an abstract value on the stack that is a reference points to something of the\n\n/// same type as another abstract value on the stack\n\npub fn stack_ref_polymorphic_eq(state: &AbstractState, index1: usize, 
index2: usize) -> bool {\n\n if stack_has(state, index2, None) {\n\n if let Some(abstract_value) = state.stack_peek(index1) {\n\n match abstract_value.token {\n\n SignatureToken::MutableReference(token) | SignatureToken::Reference(token) => {\n\n let abstract_value_inner = AbstractValue {\n\n token: (*token).clone(),\n\n kind: SignatureTokenView::new(&state.module, &*token).kind(&[]),\n\n };\n\n return Some(abstract_value_inner) == state.stack_peek(index2);\n\n }\n\n _ => return false,\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 82, "score": 250606.21239867792 }, { "content": "/// Determine whether an abstract value on the stack and a abstract value in the locals have the\n\n/// same type\n\npub fn stack_local_polymorphic_eq(state: &AbstractState, index1: usize, index2: usize) -> bool {\n\n if stack_has(state, index1, None) {\n\n if let Some((abstract_value, _)) = state.local_get(index2) {\n\n return state.stack_peek(index1) == Some(abstract_value.clone());\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 83, "score": 250606.21239867792 }, { "content": "/// Build a Tcp + Muxer transport\n\npub fn build_tcp_muxer_transport() -> impl Transport<Output = impl StreamMultiplexer> {\n\n TcpTransport::default().and_then(Yamux::upgrade_connection)\n\n}\n\n\n", "file_path": "network/socket-bench-server/src/lib.rs", "rank": 84, "score": 250196.30374884664 }, { "content": "/// Build a MemorySocket + Muxer transport\n\npub fn build_memsocket_muxer_transport() -> impl Transport<Output = impl StreamMultiplexer> {\n\n MemoryTransport::default().and_then(Yamux::upgrade_connection)\n\n}\n\n\n", "file_path": "network/socket-bench-server/src/lib.rs", "rank": 85, "score": 250196.30374884664 }, { "content": "// This generates a proposal for round 1\n\npub fn generate_corpus_proposal() -> Vec<u8> {\n\n let event_processor = create_node_for_fuzzing();\n\n 
block_on(async {\n\n let proposal = event_processor\n\n .generate_proposal(NewRoundEvent {\n\n round: 1,\n\n reason: NewRoundReason::QCReady,\n\n timeout: std::time::Duration::new(5, 0),\n\n })\n\n .await;\n\n // serialize and return proposal\n\n let proposal = proposal.unwrap();\n\n Proposal::try_from(proposal)\n\n .unwrap()\n\n .to_bytes()\n\n .unwrap()\n\n .to_vec()\n\n })\n\n}\n\n\n\n// optimization for the fuzzer\n\nlazy_static! {\n\n static ref STATIC_RUNTIME: Runtime = Runtime::new().unwrap();\n\n static ref FUZZING_SIGNER: ValidatorSigner = ValidatorSigner::from_int(1);\n\n}\n\n\n", "file_path": "consensus/src/chained_bft/event_processor_fuzzing.rs", "rank": 86, "score": 250112.76612909255 }, { "content": "/// Verify correctness of tables.\n\n///\n\n/// Tables cannot have duplicates, must cover the entire blob and must be disjoint.\n\nfn check_tables(tables: &mut Vec<Table>, end_tables: u64, length: u64) -> BinaryLoaderResult<()> {\n\n // there is no real reason to pass a mutable reference but we are sorting next line\n\n tables.sort_by(|t1, t2| t1.offset.cmp(&t2.offset));\n\n\n\n let mut current_offset = end_tables;\n\n let mut table_types = HashSet::new();\n\n for table in tables {\n\n let offset = u64::from(table.offset);\n\n if offset != current_offset {\n\n return Err(VMStatus::new(StatusCode::BAD_HEADER_TABLE));\n\n }\n\n if table.count == 0 {\n\n return Err(VMStatus::new(StatusCode::BAD_HEADER_TABLE));\n\n }\n\n let count = u64::from(table.count);\n\n if let Some(checked_offset) = current_offset.checked_add(count) {\n\n current_offset = checked_offset;\n\n }\n\n if current_offset > length {\n\n return Err(VMStatus::new(StatusCode::BAD_HEADER_TABLE));\n", "file_path": "language/vm/src/deserializer.rs", "rank": 87, "score": 250098.6260758966 }, { "content": "/// Determine if a character is permitted character.\n\n///\n\n/// A permitted character is either a permitted printable character, or a permitted\n\n/// newline. 
Any other characters are disallowed from appearing in the file.\n\npub fn is_permitted_char(c: char) -> bool {\n\n is_permitted_printable_char(c) || is_permitted_newline_char(c)\n\n}\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/parser.rs", "rank": 88, "score": 249911.5639780964 }, { "content": "/// Checks whether a line denotes the start of a new transaction.\n\npub fn is_new_transaction(s: &str) -> bool {\n\n let s = s.trim();\n\n if !s.starts_with(\"//!\") {\n\n return false;\n\n }\n\n s[3..].trim_start() == \"new-transaction\"\n\n}\n\n\n\nimpl Entry {\n\n pub fn try_parse(s: &str) -> Result<Option<Self>> {\n\n if s.starts_with(\"//!\") {\n\n Ok(Some(s.parse::<Entry>()?))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n\n\n\n/// A table of options specific to one transaction, fine tweaking how the transaction\n\n/// is handled by the testing infra.\n", "file_path": "language/functional_tests/src/config/transaction.rs", "rank": 89, "score": 249911.5639780964 }, { "content": "// mod translator;\n\nfn compile_files(file_names: Vec<String>) -> (Vec<VerifiedModule>, SourceMap<Loc>) {\n\n let mut verified_modules = stdlib_modules().to_vec();\n\n let mut source_maps = stdlib_source_map().to_vec();\n\n let files_len = file_names.len();\n\n let dep_files = &file_names[0..files_len];\n\n\n\n // assuming the last file is a program that might contain a script\n\n let address = AccountAddress::default();\n\n for file_name in dep_files {\n\n let code = fs::read_to_string(file_name).unwrap();\n\n let module = parse_module(&code).unwrap();\n\n let (compiled_module, source_map) =\n\n compile_module(address, module, &verified_modules).expect(\"module failed to compile\");\n\n let verified_module_res = VerifiedModule::new(compiled_module);\n\n\n\n match verified_module_res {\n\n Err(e) => {\n\n panic!(\"{:?}\", e);\n\n }\n\n Ok(verified_module) => {\n\n verified_modules.push(verified_module);\n\n source_maps.push(source_map);\n\n }\n\n }\n\n }\n\n (verified_modules, 
source_maps)\n\n}\n\n\n", "file_path": "language/stackless-bytecode/bytecode-to-boogie/tests/translator_tests.rs", "rank": 90, "score": 248934.059400947 }, { "content": "#[inline]\n\nfn check_bounds_impl<T, I>(pool: &[T], idx: I) -> Option<VMStatus>\n\nwhere\n\n I: ModuleIndex,\n\n{\n\n let idx = idx.into_index();\n\n let len = pool.len();\n\n if idx >= len {\n\n let status = bounds_error(I::KIND, idx, len, StatusCode::INDEX_OUT_OF_BOUNDS);\n\n Some(status)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/check_bounds.rs", "rank": 91, "score": 248688.56958884269 }, { "content": "/// Build a MemorySocket + Noise + Muxer transport\n\npub fn build_memsocket_noise_muxer_transport() -> impl Transport<Output = impl StreamMultiplexer> {\n\n MemoryTransport::default()\n\n .and_then(move |socket, origin| {\n\n async move {\n\n let noise_config = Arc::new(NoiseConfig::new_random());\n\n let (_remote_static_key, socket) =\n\n noise_config.upgrade_connection(socket, origin).await?;\n\n Ok(socket)\n\n }\n\n })\n\n .and_then(Yamux::upgrade_connection)\n\n}\n\n\n", "file_path": "network/socket-bench-server/src/lib.rs", "rank": 92, "score": 247819.46687087527 }, { "content": "/// Build a Tcp + Noise + Muxer transport\n\npub fn build_tcp_noise_muxer_transport() -> impl Transport<Output = impl StreamMultiplexer> {\n\n TcpTransport::default()\n\n .and_then(move |socket, origin| {\n\n async move {\n\n let noise_config = Arc::new(NoiseConfig::new_random());\n\n let (_remote_static_key, socket) =\n\n noise_config.upgrade_connection(socket, origin).await?;\n\n Ok(socket)\n\n }\n\n })\n\n .and_then(Yamux::upgrade_connection)\n\n}\n\n\n\n/// Server side handler for send throughput benchmark when the messages are sent\n\n/// over a simple stream (tcp or in-memory).\n\npub async fn server_stream_handler<L, I, S, E>(mut server_listener: L)\n\nwhere\n\n L: Stream<Item = Result<(I, Multiaddr), E>> + Unpin,\n\n I: Future<Output = Result<S, E>>,\n\n S: AsyncRead + AsyncWrite 
+ Unpin,\n", "file_path": "network/socket-bench-server/src/lib.rs", "rank": 93, "score": 247819.4668708752 }, { "content": "fn encode_metrics(encoder: impl Encoder, whitelist: Vec<String>) -> Vec<u8> {\n\n let mut metric_families = prometheus::gather();\n\n if !whitelist.is_empty() {\n\n metric_families = whitelist_metrics(metric_families, whitelist);\n\n }\n\n let mut buffer = vec![];\n\n encoder.encode(&metric_families, &mut buffer).unwrap();\n\n buffer\n\n}\n\n\n", "file_path": "common/metrics/src/metric_server.rs", "rank": 94, "score": 247305.59616561522 }, { "content": "/// Get up to MAX_EVENTS_IN_QUEUE last events and clears the queue\n\npub fn pop_last_entries() -> Vec<JsonLogEntry> {\n\n let mut queue = JSON_LOG_ENTRY_QUEUE.lock().unwrap();\n\n queue.drain(..).collect()\n\n}\n", "file_path": "common/debug-interface/src/json_log.rs", "rank": 95, "score": 247240.52223438857 }, { "content": "/// Determine if a character is an allowed eye-visible (printable) character.\n\n///\n\n/// The only allowed printable characters are the printable ascii characters (SPACE through ~) and\n\n/// tabs. All other characters are invalid and we return false.\n\npub fn is_permitted_printable_char(c: char) -> bool {\n\n let x = c as u32;\n\n let is_above_space = x >= 0x20; // Don't allow meta characters\n\n let is_below_tilde = x <= 0x7E; // Don't allow DEL meta character\n\n let is_tab = x == 0x09; // Allow tabs\n\n (is_above_space && is_below_tilde) || is_tab\n\n}\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/parser.rs", "rank": 96, "score": 246785.87684046733 }, { "content": "/// Determine if a character is a permitted newline character.\n\n///\n\n/// The only permitted newline character is \\n. 
All others are invalid.\n\npub fn is_permitted_newline_char(c: char) -> bool {\n\n let x = c as u32;\n\n x == 0x0A\n\n}\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/parser.rs", "rank": 97, "score": 246785.87684046733 }, { "content": "/// Helper function to serialize version in a more efficient encoding.\n\n/// We use a super simple encoding - the high bit is set if more bytes follow.\n\nfn serialize_u64_varint(mut num: u64, binary: &mut Vec<u8>) {\n\n for _ in 0..8 {\n\n let low_bits = num as u8 & 0x7f;\n\n num >>= 7;\n\n let more = (num > 0) as u8;\n\n binary.push(low_bits | more << 7);\n\n if more == 0 {\n\n return;\n\n }\n\n }\n\n // Last byte is encoded raw; this means there are no bad encodings.\n\n assert_ne!(num, 0);\n\n assert!(num <= 0xff);\n\n binary.push(num as u8);\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/node_type/mod.rs", "rank": 98, "score": 246419.75972743327 }, { "content": "/// Parse a cmd string, the first element in the returned vector is the command to run\n\npub fn parse_cmd(cmd_str: &str) -> Vec<&str> {\n\n cmd_str.split_ascii_whitespace().collect()\n\n}\n\n\n", "file_path": "client/src/commands.rs", "rank": 99, "score": 243983.49421704287 } ]
Rust
src/serialization/v2.rs
duncandean/macaroon
601ca1d8d76a93c6e4deae9f8d3f4bf0b2d7a67c
use caveat::{Caveat, CaveatBuilder}; use error::MacaroonError; use serialization::macaroon_builder::MacaroonBuilder; use ByteString; use Macaroon; use Result; const EOS: u8 = 0; const LOCATION: u8 = 1; const IDENTIFIER: u8 = 2; const VID: u8 = 4; const SIGNATURE: u8 = 6; const VARINT_PACK_SIZE: usize = 128; fn varint_size(size: usize) -> Vec<u8> { let mut buffer: Vec<u8> = Vec::new(); let mut my_size: usize = size; while my_size >= VARINT_PACK_SIZE { buffer.push(((my_size & (VARINT_PACK_SIZE - 1)) | VARINT_PACK_SIZE) as u8); my_size >>= 7; } buffer.push(my_size as u8); buffer } fn serialize_field(tag: u8, value: &[u8], buffer: &mut Vec<u8>) { buffer.push(tag); buffer.extend(varint_size(value.len())); buffer.extend(value); } pub fn serialize(macaroon: &Macaroon) -> Result<Vec<u8>> { let mut buffer: Vec<u8> = Vec::new(); buffer.push(2); if let Some(ref location) = macaroon.location() { serialize_field(LOCATION, &location.as_bytes().to_vec(), &mut buffer); }; serialize_field(IDENTIFIER, &macaroon.identifier().0, &mut buffer); buffer.push(EOS); for c in macaroon.caveats() { match c { Caveat::FirstParty(fp) => { serialize_field(IDENTIFIER, &fp.predicate().0, &mut buffer); buffer.push(EOS); } Caveat::ThirdParty(tp) => { serialize_field(LOCATION, tp.location().as_bytes(), &mut buffer); serialize_field(IDENTIFIER, &tp.id().0, &mut buffer); serialize_field(VID, &tp.verifier_id().0, &mut buffer); buffer.push(EOS); } } } buffer.push(EOS); serialize_field(SIGNATURE, &macaroon.signature(), &mut buffer); Ok(buffer) } struct Deserializer<'r> { data: &'r [u8], index: usize, } impl<'r> Deserializer<'r> { pub fn new(data: &[u8]) -> Deserializer { Deserializer { data, index: 0 } } fn get_byte(&mut self) -> Result<u8> { if self.index > self.data.len() - 1 { return Err(MacaroonError::DeserializationError(String::from( "Buffer overrun", ))); } let byte = self.data[self.index]; self.index += 1; Ok(byte) } pub fn get_tag(&mut self) -> Result<u8> { self.get_byte() } pub fn get_eos(&mut 
self) -> Result<u8> { let eos = self.get_byte()?; match eos { EOS => Ok(eos), _ => Err(MacaroonError::DeserializationError(String::from( "Expected EOS", ))), } } pub fn get_field(&mut self) -> Result<Vec<u8>> { let size: usize = self.get_field_size()?; if size + self.index > self.data.len() { return Err(MacaroonError::DeserializationError(String::from( "Unexpected end of \ field", ))); } let field: Vec<u8> = self.data[self.index..self.index + size].to_vec(); self.index += size; Ok(field) } fn get_field_size(&mut self) -> Result<usize> { let mut size: usize = 0; let mut shift: usize = 0; let mut byte: u8; while shift <= 63 { byte = self.get_byte()?; if byte & 128 != 0 { size |= ((byte & 127) << shift) as usize; } else { size |= (byte << shift) as usize; return Ok(size); } shift += 7; } Err(MacaroonError::DeserializationError(String::from( "Error in field size", ))) } } pub fn deserialize(data: &[u8]) -> Result<Macaroon> { let mut builder: MacaroonBuilder = MacaroonBuilder::new(); let mut deserializer: Deserializer = Deserializer::new(data); if deserializer.get_byte()? 
!= 2 { return Err(MacaroonError::DeserializationError(String::from( "Wrong version number", ))); } let mut tag: u8 = deserializer.get_tag()?; match tag { LOCATION => builder.set_location(&String::from_utf8(deserializer.get_field()?)?), IDENTIFIER => builder.set_identifier(ByteString(deserializer.get_field()?)), _ => { return Err(MacaroonError::DeserializationError(String::from( "Identifier not found", ))) } } if builder.has_location() { tag = deserializer.get_tag()?; match tag { IDENTIFIER => { builder.set_identifier(ByteString(deserializer.get_field()?)); } _ => { return Err(MacaroonError::DeserializationError(String::from( "Identifier not \ found", ))) } } } deserializer.get_eos()?; tag = deserializer.get_tag()?; while tag != EOS { let mut caveat_builder: CaveatBuilder = CaveatBuilder::new(); match tag { LOCATION => { let field: Vec<u8> = deserializer.get_field()?; caveat_builder.add_location(String::from_utf8(field)?); } IDENTIFIER => caveat_builder.add_id(ByteString(deserializer.get_field()?)), _ => { return Err(MacaroonError::DeserializationError(String::from( "Caveat identifier \ not found", ))) } } if caveat_builder.has_location() { tag = deserializer.get_tag()?; match tag { IDENTIFIER => { let field: Vec<u8> = deserializer.get_field()?; caveat_builder.add_id(ByteString(field)); } _ => { return Err(MacaroonError::DeserializationError(String::from( "Caveat identifier \ not found", ))) } } } tag = deserializer.get_tag()?; match tag { VID => { let field: Vec<u8> = deserializer.get_field()?; caveat_builder.add_verifier_id(ByteString(field)); builder.add_caveat(caveat_builder.build()?); deserializer.get_eos()?; tag = deserializer.get_tag()?; } EOS => { builder.add_caveat(caveat_builder.build()?); tag = deserializer.get_tag()?; } _ => { return Err(MacaroonError::DeserializationError( "Unexpected caveat tag found".into(), )) } } } tag = deserializer.get_tag()?; if tag == SIGNATURE { let sig: Vec<u8> = deserializer.get_field()?; if sig.len() != 32 { return 
Err(MacaroonError::DeserializationError( "Bad signature length".into(), )); } builder.set_signature(&sig); } else { return Err(MacaroonError::DeserializationError( "Unexpected tag found".into(), )); } Ok(builder.build()?) } #[cfg(test)] mod tests { use caveat; use caveat::Caveat; use serialization::macaroon_builder::MacaroonBuilder; use ByteString; use Macaroon; use MacaroonKey; #[test] fn test_deserialize() { const SERIALIZED: &str = "AgETaHR0cDovL2V4YW1wbGUub3JnLwIFa2V5aWQAAhRhY2NvdW50ID0gMzczNTkyODU1OQACDHVzZXIgPSBhbGljZQAABiBL6WfNHqDGsmuvakqU7psFsViG2guoXoxCqTyNDhJe_A=="; const SIGNATURE: [u8; 32] = [ 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88, 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252, ]; let serialized: Vec<u8> = base64::decode_config(SERIALIZED, base64::URL_SAFE).unwrap(); let macaroon = super::deserialize(&serialized).unwrap(); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(2, macaroon.caveats().len()); let predicate = match &macaroon.caveats()[0] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("account = 3735928559"), predicate); let predicate = match &macaroon.caveats()[1] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("user = alice"), predicate); assert_eq!(MacaroonKey::from(SIGNATURE), macaroon.signature()); } #[test] fn test_serialize() { const SERIALIZED: &str = "AgETaHR0cDovL2V4YW1wbGUub3JnLwIFa2V5aWQAAhRhY2NvdW50ID0gMzczNTkyODU1OQACDHVzZXIgPSBhbGljZQAABiBL6WfNHqDGsmuvakqU7psFsViG2guoXoxCqTyNDhJe_A=="; const SIGNATURE: [u8; 32] = [ 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88, 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252, ]; let mut builder = MacaroonBuilder::new(); builder.add_caveat(caveat::new_first_party("account = 
3735928559".into())); builder.add_caveat(caveat::new_first_party("user = alice".into())); builder.set_location("http://example.org/"); builder.set_identifier("keyid".into()); builder.set_signature(&SIGNATURE); let serialized = super::serialize(&builder.build().unwrap()).unwrap(); assert_eq!( base64::decode_config(SERIALIZED, base64::URL_SAFE).unwrap(), serialized ); } #[test] fn test_serialize_deserialize() { let mut macaroon = Macaroon::create(Some("http://example.org/".into()), &"key".into(), "keyid".into()).unwrap(); macaroon.add_first_party_caveat("account = 3735928559".into()); macaroon.add_first_party_caveat("user = alice".into()); macaroon.add_third_party_caveat( "https://auth.mybank.com", &"caveat key".into(), "caveat".into(), ); let serialized = super::serialize(&macaroon).unwrap(); macaroon = super::deserialize(&serialized).unwrap(); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(3, macaroon.caveats().len()); let predicate = match &macaroon.caveats()[0] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("account = 3735928559"), predicate); let predicate = match &macaroon.caveats()[1] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("user = alice"), predicate); let id = match &macaroon.caveats()[2] { Caveat::ThirdParty(tp) => tp.id(), _ => ByteString::default(), }; assert_eq!(ByteString::from("caveat"), id); let location = match &macaroon.caveats()[2] { Caveat::ThirdParty(tp) => tp.location(), _ => String::default(), }; assert_eq!("https://auth.mybank.com", location); } }
use caveat::{Caveat, CaveatBuilder}; use error::MacaroonError; use serialization::macaroon_builder::MacaroonBuilder; use ByteString; use Macaroon; use Result; const EOS: u8 = 0; const LOCATION: u8 = 1; const IDENTIFIER: u8 = 2; const VID: u8 = 4; const SIGNATURE: u8 = 6; const VARINT_PACK_SIZE: usize = 128; fn varint_size(size: usize) -> Vec<u8> { let mut buffer: Vec<u8> = Vec::new(); let mut my_size: usize = size; while my_size >= VARINT_PACK_SIZE { buffer.push(((my_size & (VARINT_PACK_SIZE - 1)) | VARINT_PACK_SIZE) as u8); my_size >>= 7; } buffer.push(my_size as u8); buffer } fn serialize_field(tag: u8, value: &[u8], buffer: &mut Vec<u8>) { buffer.push(tag); buffer.extend(varint_size(value.len())); buffer.extend(value); } pub fn serialize(macaroon: &Macaroon) -> Result<Vec<u8>> { let mut buffer: Vec<u8> = Vec::new(); buffer.push(2); if let Some(ref location) = macaroon.location() { serialize_field(LOCATION, &location.as_bytes().to_vec(), &mut buffer); }; serialize_field(IDENTIFIER, &macaroon.identifier().0, &mut buffer); buffer.push(EOS); for c in macaroon.caveats() { match c { Caveat::FirstParty(fp) => { serialize_field(IDENTIFIER, &fp.predicate().0, &mut buffer); buffer.push(EOS); } Caveat::ThirdParty(tp) => { serialize_field(LOCATION, tp.location().as_bytes(), &mut buffer); serialize_field(IDENTIFIER, &tp.id().0, &mut buffer); serialize_field(VID, &tp.verifier_id().0, &mut buffer); buffer.push(EOS); } } } buffer.push(EOS); serialize_field(SIGNATURE, &macaroon.signature(), &mut buffer); Ok(buffer) } struct Deserializer<'r> { data: &'r [u8], index: usize, } impl<'r> Deserializer<'r> { pub fn new(data: &[u8]) -> Deserializer { Deserializer { data, index: 0 } } fn get_byte(&mut self) -> Result<u8> { if self.index > self.data.len() - 1 { return Err(MacaroonError::DeserializationError(String::from( "Buffer overrun", ))); } let byte = self.data[self.index]; self.index += 1; Ok(byte) } pub fn get_tag(&mut self) -> Result<u8> { self.get_byte() } pub fn get_eos(&mut 
self) -> Result<u8> { let eos = self.get_byte()?; match eos { EOS => Ok(eos), _ => Err(MacaroonError::DeserializationError(String::from( "Expected EOS", ))), } } pub fn get_field(&mut self) -> Result<Vec<u8>> { let size: usize = self.get_field_size()?; if size + self.index > self.data.len() { return Err(MacaroonError::DeserializationError(String::from( "Unexpected end of \ field", ))); } let field: Vec<u8> = self.data[self.index..self.index + size].to_vec(); self.index += size; Ok(field) } fn get_field_size(&mut self) -> Result<usize> { let mut size: usize = 0; let mut shift: usize = 0; let mut byte: u8; while shift <= 63 { byte = self.get_byte()?; if byte & 128 != 0 { size |= ((byte & 127) << shift) as usize; } else { size |= (byte << shift) as usize; return Ok(size); } shift += 7; } Err(MacaroonError::DeserializationError(String::from( "Error in field size", ))) } } pub fn deserialize(data: &[u8]) -> Result<Macaroon> { let mut builder: MacaroonBuilder = MacaroonBuilder::new(); let mut deserializer: Deserializer = Deserializer::new(data); if deserializer.get_byte()? 
!= 2 { return Err(MacaroonError::DeserializationError(String::from( "Wrong version number", ))); } let mut tag: u8 = deserializer.get_tag()?; match tag { LOCATION => builder.set_location(&String::from_utf8(deserializer.get_field()?)?), IDENTIFIER => builder.set_identifier(ByteString(deserializer.get_field()?)), _ => { return Err(MacaroonError::DeserializationError(String::from( "Identifier not found", ))) } } if builder.has_location() { tag = deserializer.get_tag()?; match tag { IDENTIFIER => { builder.set_identifier(ByteString(deserializer.get_field()?)); } _ => { return Err(MacaroonError::DeserializationError(String::from( "Identifier not \ found", ))) } } } deserializer.get_eos()?; tag = deserializer.get_tag()?; while tag != EOS { let mut caveat_builder: CaveatBuilder = CaveatBuilder::new(); match tag { LOCATION => { let field: Vec<u8> = deserializer.get_field()?; caveat_builder.add_location(String::from_utf8(field)?); } IDENTIFIER => caveat_builder.add_id(ByteString(deserializer.get_field()?)), _ => { return Err(MacaroonError::DeserializationError(String::from( "Caveat identifier \ not found", ))) } } if caveat_builder.has_location() { tag = deserializer.get_tag()?; match tag { IDENTIFIER => { let field: Vec<u8> = deserializer.get_field()?; caveat_builder.add_id(ByteString(field)); } _ => { return Err(MacaroonError::DeserializationError(String::from( "Caveat identifier \ not found", ))) } } } tag = deserializer.get_tag()?; match tag { VID => { let field: Vec<u8> = deserializer.get_field()?; caveat_builder.add_verifier_id(ByteString(field)); builder.add_caveat(caveat_builder.build()?); deserializer.get_eos()?; tag = deserializer.get_tag()?; } EOS => { builder.add_caveat(caveat_builder.build()?); tag = deserializer.get_tag()?; } _ => { return Err(MacaroonError::DeserializationError( "Unexpected caveat tag found".into(), )) } } } tag = deserializer.get_tag()?; if tag == SIGNATURE { let sig: Vec<u8> = deserializer.get_field()?; if sig.len() != 32 { return 
Err(MacaroonError::DeserializationError( "Bad signature length".into(), )); } builder.set_signature(&sig); } else { return Err(MacaroonError::DeserializationError( "Unexpected tag found".into(), )); } Ok(builder.build()?) } #[cfg(test)] mod tests { use caveat; use caveat::Caveat; use serialization::macaroon_builder::MacaroonBuilder; use ByteString; use Macaroon; use MacaroonKey; #[test] fn test_deserialize() { const SERIALIZED: &str = "AgETaHR0cDovL2V4YW1wbGUub3JnLwIFa2V5aWQAAhRhY2NvdW50ID0gMzczNTkyODU1OQACDHVzZXIgPSBhbGljZQAABiBL6WfNHqDGsmuvakqU7psFsViG2guoXoxCqTyNDhJe_A=="; const SIGNATURE: [u8; 32] = [ 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88, 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252, ]; let serialized: Vec<u8> = base64::decode_config(SERIALIZED, base64::URL_SAFE).unwrap(); let macaroon = super::deserialize(&serialized).unwrap(); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(2, macaroon.caveats().len()); let predicate = match &macaroon.caveats()[0] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("account = 3735928559"), predicate); let predicate = match &macaroon.caveats()[1] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("user = alice"), predicate); assert_eq!(MacaroonKey::from(SIGNATURE), macaroon.signature()); } #[test] fn test_serialize() { const SERIALIZED: &str = "AgETaHR0cDovL2V4YW1wbGUub3JnLwIFa2V5aWQAAhRhY2NvdW50ID0gMzczNTkyODU1OQACDHVzZXIgPSBhbGljZQAABiBL6WfNHqDGsmuvakqU7psFsViG2guoXoxCqTyNDhJe_A=="; const SIGNATURE: [u8; 32] = [ 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88, 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252, ]; let mut builder = MacaroonBuilder::new(); builder.add_caveat(caveat::new_first_party("account = 
3735928559".into())); builder.add_caveat(caveat::new_first_party("user = alice".into())); builder.set_location("http://example.org/"); builder.set_identifier("keyid".into()); builder.set_signature(&SIGNATURE); let serialized = super::serialize(&builder.build().unwrap()).unwrap(); assert_eq!( base64::decode_config(SERIALIZED, base64::URL_SAFE).unwrap(), serialized ); } #[test]
}
fn test_serialize_deserialize() { let mut macaroon = Macaroon::create(Some("http://example.org/".into()), &"key".into(), "keyid".into()).unwrap(); macaroon.add_first_party_caveat("account = 3735928559".into()); macaroon.add_first_party_caveat("user = alice".into()); macaroon.add_third_party_caveat( "https://auth.mybank.com", &"caveat key".into(), "caveat".into(), ); let serialized = super::serialize(&macaroon).unwrap(); macaroon = super::deserialize(&serialized).unwrap(); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(3, macaroon.caveats().len()); let predicate = match &macaroon.caveats()[0] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("account = 3735928559"), predicate); let predicate = match &macaroon.caveats()[1] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("user = alice"), predicate); let id = match &macaroon.caveats()[2] { Caveat::ThirdParty(tp) => tp.id(), _ => ByteString::default(), }; assert_eq!(ByteString::from("caveat"), id); let location = match &macaroon.caveats()[2] { Caveat::ThirdParty(tp) => tp.location(), _ => String::default(), }; assert_eq!("https://auth.mybank.com", location); }
function_block-full_function
[ { "content": "pub fn deserialize(data: &[u8]) -> Result<Macaroon> {\n\n let v2j: Serialization = serde_json::from_slice(data)?;\n\n Macaroon::from_json(v2j)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::Format;\n\n use ByteString;\n\n use Caveat;\n\n use Macaroon;\n\n use MacaroonKey;\n\n\n\n const SERIALIZED_JSON: &str = \"{\\\"v\\\":2,\\\"l\\\":\\\"http://example.org/\\\",\\\"i\\\":\\\"keyid\\\",\\\n\n \\\"c\\\":[{\\\"i\\\":\\\"account = 3735928559\\\"},{\\\"i\\\":\\\"user = \\\n\n alice\\\"}],\\\"s64\\\":\\\n\n \\\"S-lnzR6gxrJrr2pKlO6bBbFYhtoLqF6MQqk8jQ4SXvw\\\"}\";\n\n const SIGNATURE: [u8; 32] = [\n\n 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88, 134,\n\n 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252,\n", "file_path": "src/serialization/v2json.rs", "rank": 0, "score": 260868.11102964397 }, { "content": "pub fn deserialize(base64: &[u8]) -> Result<Macaroon> {\n\n let data = base64_decode(&String::from_utf8(base64.to_vec())?)?;\n\n let mut builder: MacaroonBuilder = MacaroonBuilder::new();\n\n let mut caveat_builder: CaveatBuilder = CaveatBuilder::new();\n\n for packet in deserialize_as_packets(data.as_slice(), Vec::new())? 
{\n\n match packet.key.as_str() {\n\n LOCATION => {\n\n builder.set_location(&String::from_utf8(packet.value)?);\n\n }\n\n IDENTIFIER => {\n\n builder.set_identifier(ByteString(packet.value));\n\n }\n\n SIGNATURE => {\n\n if caveat_builder.has_id() {\n\n builder.add_caveat(caveat_builder.build()?);\n\n caveat_builder = CaveatBuilder::new();\n\n }\n\n if packet.value.len() != 32 {\n\n error!(\n\n \"deserialize_v1: Deserialization error - signature length is {}\",\n", "file_path": "src/serialization/v1.rs", "rank": 3, "score": 222544.2193225325 }, { "content": "pub fn serialize(macaroon: &Macaroon) -> Result<Vec<u8>> {\n\n let mut serialized: Vec<u8> = Vec::new();\n\n if let Some(ref location) = macaroon.location() {\n\n serialized.extend(serialize_as_packet(LOCATION, location.as_bytes()));\n\n };\n\n serialized.extend(serialize_as_packet(IDENTIFIER, &macaroon.identifier().0));\n\n for c in macaroon.caveats() {\n\n match c {\n\n Caveat::FirstParty(fp) => {\n\n serialized.extend(serialize_as_packet(CID, &fp.predicate().0));\n\n }\n\n Caveat::ThirdParty(tp) => {\n\n serialized.extend(serialize_as_packet(CID, &tp.id().0));\n\n serialized.extend(serialize_as_packet(VID, &tp.verifier_id().0));\n\n serialized.extend(serialize_as_packet(CL, tp.location().as_bytes()))\n\n }\n\n }\n\n }\n\n serialized.extend(serialize_as_packet(SIGNATURE, &macaroon.signature()));\n\n Ok(base64::encode_config(&serialized, base64::URL_SAFE)\n\n .as_bytes()\n\n .to_vec())\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 4, "score": 213836.7396571474 }, { "content": "pub fn serialize(macaroon: &Macaroon) -> Result<Vec<u8>> {\n\n let serialized: String =\n\n serde_json::to_string(&Serialization::from_macaroon(macaroon.clone())?)?;\n\n Ok(serialized.into_bytes())\n\n}\n\n\n", "file_path": "src/serialization/v2json.rs", "rank": 6, "score": 213836.7396571474 }, { "content": "fn serialize_as_packet<'r>(tag: &'r str, value: &'r [u8]) -> Vec<u8> {\n\n let mut packet: Vec<u8> = 
Vec::new();\n\n let size = HEADER_SIZE + 2 + tag.len() + value.len();\n\n packet.extend(packet_header(size));\n\n packet.extend_from_slice(tag.as_bytes());\n\n packet.extend_from_slice(b\" \");\n\n packet.extend_from_slice(value);\n\n packet.extend_from_slice(b\"\\n\");\n\n\n\n packet\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 7, "score": 212530.4440171532 }, { "content": "fn deserialize_as_packets(data: &[u8], mut packets: Vec<Packet>) -> Result<Vec<Packet>> {\n\n if data.is_empty() {\n\n return Ok(packets);\n\n }\n\n let hex: &str = str::from_utf8(&data[..4])?;\n\n let size: usize = usize::from_str_radix(hex, 16)?;\n\n let packet_data = &data[4..size];\n\n let index = split_index(packet_data)?;\n\n let (key_slice, value_slice) = packet_data.split_at(index);\n\n packets.push(Packet {\n\n key: String::from_utf8(key_slice.to_vec())?,\n\n // skip beginning space and terminating \\n\n\n value: value_slice[1..value_slice.len() - 1].to_vec(),\n\n });\n\n deserialize_as_packets(&data[size..], packets)\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 8, "score": 187255.91907302308 }, { "content": "fn split_index(packet: &[u8]) -> Result<usize> {\n\n match packet.iter().position(|&r| r == b' ') {\n\n Some(index) => Ok(index),\n\n None => Err(MacaroonError::DeserializationError(String::from(\n\n \"Key/value error\",\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 9, "score": 172608.47758572968 }, { "content": "pub fn new_third_party(id: ByteString, verifier_id: ByteString, location: &str) -> Caveat {\n\n Caveat::ThirdParty(ThirdParty {\n\n id,\n\n verifier_id,\n\n location: String::from(location),\n\n })\n\n}\n\n\n\n#[derive(Default)]\n\npub struct CaveatBuilder {\n\n id: Option<ByteString>,\n\n verifier_id: Option<ByteString>,\n\n location: Option<String>,\n\n}\n\n\n\nimpl CaveatBuilder {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n", "file_path": "src/caveat.rs", "rank": 10, "score": 
171152.50742734212 }, { "content": "fn base64_decode(s: &str) -> Result<Vec<u8>> {\n\n Ok(base64::decode_config(s, base64::URL_SAFE)?)\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 11, "score": 165548.57078105627 }, { "content": "pub fn new_first_party(predicate: ByteString) -> Caveat {\n\n Caveat::FirstParty(FirstParty { predicate })\n\n}\n\n\n", "file_path": "src/caveat.rs", "rank": 13, "score": 159773.5712005162 }, { "content": "pub fn decrypt_key<T, U>(key: &T, data: &U) -> Result<MacaroonKey>\n\nwhere\n\n T: AsRef<[u8; sodiumoxide::crypto::auth::KEYBYTES]> + ?Sized,\n\n U: AsRef<[u8]> + ?Sized,\n\n{\n\n let raw_data: &[u8] = data.as_ref();\n\n if raw_data.len() <= secretbox::NONCEBYTES + secretbox::MACBYTES {\n\n error!(\"crypto::decrypt: Encrypted data {:?} too short\", raw_data);\n\n return Err(MacaroonError::DecryptionError(\"Encrypted data too short\"));\n\n }\n\n let mut nonce: [u8; secretbox::NONCEBYTES] = [0; secretbox::NONCEBYTES];\n\n nonce.clone_from_slice(&raw_data[..secretbox::NONCEBYTES]);\n\n let mut temp: Vec<u8> = Vec::new();\n\n temp.extend(&raw_data[secretbox::NONCEBYTES..]);\n\n let ciphertext = temp.as_slice();\n\n match secretbox::open(\n\n ciphertext,\n\n &secretbox::Nonce(nonce),\n\n &secretbox::Key(*key.as_ref()),\n\n ) {\n", "file_path": "src/crypto.rs", "rank": 14, "score": 153219.96472809848 }, { "content": "fn packet_header(size: usize) -> Vec<u8> {\n\n let mut header: Vec<u8> = Vec::new();\n\n header.push(to_hex_char(((size >> 12) & 15) as u8));\n\n header.push(to_hex_char(((size >> 8) & 15) as u8));\n\n header.push(to_hex_char(((size >> 4) & 15) as u8));\n\n header.push(to_hex_char((size & 15) as u8));\n\n\n\n header\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 15, "score": 152740.31257115086 }, { "content": "fn to_hex_char(value: u8) -> u8 {\n\n let hex = format!(\"{:1x}\", value);\n\n hex.as_bytes()[0]\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 16, "score": 131642.2652580349 }, { 
"content": "/// Initializes the cryptographic libraries. Although you can use macaroon-rs without\n\n/// calling this, the underlying random-number generator is not guaranteed to be thread-safe\n\n/// if you don't.\n\npub fn initialize() -> Result<()> {\n\n match sodiumoxide::init() {\n\n Ok(_) => Ok(()),\n\n Err(_) => Err(MacaroonError::InitializationError),\n\n }\n\n}\n\n\n\n// An implementation that represents any binary data. By spec, most fields in a\n\n// macaroon support binary encoded as base64, so ByteString has methods to\n\n// convert to and from base64 strings\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct ByteString(pub Vec<u8>);\n\n\n\nimpl AsRef<[u8]> for ByteString {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<&str> for ByteString {\n", "file_path": "src/lib.rs", "rank": 17, "score": 129613.73871134606 }, { "content": "#[derive(Debug, Default, Deserialize, Serialize)]\n\nstruct Caveat {\n\n i: Option<String>,\n\n i64: Option<ByteString>,\n\n l: Option<String>,\n\n l64: Option<String>,\n\n v: Option<String>,\n\n v64: Option<ByteString>,\n\n}\n\n\n", "file_path": "src/serialization/v2json.rs", "rank": 18, "score": 123253.1713065952 }, { "content": "fn generate_derived_key(key: &[u8]) -> MacaroonKey {\n\n hmac(&KEY_GENERATOR, key)\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 20, "score": 114869.60758924077 }, { "content": "pub fn hmac<T, U>(key: &T, text: &U) -> MacaroonKey\n\nwhere\n\n T: AsRef<[u8; sodiumoxide::crypto::auth::KEYBYTES]> + ?Sized,\n\n U: AsRef<[u8]> + ?Sized,\n\n{\n\n let Tag(result_bytes) = authenticate(text.as_ref(), &Key(*key.as_ref()));\n\n MacaroonKey(result_bytes)\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 21, "score": 96071.4132980741 }, { "content": "pub fn encrypt_key<T>(key: &T, plaintext: &T) -> Vec<u8>\n\nwhere\n\n T: AsRef<[u8; sodiumoxide::crypto::auth::KEYBYTES]> + ?Sized,\n\n{\n\n let nonce = secretbox::gen_nonce();\n\n let encrypted = 
secretbox::seal(plaintext.as_ref(), &nonce, &secretbox::Key(*key.as_ref()));\n\n let mut ret: Vec<u8> = Vec::new();\n\n ret.extend(&nonce.0);\n\n ret.extend(encrypted);\n\n ret\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 22, "score": 94834.3069048145 }, { "content": "pub fn hmac2<T, U>(key: &T, text1: &U, text2: &U) -> MacaroonKey\n\nwhere\n\n T: AsRef<[u8; sodiumoxide::crypto::auth::KEYBYTES]> + ?Sized,\n\n U: AsRef<[u8]> + ?Sized,\n\n{\n\n let MacaroonKey(tmp1) = hmac(key, text1);\n\n let MacaroonKey(tmp2) = hmac(key, text2);\n\n let tmp = [tmp1, tmp2].concat();\n\n hmac(key, &tmp.to_vec())\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 23, "score": 88929.56857350146 }, { "content": "#[derive(Debug, Default, Deserialize, Serialize)]\n\nstruct Serialization {\n\n v: u8,\n\n i: Option<String>,\n\n i64: Option<ByteString>,\n\n l: Option<String>,\n\n l64: Option<String>,\n\n c: Vec<Caveat>,\n\n s: Option<Vec<u8>>,\n\n s64: Option<String>,\n\n}\n\n\n\nimpl Serialization {\n\n fn from_macaroon(macaroon: Macaroon) -> Result<Serialization> {\n\n let mut serialized: Serialization = Serialization {\n\n v: 2,\n\n i: None,\n\n i64: Some(macaroon.identifier()),\n\n l: macaroon.location(),\n\n l64: None,\n\n c: Vec::new(),\n", "file_path": "src/serialization/v2json.rs", "rank": 24, "score": 79764.32517463008 }, { "content": "use caveat::Caveat;\n\nuse error::MacaroonError;\n\nuse ByteString;\n\nuse Macaroon;\n\nuse MacaroonKey;\n\nuse Result;\n\n\n\n#[derive(Default)]\n\npub struct MacaroonBuilder {\n\n identifier: ByteString,\n\n location: Option<String>,\n\n signature: MacaroonKey,\n\n caveats: Vec<Caveat>,\n\n}\n\n\n\nimpl MacaroonBuilder {\n\n pub fn new() -> MacaroonBuilder {\n\n Default::default()\n\n }\n\n\n", "file_path": "src/serialization/macaroon_builder.rs", "rank": 25, "score": 74872.60189498741 }, { "content": " pub fn build(&self) -> Result<Macaroon> {\n\n if self.identifier.0.is_empty() {\n\n return Err(MacaroonError::BadMacaroon(\"No identifier 
found\"));\n\n }\n\n if self.signature.is_empty() {\n\n return Err(MacaroonError::BadMacaroon(\"No signature found\"));\n\n }\n\n\n\n Ok(Macaroon {\n\n identifier: self.identifier.clone(),\n\n location: self.location.clone(),\n\n signature: self.signature,\n\n caveats: self.caveats.clone(),\n\n })\n\n }\n\n}\n", "file_path": "src/serialization/macaroon_builder.rs", "rank": 26, "score": 74872.2302087141 }, { "content": " pub fn set_identifier(&mut self, identifier: ByteString) {\n\n self.identifier = identifier;\n\n }\n\n\n\n pub fn set_location(&mut self, location: &str) {\n\n self.location = Some((*location).to_string());\n\n }\n\n\n\n pub fn has_location(&self) -> bool {\n\n self.location.is_some()\n\n }\n\n\n\n pub fn set_signature(&mut self, signature: &[u8]) {\n\n self.signature.clone_from_slice(signature);\n\n }\n\n\n\n pub fn add_caveat(&mut self, caveat: Caveat) {\n\n self.caveats.push(caveat);\n\n }\n\n\n", "file_path": "src/serialization/macaroon_builder.rs", "rank": 27, "score": 74870.90733829193 }, { "content": "struct ByteStringVisitor;\n\n\n\nimpl<'de> Visitor<'de> for ByteStringVisitor {\n\n type Value = ByteString;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"base64 encoded string of bytes\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> std::result::Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n let raw = match base64::decode(value) {\n\n Ok(v) => v,\n\n Err(_) => return Err(E::custom(\"unable to base64 decode value\")),\n\n };\n\n Ok(ByteString(raw))\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 28, "score": 73885.49967241668 }, { "content": "struct Packet {\n\n key: String,\n\n value: Vec<u8>,\n\n}\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 29, "score": 73130.31029340094 }, { "content": "pub mod macaroon_builder;\n\npub mod v1;\n\npub mod v2;\n\npub mod v2json;\n\n\n\npub enum Format {\n\n V1,\n\n V2,\n\n V2JSON,\n\n}\n", "file_path": 
"src/serialization/mod.rs", "rank": 30, "score": 51966.25429930091 }, { "content": "use serde_json;\n\nuse std::{num, str, string};\n\n\n\n#[derive(Debug)]\n\npub enum MacaroonError {\n\n InitializationError,\n\n HashFailed,\n\n NotUTF8(str::Utf8Error),\n\n UnknownSerialization,\n\n DeserializationError(String),\n\n BadMacaroon(&'static str),\n\n KeyError(&'static str),\n\n DecryptionError(&'static str),\n\n InvalidMacaroon(&'static str),\n\n}\n\n\n\nimpl From<serde_json::Error> for MacaroonError {\n\n fn from(error: serde_json::Error) -> MacaroonError {\n\n MacaroonError::DeserializationError(format!(\"{}\", error))\n\n }\n", "file_path": "src/error.rs", "rank": 31, "score": 29017.793745126874 }, { "content": "}\n\n\n\nimpl From<string::FromUtf8Error> for MacaroonError {\n\n fn from(error: string::FromUtf8Error) -> MacaroonError {\n\n MacaroonError::DeserializationError(format!(\"{}\", error))\n\n }\n\n}\n\n\n\nimpl From<base64::DecodeError> for MacaroonError {\n\n fn from(error: base64::DecodeError) -> MacaroonError {\n\n MacaroonError::DeserializationError(format!(\"{}\", error))\n\n }\n\n}\n\n\n\nimpl From<num::ParseIntError> for MacaroonError {\n\n fn from(error: num::ParseIntError) -> MacaroonError {\n\n MacaroonError::DeserializationError(format!(\"{}\", error))\n\n }\n\n}\n\n\n\nimpl From<str::Utf8Error> for MacaroonError {\n\n fn from(error: str::Utf8Error) -> MacaroonError {\n\n MacaroonError::DeserializationError(format!(\"{}\", error))\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 32, "score": 29009.55813669893 }, { "content": " pub fn build(self) -> Result<Caveat> {\n\n if self.id.is_none() {\n\n return Err(MacaroonError::BadMacaroon(\"No identifier found\"));\n\n }\n\n if self.verifier_id.is_none() && self.location.is_none() {\n\n return Ok(new_first_party(self.id.unwrap()));\n\n }\n\n if self.verifier_id.is_some() && self.location.is_some() {\n\n return Ok(new_third_party(\n\n self.id.unwrap(),\n\n self.verifier_id.unwrap(),\n\n 
&self.location.unwrap(),\n\n ));\n\n }\n\n if self.verifier_id.is_none() {\n\n return Err(MacaroonError::BadMacaroon(\n\n \"Location but no verifier ID found\",\n\n ));\n\n }\n\n Err(MacaroonError::BadMacaroon(\n\n \"Verifier ID but no location found\",\n\n ))\n\n }\n\n}\n", "file_path": "src/caveat.rs", "rank": 33, "score": 28680.809372157935 }, { "content": "use crypto;\n\nuse crypto::MacaroonKey;\n\nuse error::MacaroonError;\n\nuse std::fmt::Debug;\n\nuse ByteString;\n\nuse Result;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Caveat {\n\n FirstParty(FirstParty),\n\n ThirdParty(ThirdParty),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct FirstParty {\n\n predicate: ByteString,\n\n}\n\n\n\nimpl FirstParty {\n\n pub fn predicate(&self) -> ByteString {\n", "file_path": "src/caveat.rs", "rank": 34, "score": 28679.347239334435 }, { "content": " pub fn add_id(&mut self, id: ByteString) {\n\n self.id = Some(id);\n\n }\n\n\n\n pub fn has_id(&self) -> bool {\n\n self.id.is_some()\n\n }\n\n\n\n pub fn add_verifier_id(&mut self, vid: ByteString) {\n\n self.verifier_id = Some(vid);\n\n }\n\n\n\n pub fn add_location(&mut self, location: String) {\n\n self.location = Some(location);\n\n }\n\n\n\n pub fn has_location(&self) -> bool {\n\n self.location.is_some()\n\n }\n\n\n", "file_path": "src/caveat.rs", "rank": 35, "score": 28675.791409479425 }, { "content": " }\n\n}\n\n\n\nimpl Caveat {\n\n pub fn sign(&self, key: &MacaroonKey) -> MacaroonKey {\n\n match self {\n\n Self::FirstParty(fp) => crypto::hmac(key, &fp.predicate),\n\n Self::ThirdParty(tp) => crypto::hmac2(key, &tp.verifier_id, &tp.id),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/caveat.rs", "rank": 36, "score": 28673.502544629322 }, { "content": " self.predicate.clone()\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ThirdParty {\n\n id: ByteString,\n\n verifier_id: ByteString,\n\n location: String,\n\n}\n\n\n\nimpl ThirdParty {\n\n pub fn id(&self) -> ByteString {\n\n 
self.id.clone()\n\n }\n\n pub fn verifier_id(&self) -> ByteString {\n\n self.verifier_id.clone()\n\n }\n\n pub fn location(&self) -> String {\n\n self.location.clone()\n", "file_path": "src/caveat.rs", "rank": 37, "score": 28671.616968115868 }, { "content": "use caveat::{Caveat, CaveatBuilder};\n\nuse error::MacaroonError;\n\nuse serialization::macaroon_builder::MacaroonBuilder;\n\nuse std::str;\n\nuse ByteString;\n\nuse Macaroon;\n\nuse Result;\n\n\n\n// Version 1 fields\n\nconst LOCATION: &str = \"location\";\n\nconst IDENTIFIER: &str = \"identifier\";\n\nconst SIGNATURE: &str = \"signature\";\n\nconst CID: &str = \"cid\";\n\nconst VID: &str = \"vid\";\n\nconst CL: &str = \"cl\";\n\n\n\nconst HEADER_SIZE: usize = 4;\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 40, "score": 24348.095016842428 }, { "content": " ];\n\n\n\n #[test]\n\n fn test_deserialize() {\n\n let serialized_json: Vec<u8> = SERIALIZED_JSON.as_bytes().to_vec();\n\n let macaroon = super::deserialize(&serialized_json).unwrap();\n\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n\n assert_eq!(2, macaroon.caveats().len());\n\n let predicate = match &macaroon.caveats()[0] {\n\n Caveat::FirstParty(fp) => fp.predicate(),\n\n _ => ByteString::default(),\n\n };\n\n assert_eq!(ByteString::from(\"account = 3735928559\"), predicate);\n\n let predicate = match &macaroon.caveats()[1] {\n\n Caveat::FirstParty(fp) => fp.predicate(),\n\n _ => ByteString::default(),\n\n };\n\n assert_eq!(ByteString::from(\"user = alice\"), predicate);\n\n assert_eq!(MacaroonKey::from(SIGNATURE), macaroon.signature());\n", "file_path": "src/serialization/v2json.rs", "rank": 46, "score": 24336.528220250464 }, { "content": " caveat_builder.add_verifier_id(ByteString(packet.value));\n\n }\n\n CL => caveat_builder.add_location(String::from_utf8(packet.value)?),\n\n _ => {\n\n return 
Err(MacaroonError::DeserializationError(String::from(\n\n \"Unknown key\",\n\n )))\n\n }\n\n };\n\n }\n\n Ok(builder.build()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use ByteString;\n\n use Caveat;\n\n use Macaroon;\n\n use MacaroonKey;\n\n\n", "file_path": "src/serialization/v1.rs", "rank": 47, "score": 24335.884453923467 }, { "content": " let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap();\n\n assert!(macaroon.location().is_some());\n\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n\n assert_eq!(signature, macaroon.signature());\n\n assert_eq!(2, macaroon.caveats().len());\n\n let predicate = match &macaroon.caveats()[0] {\n\n Caveat::FirstParty(fp) => fp.predicate(),\n\n _ => ByteString::default(),\n\n };\n\n assert_eq!(ByteString::from(\"account = 3735928559\"), predicate);\n\n let predicate = match &macaroon.caveats()[1] {\n\n Caveat::FirstParty(fp) => fp.predicate(),\n\n _ => ByteString::default(),\n\n };\n\n assert_eq!(ByteString::from(\"user = alice\"), predicate);\n\n }\n\n\n\n #[test]\n\n fn test_serialize_deserialize() {\n", "file_path": "src/serialization/v1.rs", "rank": 48, "score": 24335.78916764625 }, { "content": "use caveat;\n\nuse caveat::CaveatBuilder;\n\nuse error::MacaroonError;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json;\n\nuse serialization::macaroon_builder::MacaroonBuilder;\n\nuse std::str;\n\nuse ByteString;\n\nuse Macaroon;\n\nuse Result;\n\n\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n", "file_path": "src/serialization/v2json.rs", "rank": 49, "score": 24334.687193147674 }, { "content": " packet.value.len()\n\n );\n\n return Err(MacaroonError::DeserializationError(String::from(\n\n \"Illegal signature \\\n\n length in \\\n\n packet\",\n\n )));\n\n }\n\n builder.set_signature(&packet.value);\n\n }\n\n CID => {\n\n if caveat_builder.has_id() {\n\n builder.add_caveat(caveat_builder.build()?);\n\n 
caveat_builder = CaveatBuilder::new();\n\n caveat_builder.add_id(ByteString(packet.value));\n\n } else {\n\n caveat_builder.add_id(ByteString(packet.value));\n\n }\n\n }\n\n VID => {\n", "file_path": "src/serialization/v1.rs", "rank": 53, "score": 24330.90904525722 }, { "content": "\n\n let raw_sig = match ser.s {\n\n Some(sig) => sig,\n\n None => match ser.s64 {\n\n Some(sig) => base64::decode_config(&sig, base64::URL_SAFE)?,\n\n None => {\n\n return Err(MacaroonError::DeserializationError(\n\n \"No signature found\".into(),\n\n ))\n\n }\n\n },\n\n };\n\n if raw_sig.len() != 32 {\n\n return Err(MacaroonError::DeserializationError(\n\n \"Illegal signature length\".into(),\n\n ));\n\n }\n\n\n\n builder.set_signature(&raw_sig);\n\n\n", "file_path": "src/serialization/v2json.rs", "rank": 55, "score": 24329.46996729696 }, { "content": " let mut caveat_builder: CaveatBuilder = CaveatBuilder::new();\n\n for c in ser.c {\n\n caveat_builder.add_id(match c.i {\n\n Some(id) => id.into(),\n\n None => match c.i64 {\n\n Some(id64) => id64,\n\n None => {\n\n return Err(MacaroonError::DeserializationError(String::from(\n\n \"No caveat \\\n\n ID found\",\n\n )))\n\n }\n\n },\n\n });\n\n match c.l {\n\n Some(loc) => caveat_builder.add_location(loc),\n\n None => {\n\n if let Some(loc64) = c.l64 {\n\n caveat_builder.add_location(String::from_utf8(base64::decode_config(\n\n &loc64,\n", "file_path": "src/serialization/v2json.rs", "rank": 56, "score": 24329.34218494244 }, { "content": " return Err(MacaroonError::DeserializationError(String::from(\n\n \"Found i and i64 fields\",\n\n )));\n\n }\n\n if ser.l.is_some() && ser.l64.is_some() {\n\n return Err(MacaroonError::DeserializationError(String::from(\n\n \"Found l and l64 fields\",\n\n )));\n\n }\n\n if ser.s.is_some() && ser.s64.is_some() {\n\n return Err(MacaroonError::DeserializationError(String::from(\n\n \"Found s and s64 fields\",\n\n )));\n\n }\n\n\n\n let mut builder: MacaroonBuilder = MacaroonBuilder::new();\n\n 
builder.set_identifier(match ser.i {\n\n Some(id) => id.into(),\n\n None => match ser.i64 {\n\n Some(id) => id,\n", "file_path": "src/serialization/v2json.rs", "rank": 57, "score": 24328.82987562934 }, { "content": " assert!(macaroon.location().is_some());\n\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n\n assert_eq!(1, macaroon.caveats().len());\n\n let predicate = match &macaroon.caveats()[0] {\n\n Caveat::FirstParty(fp) => fp.predicate(),\n\n _ => ByteString::default(),\n\n };\n\n assert_eq!(ByteString::from(\"account = 3735928559\"), predicate);\n\n assert_eq!(signature, macaroon.signature());\n\n }\n\n\n\n #[test]\n\n fn test_deserialize_two_caveats() {\n\n let serialized = \"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAxZGNpZCBhY2NvdW50ID0gMzczNTkyODU1OQowMDE1Y2lkIHVzZXIgPSBhbGljZQowMDJmc2lnbmF0dXJlIEvpZ80eoMaya69qSpTumwWxWIbaC6hejEKpPI0OEl78Cg\";\n\n let signature: MacaroonKey = [\n\n 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88,\n\n 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252,\n\n ]\n\n .into();\n", "file_path": "src/serialization/v1.rs", "rank": 58, "score": 24327.073036974412 }, { "content": " }\n\n\n\n #[test]\n\n fn test_serialize_deserialize() {\n\n let mut macaroon = Macaroon::create(\n\n Some(\"http://example.org/\".into()),\n\n &SIGNATURE.into(),\n\n \"keyid\".into(),\n\n )\n\n .unwrap();\n\n macaroon.add_first_party_caveat(\"user = alice\".into());\n\n macaroon.add_third_party_caveat(\n\n \"https://auth.mybank.com/\",\n\n &\"my key\".into(),\n\n \"keyid\".into(),\n\n );\n\n let serialized = macaroon.serialize(Format::V2JSON).unwrap();\n\n let other = Macaroon::deserialize(&serialized).unwrap();\n\n assert_eq!(macaroon, other);\n\n }\n\n}\n", "file_path": "src/serialization/v2json.rs", "rank": 59, "score": 24326.98001114545 }, { "content": " None => {\n\n return 
Err(MacaroonError::DeserializationError(String::from(\n\n \"No identifier \\\n\n found\",\n\n )))\n\n }\n\n },\n\n });\n\n\n\n match ser.l {\n\n Some(loc) => builder.set_location(&loc),\n\n None => {\n\n if let Some(loc) = ser.l64 {\n\n builder.set_location(&String::from_utf8(base64::decode_config(\n\n &loc,\n\n base64::URL_SAFE,\n\n )?)?)\n\n }\n\n }\n\n };\n", "file_path": "src/serialization/v2json.rs", "rank": 60, "score": 24326.974389670886 }, { "content": " #[test]\n\n fn test_deserialize() {\n\n let mut serialized = \"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAyZnNpZ25hdHVyZSB83ueSURxbxvUoSFgF3-myTnheKOKpkwH51xHGCeOO9wo\";\n\n let mut signature: MacaroonKey = [\n\n 124, 222, 231, 146, 81, 28, 91, 198, 245, 40, 72, 88, 5, 223, 233, 178, 78, 120, 94,\n\n 40, 226, 169, 147, 1, 249, 215, 17, 198, 9, 227, 142, 247,\n\n ]\n\n .into();\n\n let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap();\n\n assert!(macaroon.location().is_some());\n\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n\n assert_eq!(signature, macaroon.signature());\n\n serialized = \"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAxZGNpZCBhY2NvdW50ID0gMzczNTkyODU1OQowMDJmc2lnbmF0dXJlIPVIB_bcbt-Ivw9zBrOCJWKjYlM9v3M5umF2XaS9JZ2HCg\";\n\n signature = [\n\n 245, 72, 7, 246, 220, 110, 223, 136, 191, 15, 115, 6, 179, 130, 37, 98, 163, 98, 83,\n\n 61, 191, 115, 57, 186, 97, 118, 93, 164, 189, 37, 157, 135,\n\n ]\n\n .into();\n\n let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap();\n", "file_path": "src/serialization/v1.rs", "rank": 62, "score": 24324.447085264626 }, { "content": " let mut macaroon: Macaroon =\n\n Macaroon::create(Some(\"http://example.org/\".into()), &\"my key\".into(), \"keyid\".into()).unwrap();\n\n macaroon.add_first_party_caveat(\"account = 3735928559\".into());\n\n 
macaroon.add_first_party_caveat(\"user = alice\".into());\n\n macaroon.add_third_party_caveat(\n\n \"https://auth.mybank.com\",\n\n &\"caveat key\".into(),\n\n \"caveat\".into(),\n\n );\n\n let serialized = macaroon.serialize(super::super::Format::V1).unwrap();\n\n let deserialized = Macaroon::deserialize(&serialized).unwrap();\n\n assert_eq!(macaroon, deserialized);\n\n }\n\n}\n", "file_path": "src/serialization/v1.rs", "rank": 63, "score": 24322.646789114013 }, { "content": " s: None,\n\n s64: Some(base64::encode_config(\n\n &macaroon.signature(),\n\n base64::URL_SAFE,\n\n )),\n\n };\n\n for c in macaroon.caveats() {\n\n match c {\n\n caveat::Caveat::FirstParty(fp) => {\n\n let serialized_caveat: Caveat = Caveat {\n\n i: None,\n\n i64: Some(fp.predicate()),\n\n l: None,\n\n l64: None,\n\n v: None,\n\n v64: None,\n\n };\n\n serialized.c.push(serialized_caveat);\n\n }\n\n caveat::Caveat::ThirdParty(tp) => {\n", "file_path": "src/serialization/v2json.rs", "rank": 64, "score": 24322.15670396213 }, { "content": " let serialized_caveat: Caveat = Caveat {\n\n i: None,\n\n i64: Some(tp.id()),\n\n l: Some(tp.location()),\n\n l64: None,\n\n v: None,\n\n v64: Some(tp.verifier_id()),\n\n };\n\n serialized.c.push(serialized_caveat);\n\n }\n\n }\n\n }\n\n\n\n Ok(serialized)\n\n }\n\n}\n\n\n\nimpl Macaroon {\n\n fn from_json(ser: Serialization) -> Result<Macaroon> {\n\n if ser.i.is_some() && ser.i64.is_some() {\n", "file_path": "src/serialization/v2json.rs", "rank": 65, "score": 24320.608583909067 }, { "content": " base64::URL_SAFE,\n\n )?)?)\n\n }\n\n }\n\n };\n\n match c.v {\n\n Some(vid) => caveat_builder.add_verifier_id(vid.into()),\n\n None => {\n\n if let Some(vid64) = c.v64 {\n\n caveat_builder.add_verifier_id(vid64)\n\n }\n\n }\n\n };\n\n builder.add_caveat(caveat_builder.build()?);\n\n caveat_builder = CaveatBuilder::new();\n\n }\n\n\n\n Ok(builder.build()?)\n\n }\n\n}\n\n\n", "file_path": "src/serialization/v2json.rs", "rank": 66, "score": 24320.181867541545 }, { 
"content": "### Examples\n\n\n\n```rust\n\nuse macaroon::{Macaroon, Verifier, MacaroonKey};\n\n\n\n// Initialize to make crypto primitives thread-safe\n\nmacaroon::initialize().unwrap(); // Force panic if initialization fails\n\n\n\n// Create our key\n\nlet key = \"key\".into();\n\n\n\n// Create our macaroon. A location is optional.\n\nlet mut macaroon = match Macaroon::create(Some(\"location\".into()), &key, \"id\".into()) {\n\n Ok(macaroon) => macaroon,\n\n Err(error) => panic!(\"Error creating macaroon: {:?}\", error),\n\n};\n\n\n\n// Add our first-party caveat. We say that only someone with account 12345678\n\n// is authorized to access whatever the macaroon is protecting\n\n// Note that we can add however many of these we want, with different predicates\n\nmacaroon.add_first_party_caveat(\"account = 12345678\".into());\n\n\n\n// Now we verify the macaroon\n\n// First we create the verifier\n\nlet mut verifier = Verifier::default();\n\n\n\n// We assert that the account number is \"12345678\"\n\nverifier.satisfy_exact(\"account = 12345678\".into());\n\n\n\n// Now we verify the macaroon. It should return `Ok(true)` if the user is authorized\n\nmatch verifier.verify(&macaroon, &key, Default::default()) {\n\n Ok(_) => println!(\"Macaroon verified!\"),\n\n Err(error) => println!(\"Error validating macaroon: {:?}\", error),\n\n}\n\n\n\n// Now, let's add a third-party caveat, which just says that we need our third party\n\n// to authorize this for us as well.\n\n\n\n// Create a key for the third party caveat\n\nlet other_key = \"different key\".into();\n\n\n", "file_path": "README.md", "rank": 67, "score": 18552.055996464507 }, { "content": "## What are Macaroons?\n\n\n\nMacaroons are bearer tokens (similar to cookies) which encode within them\n\ncriteria within which the authorization is allowed to take place (referred to as\n\n\"caveats\"). For instance, authorization could be restricted to a particular\n\nuser, account, time of day, really anything. 
These criteria can be either\n\nevaluated locally (a \"first-party caveat\"), or using special macaroons\n\n(\"discharge macaroons\") generated by a third party (a \"third-party caveat\").\n\n\n\nA first-party caveat consists simply of a predicate which, when evaluated as\n\ntrue, authorizes the caveat. The predicate is a string which is either evaluated\n\nusing strict string comparison (`satisfy_exact`), or interpreted using a\n\nprovided function (`satisfy_general`).\n\n\n\nA third-party caveat consists of a location string, an identifier, and a\n\nspecially-generated signing key to authenticate the generated discharge\n\nmacaroons. The key and identifier is passed to the third-party who generates the\n\ndischarge macaroons. The receiver then binds each discharge macaroon to the\n\noriginal macaroon.\n\n\n\nDuring verification of a third-party caveat, a discharge macaroon is found from\n\nthose received whose identifier matches that of the caveat. The binding\n\nsignature is verified, and the discharge macaroon's caveats are verified using\n\nthe same process as the original macaroon.\n\n\n\nThe macaroon is considered authorized only if all its caveats are authorized by\n\nthe above process.\n\n\n\n## Functionality Implemented\n\n\n\n- Creating macaroons, and adding first- and third-party caveats\n\n- Serialization - versions 1, 2 & 2J are supported\n\n- Validation (mostly for validating deserialized macaroons)\n\n- Creation of discharge macaroons\n\n- Verification of both first- and third-party caveats (the latter using\n\n discharge macaroons)\n\n\n\n## Usage\n\n\n\nUntil we release a new version and get the name in crates, you'll have to\n\ninclude this dependency like so in your `Cargo.toml`:\n\n\n\n```\n\n[dependencies]\n\nmacaroon = { git = \"https://github.com/macaroon-rs/macaroon\", branch = \"trunk\" }\n\n```\n\n\n", "file_path": "README.md", "rank": 68, "score": 18550.70062605312 }, { "content": "macaroon.add_third_party_caveat(\"https://auth.mybank\", 
&other_key, \"caveat id\".into());\n\n\n\n// When we're ready to verify a third-party caveat, we use the location\n\n// (in this case, \"https://auth.mybank\") to retrieve the discharge macaroons we use to verify.\n\n// These would be created by the third party like so:\n\nlet mut discharge = match Macaroon::create(Some(\"http://auth.mybank/\".into()),\n\n &other_key,\n\n \"caveat id\".into()) {\n\n Ok(discharge) => discharge,\n\n Err(error) => panic!(\"Error creating discharge macaroon: {:?}\", error),\n\n};\n\n// And this is the criterion the third party requires for authorization\n\ndischarge.add_first_party_caveat(\"account = 12345678\".into());\n\n\n\n// Once we receive the discharge macaroon, we bind it to the original macaroon\n\nmacaroon.bind(&mut discharge);\n\n\n\n// Then we can verify using the same verifier (which will verify both the existing\n\n// first-party caveat and the third party one)\n\nmatch verifier.verify(&macaroon, &key, vec![discharge]) {\n\n Ok(_) => println!(\"Macaroon verified!\"),\n\n Err(error) => println!(\"Error validating macaroon: {:?}\", error),\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 69, "score": 18549.672162410287 }, { "content": "## Backwards compatibility\n\n\n\nAs the original project is currently only released as a minor version, we expect to make\n\nbreaking changes to the API, especially as we begin to use it in more real-world\n\nscenarios. However, all of these changes will be enumerated in each version's\n\nchangelog and release notes. Once we have found an API that is sane and stable,\n\nwe will release a 1.0, after which point, all versions of the 1.X line will be\n\nbackwards compatible per [semver](https://semver.org).\n\n\n\n## Contributing\n\n\n\nWe :heart: any contributions. Any fixes to make things simpler or more idiomatic\n\nare also more than welcome. Please open a pull request if you have something you\n\nwant to contribute. 
As the project matures, we will add a more detailed\n\ncontributors guide\n", "file_path": "README.md", "rank": 70, "score": 18541.164367518257 }, { "content": "<img src=\"https://svgshare.com/i/JxB.svg\" alt=\"Ferris holding French macaroons\" width=\"200\"/>\n\n\n\n# macaroon\n\n\n\nRust implementation of\n\n[macaroons](https://research.google.com/pubs/pub41892.html).\n\n\n\n[![Build Status](https://github.com/macaroon-rs/macaroon/workflows/build_and_test/badge.svg?branch=trunk)](https://github.com/macaroon-rs/macaroon/actions)\n\n[![codecov](https://codecov.io/gh/macaroon-rs/macaroon/branch/trunk/graph/badge.svg)](https://codecov.io/gh/macaroon-rs/macaroon)\n\n\n\n**NOTE:** This project builds on previous work done by [Deis Labs](https://github.com/deislabs/libmacaroon-rs)\n\nand the original [libmacaroon-rs](https://github.com/jacklund/libmacaroon-rs).\n\nWe are currently working with [getting in touch](https://github.com/jacklund/libmacaroon-rs/issues/4)\n\nwith the author and other interested parties to see if we can transfer the crate\n\nname to us as we are working on maintaining this. Large breaking changes have been made to the API of the [macaroon crate](https://crates.io/crates/macaroon). The full list of changes will be listed in\n\nthe changelog once we release a new minor version for that same crate.\n\n\n", "file_path": "README.md", "rank": 71, "score": 18538.5349716563 }, { "content": "# macaroon Change Log\n\n\n\n## Version 0.1.1 - Feb 22, 2017 (libmacaroon-rs)\n\n\n\n- Coverage using [coveralls.io](https://coveralls.io/github/jacklund/libmacaroon-rs?branch=trunk)\n\n- Expanded coverage of unit tests\n\n- Bug fix for version 1 deserialization\n\n\n\n## Version 0.1.0 - Feb 20, 2017 (libmacaroon-rs)\n\n\n\nInitial commit. 
Functionality:\n\n\n\n- Macaroons with first- and third-party caveats\n\n- Serialization/Deserialization using [libmacaroons](https://github.com/rescrv/libmacaroons) version 1, 2, and 2J (JSON) formats\n\n- Verification of first-party caveats using either exact string comparison or submitted verification function\n\n- Verification of third-party caveats using discharge macaroons\n", "file_path": "ChangeLog.md", "rank": 72, "score": 17942.439032273585 }, { "content": " /// it with. You can use a bare str or &[u8] containing arbitrary data with\n\n /// `into` to automatically generate a suitable key\n\n ///\n\n /// # Errors\n\n /// Returns `MacaroonError::BadMacaroon` if the identifier is is empty\n\n pub fn create(\n\n location: Option<String>,\n\n key: &MacaroonKey,\n\n identifier: ByteString,\n\n ) -> Result<Macaroon> {\n\n let macaroon: Macaroon = Macaroon {\n\n location,\n\n identifier: identifier.clone(),\n\n signature: crypto::hmac(key, &identifier),\n\n caveats: Vec::new(),\n\n };\n\n debug!(\"Macaroon::create: {:?}\", macaroon);\n\n macaroon.validate()\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 73, "score": 30.82465642570477 }, { "content": " .collect()\n\n }\n\n\n\n /// Validate the macaroon - used mainly for validating deserialized macaroons\n\n fn validate(self) -> Result<Self> {\n\n if self.identifier.0.is_empty() {\n\n return Err(MacaroonError::BadMacaroon(\"No macaroon identifier\"));\n\n }\n\n if self.signature.is_empty() {\n\n return Err(MacaroonError::BadMacaroon(\"No macaroon signature\"));\n\n }\n\n\n\n Ok(self)\n\n }\n\n\n\n /// Add a first-party caveat to the macaroon\n\n ///\n\n /// A first-party caveat is just a string predicate in some\n\n /// DSL which can be verified either by exact string match,\n\n /// or by using a function to parse the string and validate it\n", "file_path": "src/lib.rs", "rank": 74, "score": 26.57345160839809 }, { "content": "extern crate base64;\n\nextern crate serde;\n\nextern crate serde_json;\n\nextern crate 
sodiumoxide;\n\n\n\nmod caveat;\n\npub mod crypto;\n\npub mod error;\n\nmod serialization;\n\npub mod verifier;\n\n\n\npub use caveat::Caveat;\n\npub use crypto::MacaroonKey;\n\npub use error::MacaroonError;\n\npub use serialization::Format;\n\npub use verifier::Verifier;\n\n\n\nuse serde::de::Visitor;\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\nuse std::fmt;\n\n\n\npub type Result<T> = std::result::Result<T, MacaroonError>;\n\n\n\n/// Initializes the cryptographic libraries. Although you can use macaroon-rs without\n\n/// calling this, the underlying random-number generator is not guaranteed to be thread-safe\n\n/// if you don't.\n", "file_path": "src/lib.rs", "rank": 75, "score": 25.898027489738098 }, { "content": "\n\nimpl<'de> Deserialize<'de> for ByteString {\n\n fn deserialize<D>(deserializer: D) -> std::result::Result<ByteString, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_str(ByteStringVisitor)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Macaroon {\n\n identifier: ByteString,\n\n location: Option<String>,\n\n signature: MacaroonKey,\n\n caveats: Vec<Caveat>,\n\n}\n\n\n\nimpl Macaroon {\n\n /// Construct a macaroon, given a location and identifier, and a key to sign\n", "file_path": "src/lib.rs", "rank": 76, "score": 25.339136265595 }, { "content": " /// (see Verifier for more info).\n\n pub fn add_first_party_caveat(&mut self, predicate: ByteString) {\n\n let caveat: caveat::Caveat = caveat::new_first_party(predicate);\n\n self.signature = caveat.sign(&self.signature);\n\n self.caveats.push(caveat);\n\n debug!(\"Macaroon::add_first_party_caveat: {:?}\", self);\n\n }\n\n\n\n /// Add a third-party caveat to the macaroon\n\n ///\n\n /// A third-party caveat is a caveat which must be verified by a third party\n\n /// using macaroons provided by them (referred to as \"discharge macaroons\").\n\n pub fn add_third_party_caveat(&mut self, location: &str, key: &MacaroonKey, id: 
ByteString) {\n\n let vid: Vec<u8> = crypto::encrypt_key(&self.signature, key);\n\n let caveat: caveat::Caveat = caveat::new_third_party(id, ByteString(vid), location);\n\n self.signature = caveat.sign(&self.signature);\n\n self.caveats.push(caveat);\n\n debug!(\"Macaroon::add_third_party_caveat: {:?}\", self);\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 77, "score": 24.73527870220783 }, { "content": " serialization::Format::V2 => serialization::v2::serialize(self),\n\n serialization::Format::V2JSON => serialization::v2json::serialize(self),\n\n }\n\n }\n\n\n\n /// Deserialize a macaroon\n\n pub fn deserialize(data: &[u8]) -> Result<Macaroon> {\n\n let macaroon: Macaroon = match data[0] as char {\n\n '{' => serialization::v2json::deserialize(data)?,\n\n '\\x02' => serialization::v2::deserialize(data)?,\n\n 'a'..='z' | 'A'..='Z' | '0'..='9' | '+' | '-' | '/' | '_' => {\n\n serialization::v1::deserialize(data)?\n\n }\n\n _ => return Err(MacaroonError::UnknownSerialization),\n\n };\n\n macaroon.validate()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/lib.rs", "rank": 78, "score": 24.248859496419314 }, { "content": " /// Returns a clone of the identifier for the macaroon\n\n pub fn identifier(&self) -> ByteString {\n\n self.identifier.clone()\n\n }\n\n\n\n /// Returns the location for the macaroon\n\n pub fn location(&self) -> Option<String> {\n\n self.location.clone()\n\n }\n\n\n\n /// Returns the macaroon's signature\n\n pub fn signature(&self) -> MacaroonKey {\n\n self.signature\n\n }\n\n\n\n pub fn caveats(&self) -> Vec<Caveat> {\n\n self.caveats.clone()\n\n }\n\n\n\n /// Retrieve a list of the first-party caveats for the macaroon\n", "file_path": "src/lib.rs", "rank": 79, "score": 22.725184115427897 }, { "content": " /// Bind a discharge macaroon to the original macaroon\n\n ///\n\n /// When a macaroon with third-party caveats must be authorized, you send off to the various\n\n /// locations specified in the caveats, sending the caveat ID and key, and 
receive a set\n\n /// of one or more \"discharge macaroons\" which are used to verify the caveat. In order to ensure\n\n /// that the discharge macaroons aren't re-used in some other context, we bind them to the original\n\n /// macaroon so that they can't be used in a different context.\n\n pub fn bind(&self, discharge: &mut Macaroon) {\n\n let zero_key: MacaroonKey = [0; 32].into();\n\n discharge.signature = crypto::hmac2(&zero_key, &self.signature, &discharge.signature);\n\n debug!(\n\n \"Macaroon::bind: original: {:?}, discharge: {:?}\",\n\n self, discharge\n\n );\n\n }\n\n\n\n /// Serialize the macaroon using the serialization format provided\n\n pub fn serialize(&self, format: serialization::Format) -> Result<Vec<u8>> {\n\n match format {\n\n serialization::Format::V1 => serialization::v1::serialize(self),\n", "file_path": "src/lib.rs", "rank": 80, "score": 22.380296417564445 }, { "content": " let key: MacaroonKey = b\"this is a super duper secret key\".into();\n\n let mut macaroon =\n\n Macaroon::create(Some(\"location\".into()), &key, \"identifier\".into()).unwrap();\n\n macaroon.add_first_party_caveat(\"predicate\".into());\n\n assert_eq!(1, macaroon.caveats.len());\n\n let predicate = match &macaroon.caveats[0] {\n\n Caveat::FirstParty(fp) => fp.predicate(),\n\n _ => ByteString::default(),\n\n };\n\n assert_eq!(ByteString::from(\"predicate\"), predicate);\n\n assert_eq!(signature, macaroon.signature);\n\n assert_eq!(&macaroon.caveats[0], &macaroon.first_party_caveats()[0]);\n\n }\n\n\n\n #[test]\n\n fn create_macaroon_with_third_party_caveat() {\n\n let key: MacaroonKey = \"this is a super duper secret key\".into();\n\n let mut macaroon =\n\n Macaroon::create(Some(\"location\".into()), &key, \"identifier\".into()).unwrap();\n\n let location = \"https://auth.mybank.com\";\n", "file_path": "src/lib.rs", "rank": 81, "score": 22.08490878812388 }, { "content": "mod tests {\n\n use super::ByteString;\n\n use super::Caveat;\n\n use super::Macaroon;\n\n use 
super::MacaroonKey;\n\n use Result;\n\n\n\n #[test]\n\n fn create_macaroon() {\n\n let signature: MacaroonKey = [\n\n 118, 104, 143, 143, 101, 76, 166, 146, 84, 159, 42, 235, 57, 143, 191, 198, 87, 96, 27,\n\n 165, 196, 100, 12, 178, 175, 29, 112, 1, 253, 179, 216, 58,\n\n ]\n\n .into();\n\n let key: MacaroonKey = b\"this is a super duper secret key\".into();\n\n let macaroon_res = Macaroon::create(Some(\"location\".into()), &key, \"identifier\".into());\n\n assert!(macaroon_res.is_ok());\n\n let macaroon = macaroon_res.unwrap();\n\n assert!(macaroon.location.is_some());\n\n assert_eq!(\"location\", macaroon.location.unwrap());\n", "file_path": "src/lib.rs", "rank": 82, "score": 21.789216774988635 }, { "content": " ) -> Result<()> {\n\n let mut sig = crypto::hmac(key, &m.identifier());\n\n for c in m.caveats() {\n\n sig = match &c {\n\n Caveat::ThirdParty(tp) => {\n\n let caveat_key = crypto::decrypt_key(&sig, &tp.verifier_id().0)?;\n\n let dm = discharge_set.remove(&tp.id()).ok_or_else(|| MacaroonError::InvalidMacaroon(\"no discharge macaroon found (or discharge has already been used) for caveat\"))?;\n\n self.verify_with_sig(root_sig, &dm, &caveat_key, discharge_set)?;\n\n c.sign(&sig)\n\n }\n\n Caveat::FirstParty(fp) => {\n\n // This checks exact caveats first and then general second\n\n // if it fails due to logic short circuiting\n\n if !(self.exact.contains(&fp.predicate())\n\n || self.verify_general(&fp.predicate()))\n\n {\n\n // If both failed, it means we weren't successful at either\n\n return Err(MacaroonError::InvalidMacaroon(\"caveats are not valid\"));\n\n }\n\n c.sign(&sig)\n", "file_path": "src/verifier.rs", "rank": 83, "score": 20.937539646154057 }, { "content": "//! macaroon.\n\n//!\n\n//! During verification of a third-party caveat, a discharge macaroon is found from those received whose identifier\n\n//! matches that of the caveat. The binding signature is verified, and the discharge macaroon's caveats are verified\n\n//! 
using the same process as the original macaroon.\n\n//!\n\n//! The macaroon is considered authorized only if all its caveats are authorized by the above process.\n\n//!\n\n//! # Example\n\n//! ```\n\n//! use macaroon::{Macaroon, Verifier, MacaroonKey};\n\n//!\n\n//! // Initialize to make crypto primitives thread-safe\n\n//! macaroon::initialize().unwrap(); // Force panic if initialization fails\n\n//!\n\n//! // Create our key\n\n//! let key = \"key\".into();\n\n//!\n\n//! // Create our macaroon. A location is optional.\n\n//! let mut macaroon = match Macaroon::create(Some(\"location\".into()), &key, \"id\".into()) {\n", "file_path": "src/lib.rs", "rank": 84, "score": 18.53959315157971 }, { "content": " let mut discharge_set = discharges\n\n .iter()\n\n .map(|d| (d.identifier.clone(), d.clone()))\n\n .collect::<HashMap<ByteString, Macaroon>>();\n\n self.verify_with_sig(&m.signature, m, key, &mut discharge_set)?;\n\n // Now check that all discharges were used\n\n if !discharge_set.is_empty() {\n\n return Err(MacaroonError::InvalidMacaroon(\n\n \"all discharge macaroons were not used\",\n\n ));\n\n }\n\n Ok(())\n\n }\n\n\n\n fn verify_with_sig(\n\n &self,\n\n root_sig: &MacaroonKey,\n\n m: &Macaroon,\n\n key: &MacaroonKey,\n\n discharge_set: &mut HashMap<ByteString, Macaroon>,\n", "file_path": "src/verifier.rs", "rank": 85, "score": 18.260345125125934 }, { "content": " assert_eq!(ByteString::from(\"identifier\"), macaroon.identifier);\n\n assert_eq!(signature, macaroon.signature);\n\n assert_eq!(0, macaroon.caveats.len());\n\n }\n\n\n\n #[test]\n\n fn create_invalid_macaroon() {\n\n let key: MacaroonKey = \"this is a super duper secret key\".into();\n\n let macaroon_res: Result<Macaroon> =\n\n Macaroon::create(Some(\"location\".into()), &key, \"\".into());\n\n assert!(macaroon_res.is_err());\n\n }\n\n\n\n #[test]\n\n fn create_macaroon_with_first_party_caveat() {\n\n let signature: MacaroonKey = [\n\n 68, 26, 16, 191, 99, 247, 36, 188, 53, 140, 17, 49, 218, 48, 129, 
178, 14, 196, 187,\n\n 82, 117, 4, 232, 42, 251, 131, 86, 98, 133, 201, 45, 6,\n\n ]\n\n .into();\n", "file_path": "src/lib.rs", "rank": 86, "score": 18.22226774756398 }, { "content": "//! discharge.add_first_party_caveat(\"account = 12345678\".into());\n\n//!\n\n//! // Once we receive the discharge macaroon, we bind it to the original macaroon\n\n//! macaroon.bind(&mut discharge);\n\n//!\n\n//! // Then we can verify using the same verifier (which will verify both the existing\n\n//! // first-party caveat and the third party one)\n\n//! match verifier.verify(&macaroon, &key, vec![discharge]) {\n\n//! Ok(_) => println!(\"Macaroon verified!\"),\n\n//! Err(error) => println!(\"Error validating macaroon: {:?}\", error),\n\n//! }\n\n//! ```\n\n//! # Supported Features\n\n//! This crate supports all the following features:\n\n//!\n\n//! - verification of first-party caveats either via exact string match or passed-in function\n\n//! - verification of third-party caveats using discharge macaroons (including ones that themselves have embedded third-party caveats)\n\n//! 
- serialization and deserialization of caveats via version 1, 2 or 2J serialization formats (fully compatible with libmacaroons)\n\n#[macro_use]\n\nextern crate log;\n", "file_path": "src/lib.rs", "rank": 87, "score": 17.856280712858748 }, { "content": "\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl DerefMut for MacaroonKey {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl From<&str> for MacaroonKey {\n\n fn from(s: &str) -> Self {\n\n generate_derived_key(s.as_bytes())\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for MacaroonKey {\n\n fn from(b: &[u8]) -> Self {\n", "file_path": "src/crypto.rs", "rank": 88, "score": 17.185306165004956 }, { "content": "use error::MacaroonError;\n\nuse sodiumoxide::crypto::auth::hmacsha512256::{authenticate, gen_key, Key, Tag};\n\nuse sodiumoxide::crypto::secretbox;\n\nuse std::borrow::Borrow;\n\nuse std::ops::{Deref, DerefMut};\n\nuse Result;\n\n\n\nconst KEY_GENERATOR: MacaroonKey = MacaroonKey(*b\"macaroons-key-generator\\0\\0\\0\\0\\0\\0\\0\\0\\0\");\n\n\n\n// A convenience type for a MacaroonKey with helpful methods attached for\n\n// conversion. 
Using the default trait will return a randomly generated key\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct MacaroonKey([u8; sodiumoxide::crypto::auth::KEYBYTES]);\n\n\n\nimpl Default for MacaroonKey {\n\n fn default() -> Self {\n\n MacaroonKey(gen_key().0)\n\n }\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 89, "score": 17.14787559491373 }, { "content": "use crypto;\n\nuse std::collections::BTreeSet;\n\nuse std::collections::HashMap;\n\nuse ByteString;\n\nuse Caveat;\n\nuse Macaroon;\n\nuse MacaroonError;\n\nuse MacaroonKey;\n\nuse Result;\n\n\n\npub type VerifyFunc = fn(&ByteString) -> bool;\n\n\n\n#[derive(Default)]\n\npub struct Verifier {\n\n exact: BTreeSet<ByteString>,\n\n general: Vec<VerifyFunc>,\n\n}\n\n\n\nimpl Verifier {\n\n pub fn verify(&self, m: &Macaroon, key: &MacaroonKey, discharges: Vec<Macaroon>) -> Result<()> {\n", "file_path": "src/verifier.rs", "rank": 90, "score": 16.827818499073363 }, { "content": " }\n\n };\n\n }\n\n // If the root sig equals the newly generated sig, that means we reached\n\n // the end of the line and we are ok to return\n\n if root_sig == &sig {\n\n return Ok(());\n\n }\n\n // Check the bound signature equals the signature of the discharge\n\n // macaroon\n\n let zero_key: MacaroonKey = [0; 32].into();\n\n let bound_sig = crypto::hmac2(&zero_key, &ByteString(root_sig.to_vec()), &sig.into());\n\n if bound_sig != m.signature {\n\n return Err(MacaroonError::InvalidMacaroon(\"signature is not valid\"));\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn satisfy_exact(&mut self, b: ByteString) {\n\n self.exact.insert(b);\n", "file_path": "src/verifier.rs", "rank": 91, "score": 16.514642473492923 }, { "content": " }\n\n\n\n pub fn satisfy_general(&mut self, f: VerifyFunc) {\n\n self.general.push(f)\n\n }\n\n\n\n fn verify_general(&self, value: &ByteString) -> bool {\n\n for f in self.general.iter() {\n\n if f(value) {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n extern crate 
time;\n\n\n", "file_path": "src/verifier.rs", "rank": 92, "score": 15.74216693337223 }, { "content": " }\n\n}\n\n\n\nimpl Default for ByteString {\n\n fn default() -> ByteString {\n\n ByteString(Default::default())\n\n }\n\n}\n\n\n\nimpl fmt::Display for ByteString {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", base64::encode(&self.0))\n\n }\n\n}\n\n\n\nimpl Serialize for ByteString {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&self.to_string())\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 93, "score": 15.726382910969477 }, { "content": " let mut verifier = Verifier::default();\n\n verifier.satisfy_exact(\"account = 3735928559\".into());\n\n verifier\n\n .verify(&macaroon, &key, Default::default())\n\n .unwrap_err();\n\n let mut verifier = Verifier::default();\n\n verifier.satisfy_exact(\"user = alice\".into());\n\n verifier\n\n .verify(&macaroon, &key, Default::default())\n\n .unwrap_err();\n\n }\n\n\n\n fn after_time_verifier(caveat: &ByteString) -> bool {\n\n if !caveat.0.starts_with(b\"time > \") {\n\n return false;\n\n }\n\n let strcaveat = match std::str::from_utf8(&caveat.0) {\n\n Ok(s) => s,\n\n Err(_) => return false,\n\n };\n", "file_path": "src/verifier.rs", "rank": 94, "score": 13.975321587188105 }, { "content": "//! }\n\n//!\n\n//! // Now, let's add a third-party caveat, which just says that we need our third party\n\n//! // to authorize this for us as well.\n\n//!\n\n//! // Create a key for the third party caveat\n\n//! let other_key = \"different key\".into();\n\n//!\n\n//! macaroon.add_third_party_caveat(\"https://auth.mybank\", &other_key, \"caveat id\".into());\n\n//!\n\n//! // When we're ready to verify a third-party caveat, we use the location\n\n//! // (in this case, \"https://auth.mybank\") to retrieve the discharge macaroons we use to verify.\n\n//! 
// These would be created by the third party like so:\n\n//! let mut discharge = match Macaroon::create(Some(\"http://auth.mybank/\".into()),\n\n//! &other_key,\n\n//! \"caveat id\".into()) {\n\n//! Ok(discharge) => discharge,\n\n//! Err(error) => panic!(\"Error creating discharge macaroon: {:?}\", error),\n\n//! };\n\n//! // And this is the criterion the third party requires for authorization\n", "file_path": "src/lib.rs", "rank": 95, "score": 13.569750863352828 }, { "content": "//! Ok(macaroon) => macaroon,\n\n//! Err(error) => panic!(\"Error creating macaroon: {:?}\", error),\n\n//! };\n\n//!\n\n//! // Add our first-party caveat. We say that only someone with account 12345678\n\n//! // is authorized to access whatever the macaroon is protecting\n\n//! // Note that we can add however many of these we want, with different predicates\n\n//! macaroon.add_first_party_caveat(\"account = 12345678\".into());\n\n//!\n\n//! // Now we verify the macaroon\n\n//! // First we create the verifier\n\n//! let mut verifier = Verifier::default();\n\n//!\n\n//! // We assert that the account number is \"12345678\"\n\n//! verifier.satisfy_exact(\"account = 12345678\".into());\n\n//!\n\n//! // Now we verify the macaroon. It should return `Ok(true)` if the user is authorized\n\n//! match verifier.verify(&macaroon, &key, Default::default()) {\n\n//! Ok(_) => println!(\"Macaroon verified!\"),\n\n//! 
Err(error) => println!(\"Error validating macaroon: {:?}\", error),\n", "file_path": "src/lib.rs", "rank": 96, "score": 13.337702147922133 }, { "content": " #[test]\n\n fn test_macaroon_two_exact_caveats() {\n\n let key: MacaroonKey = \"this is the key\".into();\n\n let mut macaroon = Macaroon::create(None, &key, \"testing\".into()).unwrap();\n\n macaroon.add_first_party_caveat(\"account = 3735928559\".into());\n\n macaroon.add_first_party_caveat(\"user = alice\".into());\n\n let mut verifier = Verifier::default();\n\n verifier.satisfy_exact(\"account = 3735928559\".into());\n\n verifier.satisfy_exact(\"user = alice\".into());\n\n verifier\n\n .verify(&macaroon, &key, Default::default())\n\n .unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_macaroon_two_exact_caveats_incomplete_verifier() {\n\n let key: MacaroonKey = \"this is the key\".into();\n\n let mut macaroon = Macaroon::create(None, &key, \"testing\".into()).unwrap();\n\n macaroon.add_first_party_caveat(\"account = 3735928559\".into());\n\n macaroon.add_first_party_caveat(\"user = alice\".into());\n", "file_path": "src/verifier.rs", "rank": 97, "score": 13.250982875627503 }, { "content": " Ok(plaintext) => Ok(Key::from_slice(&plaintext)\n\n .ok_or_else(|| MacaroonError::DecryptionError(\"given key is incorrect length\"))?\n\n .into()),\n\n Err(()) => {\n\n error!(\n\n \"crypto::decrypt: Unknown decryption error decrypting {:?}\",\n\n raw_data\n\n );\n\n Err(MacaroonError::DecryptionError(\"Unknown decryption error\"))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{decrypt_key, encrypt_key, MacaroonKey};\n\n\n\n #[test]\n\n fn test_encrypt_decrypt() {\n\n let secret: MacaroonKey = \"This is my encrypted key\\0\\0\\0\\0\\0\\0\\0\\0\".into();\n\n let key: MacaroonKey = \"This is my secret key\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\".into();\n\n let encrypted = encrypt_key(&key, &secret);\n\n let decrypted = decrypt_key(&key, &encrypted).unwrap();\n\n assert_eq!(secret, decrypted);\n\n }\n\n}\n", 
"file_path": "src/crypto.rs", "rank": 98, "score": 13.004904664021936 }, { "content": " let cav_key: MacaroonKey = \"My key\".into();\n\n let id = \"My Caveat\";\n\n macaroon.add_third_party_caveat(location, &cav_key, id.into());\n\n assert_eq!(1, macaroon.caveats.len());\n\n let cav_id = match &macaroon.caveats[0] {\n\n Caveat::ThirdParty(tp) => tp.id(),\n\n _ => ByteString::default(),\n\n };\n\n let cav_location = match &macaroon.caveats[0] {\n\n Caveat::ThirdParty(tp) => tp.location(),\n\n _ => String::default(),\n\n };\n\n assert_eq!(location, cav_location);\n\n assert_eq!(ByteString::from(id), cav_id);\n\n assert_eq!(&macaroon.caveats[0], &macaroon.third_party_caveats()[0]);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 99, "score": 12.586771273283125 } ]
Rust
src/main.rs
tcr/rager
b664f0a93c2a773d232b30f9827813d57a32d5f7
#[macro_use] extern crate structopt; extern crate termion; extern crate ransid; extern crate failure; extern crate libc; extern crate crossbeam_channel; use std::path::PathBuf; use structopt::StructOpt; use failure::Error; use termion::event::*; use termion::scroll; use termion::input::{TermRead, MouseTerminal}; use termion::raw::IntoRawMode; use termion::terminal_size; use termion::screen::*; use std::io::{Write, stdout, stdin, Stdout}; use crossbeam_channel::unbounded; use std::fs::File; use std::io::BufReader; use termion::raw::RawTerminal; enum RagerEvent { Line(Vec<u8>), ScrollDown, ScrollUp, Quit, EndInput, Home, End, PageUp, PageDown, } #[derive(Copy, Clone)] struct RagerChar(char, bool, bool, bool, bool, ransid::color::Color); struct Buffer(usize, usize, RagerChar, Vec<Vec<RagerChar>>); impl Buffer { fn new(width: usize, height: usize, default: RagerChar) -> Buffer { Buffer( width, height, default, (0..height).map(|_| vec![default; width]).collect::<Vec<_>>(), ) } fn expand_vertical(&mut self) { let width = self.0; self.1 += 1; self.3.push(vec![self.2; width]); } fn set(&mut self, x: usize, y: usize, val: RagerChar) { if y >= self.height() { for _ in 0..(y - self.height() + 1) { self.expand_vertical(); } } self.3[y][x] = val; } fn get(&self, x: usize, y: usize) -> RagerChar { self.3[y][x] } fn width(&self) -> usize { self.0 } fn height(&self) -> usize { self.1 } } #[derive(Debug, StructOpt)] #[structopt(name = "rager", about = "A pager, like more or less.", author = "")] struct Opt { #[structopt(parse(from_os_str))] input: Option<PathBuf>, } fn main() { let opt = Opt::from_args(); run(opt.input).expect("Program error"); } fn run( input_file: Option<PathBuf>, ) -> Result<(), Error> { let stdin = stdin(); let input = if !termion::is_tty(&stdin) { unsafe { use std::os::unix::io::*; let tty = File::open("/dev/tty").unwrap(); let stdin_fd = libc::dup(0); let ret = File::from_raw_fd(stdin_fd); libc::dup2(tty.as_raw_fd(), 0); ::std::mem::forget(tty); Some(ret) } } 
else if let Some(input_file) = input_file { let file = File::open(input_file)?; Some(file) } else { eprintln!("Expected 'rager <input>' or input over stdin."); ::std::process::exit(1); }; let mut screen = MouseTerminal::from(AlternateScreen::from(stdout().into_raw_mode().unwrap())); write!(screen, "{}", termion::cursor::Hide).unwrap(); write!(screen, "{}", termion::clear::All).unwrap(); screen.flush().unwrap(); let (screen_width, screen_height) = terminal_size().unwrap(); let screen_width = screen_width as usize; let screen_height = screen_height as usize; type MyTerminal = MouseTerminal<AlternateScreen<RawTerminal<Stdout>>>; let (tx, rx) = unbounded(); let actor = ::std::thread::spawn({ move || { let mut console = ransid::Console::new(screen_width, 32767); let mut matrix = Buffer::new(screen_width, screen_height, RagerChar(' ', false, false, false, false, ransid::color::Color::Ansi(0))); fn write_char(screen: &mut MyTerminal, c: RagerChar, x: usize, y: usize) { let _ = write!(screen, "{}{}{}{}{}{}{}{}", termion::cursor::Goto((x as u16) + 1, (y as u16) + 1), if c.1 { format!("{}", termion::style::Bold) } else { format!("") }, if c.2 { format!("{}", termion::style::Underline) } else { format!("") }, if c.3 { format!("{}", termion::style::Italic) } else { format!("") }, if c.4 { format!("{}", termion::style::CrossedOut) } else { format!("") }, match c.5 { ransid::color::Color::Ansi(c) => format!("{}", termion::color::Fg(termion::color::AnsiValue(c))), ransid::color::Color::TrueColor(r, g, b) => format!("{}", termion::color::Fg(termion::color::Rgb(r, g, b))), }, c.0, termion::style::Reset, ); } fn write_row(screen: &mut MyTerminal, buffer: &Buffer, row: usize, dest_row: usize) { let matrix_width = buffer.width() as usize; for x in 0..matrix_width { write_char(screen, buffer.get(x, row), x, dest_row); } } let redraw_from = |screen: &mut MyTerminal, buffer: &Buffer, row: usize| { for y in 0..screen_height { write_row(screen, buffer, row + y, y); } }; let update = 
|screen: &mut MyTerminal, matrix: &mut Buffer, c, x, y, bold, underlined, italic, strikethrough, color| { let c = RagerChar(c, bold, underlined, italic, strikethrough, color); matrix.set(x, y, c); if y < (screen_height as usize) { write_char(screen, c, x, y); } }; let mut scroll: usize = 0; while let Ok(event) = rx.recv() { match event { RagerEvent::Home => { scroll = 0; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::End => { scroll = matrix.height() - screen_height; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::PageUp => { scroll = if scroll <= screen_height { 0 } else { scroll - screen_height }; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::PageDown => { let last_row = matrix.height() - screen_height; let next_row = scroll + screen_height; scroll = if next_row >= last_row { last_row } else { next_row }; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::Line(line) => { unsafe { let screen: &'static mut MyTerminal = ::std::mem::transmute(&mut screen); let matrix: &'static mut Buffer = ::std::mem::transmute(&mut matrix); console.write(&line, move |event| { use ransid::Event; match event { Event::Char { x, y, c, bold, underlined, italic, strikethrough, color, } => { update(screen, matrix, c, x, y, bold, underlined, italic, strikethrough, color); }, _ => {}, } }); } } RagerEvent::EndInput => { } RagerEvent::ScrollDown => { if scroll > 0 { write!(screen, "{}", scroll::Down(1)).unwrap(); scroll -= 1; let matrix_width = matrix.width() as usize; for x in 0..matrix_width { write_char(&mut screen, matrix.get(x, scroll), x, 0); } } } RagerEvent::ScrollUp => { if scroll + (screen_height as usize) < matrix.height() - 1 { write!(screen, "{}", scroll::Up(1)).unwrap(); scroll += 1; let matrix_width = matrix.width() as usize; let matrix_height = matrix.height() as usize; for x in 0..matrix_width { write_char(&mut screen, matrix.get(x, scroll + screen_height as usize), x, matrix_height); } } } RagerEvent::Quit => break, } 
screen.flush().unwrap(); } write!(screen, "{}", termion::cursor::Show).unwrap(); screen.flush().unwrap(); } }); if input.is_some() { ::std::thread::spawn({ let tx = tx.clone(); let mut input = BufReader::new(input.unwrap()); let mut buf = String::new(); move || { while let Ok(len) = ::std::io::BufRead::read_line(&mut input, &mut buf) { if len == 0 { break; } let _ = tx.send(RagerEvent::Line(buf.as_bytes().to_owned())); buf.clear(); } let _ = tx.send(RagerEvent::EndInput); } }); } for c in stdin.events() { match c.unwrap() { Event::Key(Key::Char('q')) | Event::Key(Key::Ctrl('c')) => break, Event::Mouse(MouseEvent::Press(MouseButton::WheelDown, _, _)) | Event::Key(Key::Down) => { let _ = tx.send(RagerEvent::ScrollUp); } Event::Mouse(MouseEvent::Press(MouseButton::WheelUp, _, _)) | Event::Key(Key::Up) => { let _ = tx.send(RagerEvent::ScrollDown); } Event::Key(Key::Home) => { let _ = tx.send(RagerEvent::Home); } Event::Key(Key::End) => { let _ = tx.send(RagerEvent::End); } Event::Key(Key::PageUp) => { let _ = tx.send(RagerEvent::PageUp); } Event::Key(Key::PageDown) => { let _ = tx.send(RagerEvent::PageDown); } _ => {}, } } let _ = tx.send(RagerEvent::Quit); let _ = actor.join(); Ok(()) }
#[macro_use] extern crate structopt; extern crate termion; extern crate ransid; extern crate failure; extern crate libc; extern crate crossbeam_channel; use std::path::PathBuf; use structopt::StructOpt; use failure::Error; use termion::event::*; use termion::scroll; use termion::input::{TermRead, MouseTerminal}; use termion::raw::IntoRawMode; use termion::terminal_size; use termion::screen::*; use std::io::{Write, stdout, stdin, Stdout}; use crossbeam_channel::unbounded; use std::fs::File; use std::io::BufReader; use termion::raw::RawTerminal; enum RagerEvent { Line(Vec<u8>), ScrollDown, ScrollUp, Quit, EndInput, Home, End, PageUp, PageDown, } #[derive(Copy, Clone)] struct RagerChar(char, bool, bool, bool, bool, ransid::color::Color); struct Buffer(usize, usize, RagerChar, Vec<Vec<RagerChar>>); impl Buffer { fn new(width: usize, height: usize, default: RagerChar) -> Buffer { Buffer( width, height, default, (0..height).map(|_| vec![default; width]).collect::<Vec<_>>(), ) } fn expand_vertical(&mut self) { let width = self.0; self.1 += 1; self.3.push(vec![self.2; width]); }
fn get(&self, x: usize, y: usize) -> RagerChar { self.3[y][x] } fn width(&self) -> usize { self.0 } fn height(&self) -> usize { self.1 } } #[derive(Debug, StructOpt)] #[structopt(name = "rager", about = "A pager, like more or less.", author = "")] struct Opt { #[structopt(parse(from_os_str))] input: Option<PathBuf>, } fn main() { let opt = Opt::from_args(); run(opt.input).expect("Program error"); } fn run( input_file: Option<PathBuf>, ) -> Result<(), Error> { let stdin = stdin(); let input = if !termion::is_tty(&stdin) { unsafe { use std::os::unix::io::*; let tty = File::open("/dev/tty").unwrap(); let stdin_fd = libc::dup(0); let ret = File::from_raw_fd(stdin_fd); libc::dup2(tty.as_raw_fd(), 0); ::std::mem::forget(tty); Some(ret) } } else if let Some(input_file) = input_file { let file = File::open(input_file)?; Some(file) } else { eprintln!("Expected 'rager <input>' or input over stdin."); ::std::process::exit(1); }; let mut screen = MouseTerminal::from(AlternateScreen::from(stdout().into_raw_mode().unwrap())); write!(screen, "{}", termion::cursor::Hide).unwrap(); write!(screen, "{}", termion::clear::All).unwrap(); screen.flush().unwrap(); let (screen_width, screen_height) = terminal_size().unwrap(); let screen_width = screen_width as usize; let screen_height = screen_height as usize; type MyTerminal = MouseTerminal<AlternateScreen<RawTerminal<Stdout>>>; let (tx, rx) = unbounded(); let actor = ::std::thread::spawn({ move || { let mut console = ransid::Console::new(screen_width, 32767); let mut matrix = Buffer::new(screen_width, screen_height, RagerChar(' ', false, false, false, false, ransid::color::Color::Ansi(0))); fn write_char(screen: &mut MyTerminal, c: RagerChar, x: usize, y: usize) { let _ = write!(screen, "{}{}{}{}{}{}{}{}", termion::cursor::Goto((x as u16) + 1, (y as u16) + 1), if c.1 { format!("{}", termion::style::Bold) } else { format!("") }, if c.2 { format!("{}", termion::style::Underline) } else { format!("") }, if c.3 { format!("{}", 
termion::style::Italic) } else { format!("") }, if c.4 { format!("{}", termion::style::CrossedOut) } else { format!("") }, match c.5 { ransid::color::Color::Ansi(c) => format!("{}", termion::color::Fg(termion::color::AnsiValue(c))), ransid::color::Color::TrueColor(r, g, b) => format!("{}", termion::color::Fg(termion::color::Rgb(r, g, b))), }, c.0, termion::style::Reset, ); } fn write_row(screen: &mut MyTerminal, buffer: &Buffer, row: usize, dest_row: usize) { let matrix_width = buffer.width() as usize; for x in 0..matrix_width { write_char(screen, buffer.get(x, row), x, dest_row); } } let redraw_from = |screen: &mut MyTerminal, buffer: &Buffer, row: usize| { for y in 0..screen_height { write_row(screen, buffer, row + y, y); } }; let update = |screen: &mut MyTerminal, matrix: &mut Buffer, c, x, y, bold, underlined, italic, strikethrough, color| { let c = RagerChar(c, bold, underlined, italic, strikethrough, color); matrix.set(x, y, c); if y < (screen_height as usize) { write_char(screen, c, x, y); } }; let mut scroll: usize = 0; while let Ok(event) = rx.recv() { match event { RagerEvent::Home => { scroll = 0; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::End => { scroll = matrix.height() - screen_height; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::PageUp => { scroll = if scroll <= screen_height { 0 } else { scroll - screen_height }; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::PageDown => { let last_row = matrix.height() - screen_height; let next_row = scroll + screen_height; scroll = if next_row >= last_row { last_row } else { next_row }; redraw_from(&mut screen, &mut matrix, scroll); } RagerEvent::Line(line) => { unsafe { let screen: &'static mut MyTerminal = ::std::mem::transmute(&mut screen); let matrix: &'static mut Buffer = ::std::mem::transmute(&mut matrix); console.write(&line, move |event| { use ransid::Event; match event { Event::Char { x, y, c, bold, underlined, italic, strikethrough, color, } => { 
update(screen, matrix, c, x, y, bold, underlined, italic, strikethrough, color); }, _ => {}, } }); } } RagerEvent::EndInput => { } RagerEvent::ScrollDown => { if scroll > 0 { write!(screen, "{}", scroll::Down(1)).unwrap(); scroll -= 1; let matrix_width = matrix.width() as usize; for x in 0..matrix_width { write_char(&mut screen, matrix.get(x, scroll), x, 0); } } } RagerEvent::ScrollUp => { if scroll + (screen_height as usize) < matrix.height() - 1 { write!(screen, "{}", scroll::Up(1)).unwrap(); scroll += 1; let matrix_width = matrix.width() as usize; let matrix_height = matrix.height() as usize; for x in 0..matrix_width { write_char(&mut screen, matrix.get(x, scroll + screen_height as usize), x, matrix_height); } } } RagerEvent::Quit => break, } screen.flush().unwrap(); } write!(screen, "{}", termion::cursor::Show).unwrap(); screen.flush().unwrap(); } }); if input.is_some() { ::std::thread::spawn({ let tx = tx.clone(); let mut input = BufReader::new(input.unwrap()); let mut buf = String::new(); move || { while let Ok(len) = ::std::io::BufRead::read_line(&mut input, &mut buf) { if len == 0 { break; } let _ = tx.send(RagerEvent::Line(buf.as_bytes().to_owned())); buf.clear(); } let _ = tx.send(RagerEvent::EndInput); } }); } for c in stdin.events() { match c.unwrap() { Event::Key(Key::Char('q')) | Event::Key(Key::Ctrl('c')) => break, Event::Mouse(MouseEvent::Press(MouseButton::WheelDown, _, _)) | Event::Key(Key::Down) => { let _ = tx.send(RagerEvent::ScrollUp); } Event::Mouse(MouseEvent::Press(MouseButton::WheelUp, _, _)) | Event::Key(Key::Up) => { let _ = tx.send(RagerEvent::ScrollDown); } Event::Key(Key::Home) => { let _ = tx.send(RagerEvent::Home); } Event::Key(Key::End) => { let _ = tx.send(RagerEvent::End); } Event::Key(Key::PageUp) => { let _ = tx.send(RagerEvent::PageUp); } Event::Key(Key::PageDown) => { let _ = tx.send(RagerEvent::PageDown); } _ => {}, } } let _ = tx.send(RagerEvent::Quit); let _ = actor.join(); Ok(()) }
fn set(&mut self, x: usize, y: usize, val: RagerChar) { if y >= self.height() { for _ in 0..(y - self.height() + 1) { self.expand_vertical(); } } self.3[y][x] = val; }
function_block-full_function
[ { "content": "# rager 🎉\n\n\n\n[![](http://meritbadge.herokuapp.com/rager)](https://crates.io/crates/rager)\n\n\n\nA terminal Pager written in Rust. Like more or less.\n\n\n\n* Supports any `xterm`-supporting terminal thanks to Termion.\n\n* Support mouse scrolling (or up/down keys)\n\n* Supports content over stdin or passed in as a filename.\n\n* `q` or Ctrl+C to quit.\n\n\n\n```\n\ncargo install rager\n\ncargo build --color=always |& rager\n\n```\n\n\n\n![](https://user-images.githubusercontent.com/80639/39799598-cea19382-5332-11e8-9c94-367ec317123f.png)\n\n\n\n**TODO:**\n\n\n\n* Visually indicate when stdin has terminated.\n\n* Support paging a file via command line argument.\n\n* Support dumping contents to your shell, or switching back.\n\n* Support pausing / resuming output.\n\n* Support follow mode (like `less +F`).\n\n* Add more key shortcuts?\n\n* Windows support?\n\n\n\nAll contributions welcome. How can rager be useful for you?\n\n\n\n## License\n\n\n\nMIT or Apache-2.0, at your option.\n", "file_path": "README.md", "rank": 6, "score": 8183.380764824399 } ]
Rust
src/function.rs
eisterman/RustaCUDA
4f78c255c9e017c47e21af72a9dd8710f3b571e1
use crate::context::{CacheConfig, SharedMemoryConfig}; use crate::error::{CudaResult, ToResult}; use crate::module::Module; use cuda_sys::cuda::{self, CUfunction}; use std::marker::PhantomData; use std::mem::transmute; #[derive(Debug, Clone, PartialEq, Eq)] pub struct GridSize { pub x: u32, pub y: u32, pub z: u32, } impl GridSize { #[inline] pub fn x(x: u32) -> GridSize { GridSize { x, y: 1, z: 1 } } #[inline] pub fn xy(x: u32, y: u32) -> GridSize { GridSize { x, y, z: 1 } } #[inline] pub fn xyz(x: u32, y: u32, z: u32) -> GridSize { GridSize { x, y, z } } } impl From<u32> for GridSize { fn from(x: u32) -> GridSize { GridSize::x(x) } } impl From<(u32, u32)> for GridSize { fn from((x, y): (u32, u32)) -> GridSize { GridSize::xy(x, y) } } impl From<(u32, u32, u32)> for GridSize { fn from((x, y, z): (u32, u32, u32)) -> GridSize { GridSize::xyz(x, y, z) } } impl<'a> From<&'a GridSize> for GridSize { fn from(other: &GridSize) -> GridSize { other.clone() } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct BlockSize { pub x: u32, pub y: u32, pub z: u32, } impl BlockSize { #[inline] pub fn x(x: u32) -> BlockSize { BlockSize { x, y: 1, z: 1 } } #[inline] pub fn xy(x: u32, y: u32) -> BlockSize { BlockSize { x, y, z: 1 } } #[inline] pub fn xyz(x: u32, y: u32, z: u32) -> BlockSize { BlockSize { x, y, z } } } impl From<u32> for BlockSize { fn from(x: u32) -> BlockSize { BlockSize::x(x) } } impl From<(u32, u32)> for BlockSize { fn from((x, y): (u32, u32)) -> BlockSize { BlockSize::xy(x, y) } } impl From<(u32, u32, u32)> for BlockSize { fn from((x, y, z): (u32, u32, u32)) -> BlockSize { BlockSize::xyz(x, y, z) } } impl<'a> From<&'a BlockSize> for BlockSize { fn from(other: &BlockSize) -> BlockSize { other.clone() } } #[repr(u32)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum FunctionAttribute { MaxThreadsPerBlock = 0, SharedMemorySizeBytes = 1, ConstSizeBytes = 2, LocalSizeBytes = 3, NumRegisters = 4, PtxVersion = 5, BinaryVersion = 6, CacheModeCa = 7, 
#[doc(hidden)] __Nonexhaustive = 8, } #[derive(Debug)] pub struct Function<'a> { inner: CUfunction, module: PhantomData<&'a Module>, } impl<'a> Function<'a> { pub(crate) fn new(inner: CUfunction, _module: &Module) -> Function { Function { inner, module: PhantomData, } } pub fn get_attribute(&self, attr: FunctionAttribute) -> CudaResult<i32> { unsafe { let mut val = 0i32; cuda::cuFuncGetAttribute( &mut val as *mut i32, ::std::mem::transmute(attr), self.inner, ) .to_result()?; Ok(val) } } pub fn set_cache_config(&mut self, config: CacheConfig) -> CudaResult<()> { unsafe { cuda::cuFuncSetCacheConfig(self.inner, transmute(config)).to_result() } } pub fn set_shared_memory_config(&mut self, cfg: SharedMemoryConfig) -> CudaResult<()> { unsafe { cuda::cuFuncSetSharedMemConfig(self.inner, transmute(cfg)).to_result() } } pub(crate) fn to_inner(&self) -> CUfunction { self.inner } } #[macro_export] macro_rules! launch { ($module:ident . $function:ident <<<$grid:expr, $block:expr, $shared:expr, $stream:ident>>>( $( $arg:expr),* )) => { { let name = std::ffi::CString::new(stringify!($function)).unwrap(); let function = $module.get_function(&name); match function { Ok(f) => launch!(f<<<$grid, $block, $shared, $stream>>>( $($arg),* ) ), Err(e) => Err(e), } } }; ($function:ident <<<$grid:expr, $block:expr, $shared:expr, $stream:ident>>>( $( $arg:expr),* )) => { { fn assert_impl_devicecopy<T: $crate::memory::DeviceCopy>(_val: T) {}; if false { $( assert_impl_devicecopy($arg); )* }; $stream.launch(&$function, $grid, $block, $shared, &[ $( &$arg as *const _ as *mut ::std::ffi::c_void, )* ] ) } }; } #[cfg(test)] mod test { use super::*; use crate::memory::CopyDestination; use crate::memory::DeviceBuffer; use crate::quick_init; use crate::stream::{Stream, StreamFlags}; use std::error::Error; use std::ffi::CString; #[test] fn test_launch() -> Result<(), Box<dyn Error>> { let _context = quick_init(); let ptx_text = CString::new(include_str!("../resources/add.ptx"))?; let module = 
Module::load_from_string(&ptx_text)?; unsafe { let mut in_x = DeviceBuffer::from_slice(&[2.0f32; 128])?; let mut in_y = DeviceBuffer::from_slice(&[1.0f32; 128])?; let mut out: DeviceBuffer<f32> = DeviceBuffer::uninitialized(128)?; let stream = Stream::new(StreamFlags::NON_BLOCKING, None)?; launch!(module.sum<<<1, 128, 0, stream>>>(in_x.as_device_ptr(), in_y.as_device_ptr(), out.as_device_ptr(), out.len()))?; stream.synchronize()?; let mut out_host = [0f32; 128]; out.copy_to(&mut out_host[..])?; for x in out_host.iter() { assert_eq!(3, *x as u32); } } Ok(()) } }
use crate::context::{CacheConfig, SharedMemoryConfig}; use crate::error::{CudaResult, ToResult}; use crate::module::Module; use cuda_sys::cuda::{self, CUfunction}; use std::marker::PhantomData; use std::mem::transmute; #[derive(Debug, Clone, PartialEq, Eq)] pub struct GridSize { pub x: u32, pub y: u32, pub z: u32, } impl GridSize { #[inline] pub fn x(x: u32) -> GridSize { GridSize { x, y: 1, z: 1 } } #[inline] pub fn xy(x: u32, y: u32) -> GridSize { GridSize { x, y, z: 1 } } #[inline] pub fn xyz(x: u32, y: u32, z: u32) -> GridSize { GridSize { x, y, z } } } impl From<u32> for GridSize { fn from(x: u32) -> GridSize { GridSize::x(x) } } impl From<(u32, u32)> for GridSize { fn from((x, y): (u32, u32)) -> GridSize { GridSize::xy(x, y) } } impl From<(u32, u32, u32)> for GridSize { fn from((x, y, z): (u32, u32, u32)) -> GridSize { GridSize::xyz(x, y, z) } } impl<'a> From<&'a GridSize> for GridSize { fn from(other: &GridSize) -> GridSize { other.clone() } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct BlockSize { pub x: u32, pub y: u32, pub z: u32, } impl BlockSize { #[inline] pub fn x(x: u32) -> BlockSize { BlockSize { x, y: 1, z: 1 } } #[inline] pub fn xy(x: u32, y: u32) -> BlockSize { BlockSize { x, y, z: 1 } } #[inline] pub fn xyz(x: u32, y: u32, z: u32) -> BlockSize { BlockSize { x, y, z } } } impl From<u32> for BlockSize { fn from(x: u32) -> BlockSize { BlockSize::x(x) } } impl From<(u32, u32)> for BlockSize { fn from((x, y): (u32, u32)) -> BlockSize { BlockSize::xy(x, y) } } impl From<(u32, u32, u32)> for BlockSize { fn from((x, y, z): (u32, u32, u32)) -> BlockSize { BlockSize::xyz(x, y, z) } } impl<'a> From<&'a BlockSize> for BlockSize { fn from(other: &BlockSize) -> BlockSize { other.clone() } } #[repr(u32)]
.to_result()?; Ok(val) } } pub fn set_cache_config(&mut self, config: CacheConfig) -> CudaResult<()> { unsafe { cuda::cuFuncSetCacheConfig(self.inner, transmute(config)).to_result() } } pub fn set_shared_memory_config(&mut self, cfg: SharedMemoryConfig) -> CudaResult<()> { unsafe { cuda::cuFuncSetSharedMemConfig(self.inner, transmute(cfg)).to_result() } } pub(crate) fn to_inner(&self) -> CUfunction { self.inner } } #[macro_export] macro_rules! launch { ($module:ident . $function:ident <<<$grid:expr, $block:expr, $shared:expr, $stream:ident>>>( $( $arg:expr),* )) => { { let name = std::ffi::CString::new(stringify!($function)).unwrap(); let function = $module.get_function(&name); match function { Ok(f) => launch!(f<<<$grid, $block, $shared, $stream>>>( $($arg),* ) ), Err(e) => Err(e), } } }; ($function:ident <<<$grid:expr, $block:expr, $shared:expr, $stream:ident>>>( $( $arg:expr),* )) => { { fn assert_impl_devicecopy<T: $crate::memory::DeviceCopy>(_val: T) {}; if false { $( assert_impl_devicecopy($arg); )* }; $stream.launch(&$function, $grid, $block, $shared, &[ $( &$arg as *const _ as *mut ::std::ffi::c_void, )* ] ) } }; } #[cfg(test)] mod test { use super::*; use crate::memory::CopyDestination; use crate::memory::DeviceBuffer; use crate::quick_init; use crate::stream::{Stream, StreamFlags}; use std::error::Error; use std::ffi::CString; #[test] fn test_launch() -> Result<(), Box<dyn Error>> { let _context = quick_init(); let ptx_text = CString::new(include_str!("../resources/add.ptx"))?; let module = Module::load_from_string(&ptx_text)?; unsafe { let mut in_x = DeviceBuffer::from_slice(&[2.0f32; 128])?; let mut in_y = DeviceBuffer::from_slice(&[1.0f32; 128])?; let mut out: DeviceBuffer<f32> = DeviceBuffer::uninitialized(128)?; let stream = Stream::new(StreamFlags::NON_BLOCKING, None)?; launch!(module.sum<<<1, 128, 0, stream>>>(in_x.as_device_ptr(), in_y.as_device_ptr(), out.as_device_ptr(), out.len()))?; stream.synchronize()?; let mut out_host = [0f32; 128]; 
out.copy_to(&mut out_host[..])?; for x in out_host.iter() { assert_eq!(3, *x as u32); } } Ok(()) } }
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum FunctionAttribute { MaxThreadsPerBlock = 0, SharedMemorySizeBytes = 1, ConstSizeBytes = 2, LocalSizeBytes = 3, NumRegisters = 4, PtxVersion = 5, BinaryVersion = 6, CacheModeCa = 7, #[doc(hidden)] __Nonexhaustive = 8, } #[derive(Debug)] pub struct Function<'a> { inner: CUfunction, module: PhantomData<&'a Module>, } impl<'a> Function<'a> { pub(crate) fn new(inner: CUfunction, _module: &Module) -> Function { Function { inner, module: PhantomData, } } pub fn get_attribute(&self, attr: FunctionAttribute) -> CudaResult<i32> { unsafe { let mut val = 0i32; cuda::cuFuncGetAttribute( &mut val as *mut i32, ::std::mem::transmute(attr), self.inner, )
random
[ { "content": "/// Shortcut for initializing the CUDA Driver API and creating a CUDA context with default settings\n\n/// for the first device.\n\n///\n\n/// This is useful for testing or just setting up a basic CUDA context quickly. Users with more\n\n/// complex needs (multiple devices, custom flags, etc.) should use `init` and create their own\n\n/// context.\n\npub fn quick_init() -> CudaResult<Context> {\n\n init(CudaFlags::empty())?;\n\n let device = Device::get_device(0)?;\n\n Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)\n\n}\n\n\n\n/// Struct representing the CUDA API version number.\n\n#[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd, Copy, Clone)]\n\npub struct CudaApiVersion {\n\n version: i32,\n\n}\n\nimpl CudaApiVersion {\n\n /// Returns the latest CUDA version supported by the CUDA driver.\n\n pub fn get() -> CudaResult<CudaApiVersion> {\n\n unsafe {\n\n let mut version: i32 = 0;\n\n cuDriverGetVersion(&mut version as *mut i32).to_result()?;\n\n Ok(CudaApiVersion { version })\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 0, "score": 75221.7915704248 }, { "content": "/// Initialize the CUDA Driver API.\n\n///\n\n/// This must be called before any other RustaCUDA (or CUDA) function is called. Typically, this\n\n/// should be at the start of your program. All other functions will fail unless the API is\n\n/// initialized first.\n\n///\n\n/// The `flags` parameter is used to configure the CUDA API. 
Currently no flags are defined, so\n\n/// it must be `CudaFlags::empty()`.\n\npub fn init(flags: CudaFlags) -> CudaResult<()> {\n\n unsafe { cuInit(flags.bits()).to_result() }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 70686.46019200361 }, { "content": "fn type_check_struct(s: &DataStruct) -> TokenStream {\n\n let checks = match s.fields {\n\n Fields::Named(ref named_fields) => {\n\n let fields: Vec<&Field> = named_fields.named.iter().collect();\n\n check_fields(&fields)\n\n }\n\n Fields::Unnamed(ref unnamed_fields) => {\n\n let fields: Vec<&Field> = unnamed_fields.unnamed.iter().collect();\n\n check_fields(&fields)\n\n }\n\n Fields::Unit => vec![],\n\n };\n\n quote!(\n\n #(#checks)*\n\n )\n\n}\n\n\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 2, "score": 69162.1577506508 }, { "content": "#[proc_macro_derive(DeviceCopy)]\n\npub fn derive_device_copy(input: BaseTokenStream) -> BaseTokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n let gen = impl_device_copy(&ast);\n\n BaseTokenStream::from(gen)\n\n}\n\n\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 3, "score": 60540.439621563564 }, { "content": "#[derive(Clone, DeviceCopy)]\n\nstruct NormalStruct {\n\n x: u64,\n\n y: u64,\n\n}\n\n\n", "file_path": "tests/test_derive.rs", "rank": 4, "score": 58677.723245637426 }, { "content": "#[derive(Clone, DeviceCopy)]\n\nstruct ContainerStruct {\n\n a: NormalStruct,\n\n b: TupleStruct,\n\n}\n\n\n", "file_path": "tests/test_derive.rs", "rank": 5, "score": 58677.723245637426 }, { "content": "#[derive(Clone, DeviceCopy)]\n\nstruct ZeroSizedStruct;\n\n\n", "file_path": "tests/test_derive.rs", "rank": 6, "score": 57421.45454853314 }, { "content": "fn impl_device_copy(input: &DeriveInput) -> TokenStream {\n\n let input_type = &input.ident;\n\n\n\n // Generate the code to type-check all fields of the derived struct/enum/union. 
We can't perform\n\n // type checking at expansion-time, so instead we generate a dummy nested function with a\n\n // type-bound on DeviceCopy and call it with every type that's in the struct/enum/union.\n\n // This will fail to compile if any of the nested types doesn't implement DeviceCopy.\n\n let check_types_code = match input.data {\n\n Data::Struct(ref data_struct) => type_check_struct(data_struct),\n\n Data::Enum(ref data_enum) => type_check_enum(data_enum),\n\n Data::Union(ref data_union) => type_check_union(data_union),\n\n };\n\n\n\n // We need a function for the type-checking code to live in, so generate a complicated and\n\n // hopefully-unique name for that\n\n let type_test_func_name = format!(\n\n \"__verify_{}_can_implement_DeviceCopy\",\n\n input_type.to_string()\n\n );\n\n let type_test_func_ident = Ident::new(&type_test_func_name, Span::call_site());\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 7, "score": 57027.81924755415 }, { "content": "#[derive(Clone, DeviceCopy)]\n\nstruct GenericStruct<T> {\n\n value: T,\n\n}\n\n\n", "file_path": "tests/test_derive.rs", "rank": 8, "score": 54387.29838941563 }, { "content": "#[derive(Clone, DeviceCopy)]\n\nstruct TupleStruct(u64, u64);\n\n\n", "file_path": "tests/test_derive.rs", "rank": 9, "score": 51076.411690345005 }, { "content": " pub trait Sealed {}\n\n}\n", "file_path": "src/lib.rs", "rank": 10, "score": 46980.41505769591 }, { "content": "#[test]\n\nfn test_hidden_functions() {\n\n __verify_ZeroSizedStruct_can_implement_DeviceCopy(&ZeroSizedStruct);\n\n __verify_TupleStruct_can_implement_DeviceCopy(&TupleStruct(0, 0));\n\n __verify_NormalStruct_can_implement_DeviceCopy(&NormalStruct { x: 0, y: 0 });\n\n __verify_ContainerStruct_can_implement_DeviceCopy(&ContainerStruct {\n\n a: NormalStruct { x: 0, y: 0 },\n\n b: TupleStruct(0, 0),\n\n });\n\n __verify_GenericStruct_can_implement_DeviceCopy(&GenericStruct { value: 0u64 });\n\n 
__verify_TestEnum_can_implement_DeviceCopy(&TestEnum::Unit);\n\n __verify_GenericEnum_can_implement_DeviceCopy::<u64>(&GenericEnum::Unit);\n\n __verify_TestUnion_can_implement_DeviceCopy(&TestUnion { u: 0u64 });\n\n}\n", "file_path": "tests/test_derive.rs", "rank": 11, "score": 46229.57742862164 }, { "content": "#[test]\n\nfn test_stream_callbacks_execution_order() {\n\n let _ctx = quick_init();\n\n let stream = Stream::new(StreamFlags::NON_BLOCKING, None).unwrap();\n\n\n\n let (order_sender, order_receiver) = sync_channel(0);\n\n stream\n\n .add_callback(Box::new(|_| {\n\n order_sender.send(1).unwrap();\n\n }))\n\n .unwrap();\n\n stream\n\n .add_callback(Box::new(|_| {\n\n order_sender.send(2).unwrap();\n\n }))\n\n .unwrap();\n\n stream\n\n .add_callback(Box::new(|_| {\n\n order_sender.send(3).unwrap();\n\n }))\n\n .unwrap();\n\n for expected in &[1, 2, 3] {\n\n assert_eq!(*expected, order_receiver.recv().unwrap());\n\n }\n\n}\n\n\n", "file_path": "tests/test_stream.rs", "rank": 12, "score": 43754.8853593609 }, { "content": "#[test]\n\nfn test_stream_callbacks_status_propagation() {\n\n let _ctx = quick_init();\n\n let stream = Stream::new(StreamFlags::NON_BLOCKING, None).unwrap();\n\n\n\n let (status_sender, status_receiver) = sync_channel(0);\n\n stream\n\n .add_callback(Box::new(|status| {\n\n status_sender.send(status).unwrap();\n\n }))\n\n .unwrap();\n\n assert_eq!(Ok(()), status_receiver.recv().unwrap())\n\n}\n", "file_path": "tests/test_stream.rs", "rank": 13, "score": 43754.8853593609 }, { "content": "#[test]\n\nfn test_stream_callbacks_environment_capture() {\n\n let _ctx = quick_init();\n\n let stream = Stream::new(StreamFlags::NON_BLOCKING, None).unwrap();\n\n\n\n let (capture_sender, capture_receiver) = sync_channel(0);\n\n let magic_numbers = (42, Box::new(1337));\n\n stream\n\n .add_callback(Box::new(|_| {\n\n capture_sender.send(magic_numbers).unwrap();\n\n }))\n\n .unwrap();\n\n let captured_magic_numbers = capture_receiver.recv().unwrap();\n\n 
assert_eq!(42, captured_magic_numbers.0);\n\n assert_eq!(1337, *captured_magic_numbers.1);\n\n}\n\n\n", "file_path": "tests/test_stream.rs", "rank": 14, "score": 43754.8853593609 }, { "content": "/// Sealed trait for `Context` and `UnownedContext`. Not intended for use outside of RustaCUDA.\n\npub trait ContextHandle: Sealed {\n\n #[doc(hidden)]\n\n fn get_inner(&self) -> CUcontext;\n\n}\n\nimpl Sealed for Context {}\n\nimpl ContextHandle for Context {\n\n fn get_inner(&self) -> CUcontext {\n\n self.inner\n\n }\n\n}\n\nimpl Sealed for UnownedContext {}\n\nimpl ContextHandle for UnownedContext {\n\n fn get_inner(&self) -> CUcontext {\n\n self.inner\n\n }\n\n}\n\n\n\n/// Non-owning handle to a CUDA context.\n\n#[derive(Debug, Clone)]\n\npub struct UnownedContext {\n", "file_path": "src/context.rs", "rank": 15, "score": 41902.61387848474 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n // Set up the context, load the module, and create a stream to run kernels in.\n\n rustacuda::init(CudaFlags::empty())?;\n\n let device = Device::get_device(0)?;\n\n let _ctx = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?;\n\n\n\n let ptx = CString::new(include_str!(\"../resources/add.ptx\"))?;\n\n let module = Module::load_from_string(&ptx)?;\n\n let stream = Stream::new(StreamFlags::NON_BLOCKING, None)?;\n\n\n\n // Create buffers for data\n\n let mut in_x = DeviceBuffer::from_slice(&[1.0f32; 10])?;\n\n let mut in_y = DeviceBuffer::from_slice(&[2.0f32; 10])?;\n\n let mut out_1 = DeviceBuffer::from_slice(&[0.0f32; 10])?;\n\n let mut out_2 = DeviceBuffer::from_slice(&[0.0f32; 10])?;\n\n\n\n // This kernel adds each element in `in_x` and `in_y` and writes the result into `out`.\n\n unsafe {\n\n // Launch the kernel with one block of one thread, no dynamic shared memory on `stream`.\n\n let result = launch!(module.sum<<<1, 1, 0, stream>>>(\n", "file_path": "examples/launch.rs", "rank": 16, "score": 37391.48432890157 }, { "content": "fn 
add_bound_to_generics(generics: &Generics) -> Generics {\n\n let mut new_generics = generics.clone();\n\n let bound: TypeParamBound =\n\n parse_str(&quote! {::rustacuda_core::DeviceCopy}.to_string()).unwrap();\n\n\n\n for type_param in &mut new_generics.type_params_mut() {\n\n type_param.bounds.push(bound.clone())\n\n }\n\n\n\n new_generics\n\n}\n\n\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 17, "score": 35264.22708059775 }, { "content": "fn type_check_enum(s: &DataEnum) -> TokenStream {\n\n let mut checks = vec![];\n\n\n\n for variant in &s.variants {\n\n match variant.fields {\n\n Fields::Named(ref named_fields) => {\n\n let fields: Vec<&Field> = named_fields.named.iter().collect();\n\n checks.extend(check_fields(&fields));\n\n }\n\n Fields::Unnamed(ref unnamed_fields) => {\n\n let fields: Vec<&Field> = unnamed_fields.unnamed.iter().collect();\n\n checks.extend(check_fields(&fields));\n\n }\n\n Fields::Unit => {}\n\n }\n\n }\n\n quote!(\n\n #(#checks)*\n\n )\n\n}\n\n\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 18, "score": 34332.570914627904 }, { "content": "fn type_check_union(s: &DataUnion) -> TokenStream {\n\n let fields: Vec<&Field> = s.fields.named.iter().collect();\n\n let checks = check_fields(&fields);\n\n quote!(\n\n #(#checks)*\n\n )\n\n}\n\n\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 19, "score": 34332.570914627904 }, { "content": "fn check_fields(fields: &Vec<&Field>) -> Vec<TokenStream> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let field_type = &field.ty;\n\n quote! {assert_impl::<#field_type>();}\n\n })\n\n .collect()\n\n}\n", "file_path": "rustacuda_derive/src/lib.rs", "rank": 20, "score": 31401.022657436755 }, { "content": "/// Sealed trait implemented by types which can be the source or destination when copying data\n\n/// to/from the device or from one device allocation to another.\n\npub trait CopyDestination<O: ?Sized>: crate::private::Sealed {\n\n /// Copy data from `source`. 
`source` must be the same size as `self`.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If a CUDA error occurs, return the error.\n\n fn copy_from(&mut self, source: &O) -> CudaResult<()>;\n\n\n\n /// Copy data to `dest`. `dest` must be the same size as `self`.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If a CUDA error occurs, return the error.\n\n fn copy_to(&self, dest: &mut O) -> CudaResult<()>;\n\n}\n\n\n", "file_path": "src/memory/device/mod.rs", "rank": 21, "score": 31026.57214439332 }, { "content": "/// Sealed trait implemented by types which can be the source or destination when copying data\n\n/// asynchronously to/from the device or from one device allocation to another.\n\n///\n\n/// ## Safety:\n\n///\n\n/// The functions of this trait are unsafe because they return control to the calling code while\n\n/// the copy operation could still be occurring in the background. This could allow calling code\n\n/// to read, modify or deallocate the destination buffer, or to modify or deallocate the source\n\n/// buffer resulting in a data race and undefined behavior.\n\n///\n\n/// Thus to enforce safety, the following invariants must be upheld:\n\n/// * The source and destination are not deallocated\n\n/// * The source is not modified\n\n/// * The destination is not written or read by any other operation\n\n///\n\n/// These invariants must be preserved until the stream is synchronized or an event queued after\n\n/// the copy is triggered.\n\n///\n\npub trait AsyncCopyDestination<O: ?Sized>: crate::private::Sealed {\n\n /// Asynchronously copy data from `source`. 
`source` must be the same size as `self`.\n\n ///\n\n /// Host memory used as a source or destination must be page-locked.\n\n ///\n\n /// For why this function is unsafe, see [AsyncCopyDestination](trait.AsyncCopyDestination.html)\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If a CUDA error occurs, return the error.\n\n unsafe fn async_copy_from(&mut self, source: &O, stream: &Stream) -> CudaResult<()>;\n\n\n\n /// Asynchronously copy data to `dest`. `dest` must be the same size as `self`.\n\n ///\n\n /// Host memory used as a source or destination must be page-locked.\n\n ///\n\n /// For why this function is unsafe, see [AsyncCopyDestination](trait.AsyncCopyDestination.html)\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If a CUDA error occurs, return the error.\n\n unsafe fn async_copy_to(&self, dest: &mut O, stream: &Stream) -> CudaResult<()>;\n\n}\n", "file_path": "src/memory/device/mod.rs", "rank": 22, "score": 30218.602726691148 }, { "content": "//! Functions and types for enumerating CUDA devices and retrieving information about them.\n\n\n\nuse crate::error::{CudaResult, ToResult};\n\nuse cuda_sys::cuda::*;\n\nuse std::ffi::CStr;\n\nuse std::ops::Range;\n\n\n\n/// All supported device attributes for [Device::get_attribute](struct.Device.html#method.get_attribute)\n\n#[repr(u32)]\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum DeviceAttribute {\n\n /// Maximum number of threads per block\n\n MaxThreadsPerBlock = 1,\n\n /// Maximum x-dimension of a block\n\n MaxBlockDimX = 2,\n\n /// Maximum y-dimension of a block\n\n MaxBlockDimY = 3,\n\n /// Maximum z-dimension of a block\n\n MaxBlockDimZ = 4,\n\n /// Maximum x-dimension of a grid\n", "file_path": "src/device.rs", "rank": 24, "score": 17.835539792557785 }, { "content": "use std::mem::transmute;\n\nuse std::ptr;\n\n\n\n/// This enumeration represents configuration settings for devices which share hardware resources\n\n/// between L1 cache and shared memory.\n\n///\n\n/// Note that this is only a 
preference - the driver will use the requested configuration if\n\n/// possible, but it is free to choose a different configuration if required to execute functions.\n\n///\n\n/// See\n\n/// [CurrentContext::get_cache_config](struct.CurrentContext.html#method.get_cache_config) and\n\n/// [CurrentContext::set_cache_config](struct.CurrentContext.html#method.set_cache_config) to get\n\n/// and set the cache config for the current context.\n\n#[repr(u32)]\n\n#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]\n\npub enum CacheConfig {\n\n /// No preference for shared memory or L1 (default)\n\n PreferNone = 0,\n\n /// Prefer larger shared memory and smaller L1 cache\n\n PreferShared = 1,\n", "file_path": "src/context.rs", "rank": 31, "score": 12.247451428009509 }, { "content": " /// Return the flags which were used to create this stream.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # use rustacuda::*;\n\n /// # use std::error::Error;\n\n /// # fn main() -> Result<(), Box<dyn Error>> {\n\n /// # let _ctx = quick_init()?;\n\n /// use rustacuda::stream::{Stream, StreamFlags};\n\n ///\n\n /// let stream = Stream::new(StreamFlags::NON_BLOCKING, None)?;\n\n /// assert_eq!(StreamFlags::NON_BLOCKING, stream.get_flags().unwrap());\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_flags(&self) -> CudaResult<StreamFlags> {\n\n unsafe {\n\n let mut bits = 0u32;\n\n cuda::cuStreamGetFlags(self.inner, &mut bits as *mut u32).to_result()?;\n", "file_path": "src/stream.rs", "rank": 32, "score": 12.026052864991296 }, { "content": " /// Prefer larger L1 cache and smaller shared memory\n\n PreferL1 = 2,\n\n /// Prefer equal-sized L1 cache and shared memory\n\n PreferEqual = 3,\n\n\n\n #[doc(hidden)]\n\n __Nonexhaustive,\n\n}\n\n\n\n/// This enumeration represents the limited resources which can be accessed through\n\n/// [CurrentContext::get_resource_limit](struct.CurrentContext.html#method.get_resource_limit) and\n\n/// 
[CurrentContext::set_resource_limit](struct.CurrentContext.html#method.set_resource_limit).\n\n#[repr(u32)]\n\n#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]\n\npub enum ResourceLimit {\n\n /// The size in bytes of each GPU thread stack\n\n StackSize = 0,\n\n /// The size in bytes of the FIFO used by the `printf()` device system call.\n\n PrintfFifoSize = 1,\n\n /// The size in bytes of the heap used by the `malloc()` and `free()` device system calls.\n", "file_path": "src/context.rs", "rank": 33, "score": 11.967941909884477 }, { "content": " /// Indicates that the CUDA array will be used for texture gather. Texture gather can only\n\n /// be performed on 2D CUDA arrays.\n\n const TEXTURE_GATHER = cuda_sys::cuda::CUDA_ARRAY3D_TEXTURE_GATHER;\n\n }\n\n}\n\n\n\nimpl ArrayObjectFlags {\n\n /// Creates a default flags object with no flags set.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\n\n\n/// Describes a CUDA Array\n\n#[derive(Clone, Copy, Debug)]\n\npub struct ArrayDescriptor {\n\n desc: cuda_sys::cuda::CUDA_ARRAY3D_DESCRIPTOR,\n\n}\n\n\n\nimpl ArrayDescriptor {\n", "file_path": "src/memory/array.rs", "rank": 34, "score": 11.90371921554032 }, { "content": "use super::DeviceCopy;\n\nuse crate::error::*;\n\nuse crate::memory::malloc::{cuda_free_locked, cuda_malloc_locked};\n\nuse std::mem;\n\nuse std::ops;\n\nuse std::ptr;\n\nuse std::slice;\n\n\n\n/// Fixed-size host-side buffer in page-locked memory.\n\n///\n\n/// See the [`module-level documentation`](../memory/index.html) for more details on page-locked\n\n/// memory.\n\n#[derive(Debug)]\n\npub struct LockedBuffer<T: DeviceCopy> {\n\n buf: *mut T,\n\n capacity: usize,\n\n}\n\nimpl<T: DeviceCopy + Clone> LockedBuffer<T> {\n\n /// Allocate a new page-locked buffer large enough to hold `size` `T`'s and initialized with\n\n /// clones of `value`.\n", "file_path": "src/memory/locked.rs", "rank": 35, "score": 11.734528498559841 }, { "content": "//! #[macro_use]\n\n//! extern crate rustacuda;\n\n//! 
extern crate rustacuda_core;\n\n//!\n\n//! #[derive(Clone, DeviceCopy)]\n\n//! enum ShouldFailStructEnum {\n\n//! Unit,\n\n//! Struct{v: Vec<u64>},\n\n//! }\n\n//!\n\n//! fn main() {}\n\n//! ```\n\n//!\n\n//! ```compile_fail\n\n//! #[macro_use]\n\n//! extern crate rustacuda;\n\n//! extern crate rustacuda_core;\n\n//!\n\n//! #[derive(Copy, Clone, DeviceCopy)]\n\n//! union ShouldFailUnion {\n\n//! u: *const u64,\n\n//! o: *const i64,\n\n//! }\n\n//!\n\n//! fn main() {}\n\n//! ```\n", "file_path": "src/derive_compile_fail.rs", "rank": 36, "score": 11.404739016744827 }, { "content": "//! Routines for allocating and using CUDA Array Objects.\n\n//!\n\n//! Detailed documentation about allocating CUDA Arrays can be found in the\n\n//! [CUDA Driver API](https://docs.nvidia.com/cuda/cuda-driver-api/group__CUDA__MEM.html#group__CUDA__MEM_1gc2322c70b38c2984536c90ed118bb1d7)\n\n\n\nuse std::os::raw::c_uint;\n\n\n\nuse cuda_sys::cuda::{CUarray, CUarray_format, CUarray_format_enum};\n\n\n\nuse crate::context::CurrentContext;\n\nuse crate::device::DeviceAttribute;\n\nuse crate::error::*;\n\n\n\n/// Describes the format used for a CUDA Array.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum ArrayFormat {\n\n /// Unsigned 8-bit integer\n\n UnsignedInt8,\n\n /// Unsigned 16-bit integer\n\n UnsignedInt16,\n", "file_path": "src/memory/array.rs", "rank": 37, "score": 11.394255471583925 }, { "content": " self.device\n\n }\n\n}\n\n\n\n/// Iterator over all available CUDA devices. 
See\n\n/// [the Device::devices function](./struct.Device.html#method.devices) for more information.\n\n#[derive(Debug, Clone)]\n\npub struct Devices {\n\n range: Range<u32>,\n\n}\n\nimpl Iterator for Devices {\n\n type Item = CudaResult<Device>;\n\n\n\n fn next(&mut self) -> Option<CudaResult<Device>> {\n\n self.range.next().map(Device::get_device)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/device.rs", "rank": 38, "score": 11.277449931991992 }, { "content": " /// # let context = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?;\n\n /// let unowned = ContextStack::pop()?;\n\n /// ContextStack::push(&unowned)?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn push<C: ContextHandle>(ctx: &C) -> CudaResult<()> {\n\n unsafe {\n\n cuda::cuCtxPushCurrent_v2(ctx.get_inner()).to_result()?;\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\n/// Struct representing a range of stream priorities.\n\n///\n\n/// By convention, lower numbers imply greater priorities. The range of meaningful stream priorities\n\n/// is given by `[greatest, least]` - that is (numerically), `greatest <= least`.\n\n#[derive(Debug, Clone, Hash, Eq, PartialEq)]\n\npub struct StreamPriorityRange {\n", "file_path": "src/context.rs", "rank": 39, "score": 11.030093387346469 }, { "content": "\n\n /// Return the major version number - eg. the 9 in version 9.2\n\n #[inline]\n\n pub fn major(self) -> i32 {\n\n self.version / 1000\n\n }\n\n\n\n /// Return the minor version number - eg. 
the 2 in version 9.2\n\n #[inline]\n\n pub fn minor(self) -> i32 {\n\n (self.version % 1000) / 10\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_api_version() {\n", "file_path": "src/lib.rs", "rank": 40, "score": 11.010232565741108 }, { "content": "use crate::memory::DeviceCopy;\n\nuse core::fmt;\n\nuse core::ptr;\n\n\n\n/// A pointer to device memory.\n\n///\n\n/// `DevicePointer` cannot be dereferenced by the CPU, as it is a pointer to a memory allocation in\n\n/// the device. It can be safely copied to the device (eg. as part of a kernel launch) and either\n\n/// unwrapped or transmuted to an appropriate pointer.\n\n///\n\n/// `DevicePointer` is guaranteed to have an equivalent internal representation to a raw pointer.\n\n/// Thus, it can be safely reinterpreted or transmuted to `*mut T`. It is safe to pass a\n\n/// `DevicePointer` through an FFI boundary to C code expecting a `*mut T`, so long as the code on\n\n/// the other side of that boundary does not attempt to dereference the pointer on the CPU. It is\n\n/// thus possible to pass a `DevicePointer` to a CUDA kernel written in C.\n\n#[repr(transparent)]\n\n#[derive(Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]\n\npub struct DevicePointer<T>(*mut T);\n\nunsafe impl<T> DeviceCopy for DevicePointer<T> {}\n\nimpl<T> DevicePointer<T> {\n", "file_path": "rustacuda_core/src/memory/pointer.rs", "rank": 41, "score": 10.464046202360798 }, { "content": "/// #[derive(Clone, DeviceCopy)]\n\n/// struct MyStruct(u64);\n\n///\n\n/// # fn main() {}\n\n/// ```\n\n///\n\n/// This is safe because the `DeviceCopy` derive macro will check that all fields of the struct,\n\n/// enum or union implement `DeviceCopy`. 
For example, this fails to compile, because `Vec` cannot\n\n/// be copied to the device:\n\n///\n\n/// ```compile_fail\n\n/// # #[macro_use]\n\n/// # extern crate rustacuda;\n\n/// #[derive(Clone, DeviceCopy)]\n\n/// struct MyStruct(Vec<u64>);\n\n/// # fn main() {}\n\n/// ```\n\n///\n\n/// You can also implement `DeviceCopy` unsafely:\n\n///\n", "file_path": "rustacuda_core/src/memory/mod.rs", "rank": 42, "score": 10.32045431373285 }, { "content": "/// ```\n\n/// use rustacuda::memory::DeviceCopy;\n\n///\n\n/// #[derive(Clone)]\n\n/// struct MyStruct(u64);\n\n///\n\n/// unsafe impl DeviceCopy for MyStruct { }\n\n/// ```\n\n///\n\n/// ## What is the difference between `DeviceCopy` and `Copy`?\n\n///\n\n/// `DeviceCopy` is stricter than `Copy`. `DeviceCopy` must only be implemented for types which\n\n/// do not contain references or raw pointers to non-device-accessible memory. `DeviceCopy` also\n\n/// does not imply copy semantics - that is, `DeviceCopy` values are not implicitly copied on\n\n/// assignment the way that `Copy` values are. This is helpful, as it may be desirable to implement\n\n/// `DeviceCopy` for large structures that would be inefficient to copy for every assignment.\n\n///\n\n/// ## When can't my type be `DeviceCopy`?\n\n///\n\n/// Some types cannot be safely copied to the device. 
For example, copying `&T` would create an\n", "file_path": "rustacuda_core/src/memory/mod.rs", "rank": 43, "score": 10.183975635144304 }, { "content": " /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_flags() -> CudaResult<ContextFlags> {\n\n unsafe {\n\n let mut flags = 0u32;\n\n cuda::cuCtxGetFlags(&mut flags as *mut u32).to_result()?;\n\n Ok(ContextFlags::from_bits_truncate(flags))\n\n }\n\n }\n\n\n\n /// Return resource limits for the current context.\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// # use rustacuda::device::Device;\n\n /// # use rustacuda::context::{ Context, ContextFlags, CurrentContext, ResourceLimit };\n\n /// # use std::error::Error;\n\n /// #\n", "file_path": "src/context.rs", "rank": 44, "score": 10.063956526557117 }, { "content": "pub struct Device {\n\n pub(crate) device: CUdevice,\n\n}\n\nimpl Device {\n\n /// Get the number of CUDA-capable devices.\n\n ///\n\n /// Returns the number of devices with compute-capability 2.0 or greater which are available\n\n /// for execution.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// # use rustacuda::*;\n\n /// # use std::error::Error;\n\n /// # fn main() -> Result<(), Box<dyn Error>> {\n\n /// # init(CudaFlags::empty())?;\n\n /// use rustacuda::device::Device;\n\n /// let num_devices = Device::num_devices()?;\n\n /// println!(\"Number of devices: {}\", num_devices);\n\n /// # Ok(())\n\n /// # }\n", "file_path": "src/device.rs", "rank": 46, "score": 9.881540837605671 }, { "content": " /// ```\n\n pub fn num_devices() -> CudaResult<u32> {\n\n unsafe {\n\n let mut num_devices = 0i32;\n\n cuDeviceGetCount(&mut num_devices as *mut i32).to_result()?;\n\n Ok(num_devices as u32)\n\n }\n\n }\n\n\n\n /// Get a handle to the `ordinal`'th CUDA device.\n\n ///\n\n /// Ordinal must be in the range `0..num_devices()`. 
If not, an error will be returned.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// # use rustacuda::*;\n\n /// # use std::error::Error;\n\n /// # fn main() -> Result<(), Box<dyn Error>> {\n\n /// # init(CudaFlags::empty())?;\n\n /// use rustacuda::device::Device;\n", "file_path": "src/device.rs", "rank": 47, "score": 9.690223911775966 }, { "content": "\n\n /// Specify that the created event may be used as an interprocess event.\n\n /// (not supported yet by RustaCUDA). This flag requires\n\n /// `DISABLE_TIMING` to be set as well.\n\n const INTERPROCESS = 0x4;\n\n }\n\n}\n\n\n\n/// Status enum that represents the current status of an event.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum EventStatus {\n\n /// Ready indicates that all work captured by the event has been completed.\n\n ///\n\n /// The CUDA documentation states that for Unified Memory, `EventStatus::Ready` is\n\n /// equivalent to having called `Event::synchronize`.\n\n Ready,\n\n\n\n /// `EventStatus::NotReady` indicates that the work captured by the event is still\n\n /// incomplete.\n\n NotReady,\n", "file_path": "src/event.rs", "rank": 48, "score": 9.5973730473602 }, { "content": "/// invalid reference on the device which would segfault if dereferenced. Generalizing this, any\n\n/// type implementing `Drop` cannot be `DeviceCopy` since it is responsible for some resource that\n\n/// would not be available on the device.\n\npub unsafe trait DeviceCopy {\n\n // Empty\n\n}\n\n\n\nmacro_rules! impl_device_copy {\n\n ($($t:ty)*) => {\n\n $(\n\n unsafe impl DeviceCopy for $t {}\n\n )*\n\n }\n\n}\n\n\n\nimpl_device_copy!(\n\n usize u8 u16 u32 u64 u128\n\n isize i8 i16 i32 i64 i128\n\n f32 f64\n\n bool char\n", "file_path": "rustacuda_core/src/memory/mod.rs", "rank": 49, "score": 9.586185528922782 }, { "content": "//! struct ShouldFailStruct{v: Vec<u64>}\n\n//!\n\n//! fn main() {}\n\n//! ```\n\n//!\n\n//! ```compile_fail\n\n//! #[macro_use]\n\n//! extern crate rustacuda;\n\n//! 
extern crate rustacuda_core;\n\n//!\n\n//! #[derive(Clone, DeviceCopy)]\n\n//! enum ShouldFailTupleEnum {\n\n//! Unit,\n\n//! Tuple(Vec<u64>),\n\n//! }\n\n//!\n\n//! fn main() {}\n\n//! ```\n\n//!\n\n//! ```compile_fail\n", "file_path": "src/derive_compile_fail.rs", "rank": 50, "score": 9.543895663316302 }, { "content": "}\n\n\n\nbitflags! {\n\n /// Bit flags for configuring a CUDA Stream waiting on an CUDA Event.\n\n ///\n\n /// Current versions of CUDA support only the default flag.\n\n pub struct StreamWaitEventFlags: u32 {\n\n /// No flags set.\n\n const DEFAULT = 0x0;\n\n }\n\n}\n\n\n\n/// A stream of work for the device to perform.\n\n///\n\n/// See the module-level documentation for more information.\n\n#[derive(Debug)]\n\npub struct Stream {\n\n inner: CUstream,\n\n}\n\nimpl Stream {\n", "file_path": "src/stream.rs", "rank": 51, "score": 9.542409842830459 }, { "content": " let version = CudaApiVersion { version: 9020 };\n\n assert_eq!(version.major(), 9);\n\n assert_eq!(version.minor(), 2);\n\n }\n\n\n\n #[test]\n\n fn test_init_twice() {\n\n init(CudaFlags::empty()).unwrap();\n\n init(CudaFlags::empty()).unwrap();\n\n }\n\n}\n\n\n\n// Fake module with a private trait used to prevent outside code from implementing certain traits.\n\npub(crate) mod private {\n", "file_path": "src/lib.rs", "rank": 52, "score": 9.47954036666924 }, { "content": " // No choice but to panic if this fails.\n\n unsafe {\n\n cuda_free_locked(self.buf).expect(\"Failed to deallocate CUDA page-locked memory.\");\n\n }\n\n }\n\n self.capacity = 0;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::mem;\n\n\n\n #[derive(Clone, Debug)]\n\n struct ZeroSizedType;\n\n unsafe impl DeviceCopy for ZeroSizedType {}\n\n\n\n #[test]\n\n fn test_new() {\n", "file_path": "src/memory/locked.rs", "rank": 53, "score": 9.432521825322606 }, { "content": "// This works by faking a regular slice out of the device raw-pointer and the length and transmuting\n\n// I have no idea if 
this is safe or not. Probably not, though I can't imagine how the compiler\n\n// could possibly know that the pointer is not de-referenceable. I'm banking that we get proper\n\n// Dynamicaly-sized Types before the compiler authors break this assumption.\n\nimpl<T> DeviceSlice<T> {\n\n /// Returns the number of elements in the slice.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let a = DeviceBuffer::from_slice(&[1, 2, 3]).unwrap();\n\n /// assert_eq!(a.len(), 3);\n\n /// ```\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n /// Returns `true` if the slice has a length of 0.\n", "file_path": "src/memory/device/device_slice.rs", "rank": 54, "score": 9.425437567727492 }, { "content": "}\n\n\n\n/// An event to track work submitted to a stream.\n\n///\n\n/// See the module-level documentation for more information.\n\n#[derive(Debug)]\n\npub struct Event(CUevent);\n\n\n\nimpl Event {\n\n /// Create a new event with the specified flags.\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// # use rustacuda::quick_init;\n\n /// # use std::error::Error;\n\n /// # fn main() -> Result<(), Box<dyn Error>> {\n\n /// # let _context = quick_init()?;\n\n /// use rustacuda::event::{Event, EventFlags};\n\n ///\n", "file_path": "src/event.rs", "rank": 55, "score": 9.406157920804144 }, { "content": "/// [CurrentContext::get_shared_memory_config](struct.CurrentContext.html#method.get_shared_memory_config) and\n\n/// [CurrentContext::set_shared_memory_config](struct.CurrentContext.html#method.set_shared_memory_config) to get\n\n/// and set the cache config for the current context.\n\n#[repr(u32)]\n\n#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]\n\npub enum SharedMemoryConfig {\n\n /// Set shared-memory bank size to the default.\n\n DefaultBankSize = 0,\n\n /// Set shared-memory bank width to four bytes\n\n FourByteBankSize = 1,\n\n /// Set shared-memory bank width to 
eight bytes\n\n EightByteBankSize = 2,\n\n\n\n #[doc(hidden)]\n\n __Nonexhaustive,\n\n}\n\n\n\nbitflags! {\n\n /// Bit flags for initializing the CUDA context.\n\n ///\n", "file_path": "src/context.rs", "rank": 56, "score": 9.373923325373468 }, { "content": " /// let unowned = context.get_unowned();\n\n /// let version = unowned.get_api_version()?;\n\n /// #\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_api_version(&self) -> CudaResult<CudaApiVersion> {\n\n unsafe {\n\n let mut api_version = 0u32;\n\n cuda::cuCtxGetApiVersion(self.inner, &mut api_version as *mut u32).to_result()?;\n\n Ok(CudaApiVersion {\n\n version: api_version as i32,\n\n })\n\n }\n\n }\n\n}\n\n\n\n/// Type used to represent the thread-local context stack.\n\n#[derive(Debug)]\n\npub struct ContextStack;\n", "file_path": "src/context.rs", "rank": 57, "score": 9.354189813910613 }, { "content": "#[allow(missing_docs)]\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum CudaError {\n\n // CUDA errors\n\n InvalidValue = 1,\n\n OutOfMemory = 2,\n\n NotInitialized = 3,\n\n Deinitialized = 4,\n\n ProfilerDisabled = 5,\n\n ProfilerNotInitialized = 6,\n\n ProfilerAlreadyStarted = 7,\n\n ProfilerAlreadyStopped = 8,\n\n NoDevice = 100,\n\n InvalidDevice = 101,\n\n InvalidImage = 200,\n\n InvalidContext = 201,\n\n ContextAlreadyCurrent = 202,\n\n MapFailed = 205,\n\n UnmapFailed = 206,\n\n ArrayIsMapped = 207,\n", "file_path": "src/error.rs", "rank": 58, "score": 9.331408211831533 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_device_buffer {\n\n use super::*;\n\n use crate::memory::device::DeviceBox;\n\n use crate::stream::{Stream, StreamFlags};\n\n\n\n #[derive(Clone, Debug)]\n\n struct ZeroSizedType;\n\n unsafe impl DeviceCopy for ZeroSizedType {}\n\n\n\n #[test]\n\n fn test_from_slice_drop() {\n\n let _context = crate::quick_init().unwrap();\n\n let buf = DeviceBuffer::from_slice(&[0u64, 1, 2, 3, 4, 5]).unwrap();\n\n drop(buf);\n\n }\n\n\n", "file_path": 
"src/memory/device/device_buffer.rs", "rank": 59, "score": 9.30304124962423 }, { "content": "//! This module re-exports a number of commonly-used types for working with RustaCUDA.\n\n//!\n\n//! This allows the user to `use rustacuda::prelude::*;` and have the most commonly-used types\n\n//! available quickly.\n\n\n\npub use crate::context::{Context, ContextFlags};\n\npub use crate::device::Device;\n\npub use crate::memory::{CopyDestination, DeviceBuffer, UnifiedBuffer};\n\npub use crate::module::Module;\n\npub use crate::stream::{Stream, StreamFlags};\n\npub use crate::CudaFlags;\n", "file_path": "src/prelude.rs", "rank": 60, "score": 9.258269508240861 }, { "content": "/// `UnifiedPointer` can be safely dereferenced by the CPU, as the memory allocation it points to is\n\n/// shared between the CPU and the GPU. It can also be safely copied to the device (eg. as part of\n\n/// a kernel launch).\n\n///\n\n/// `UnifiedPointer` is guaranteed to have an equivalent internal representation to a raw pointer.\n\n/// Thus, it can be safely reinterpreted or transmuted to `*mut T`. It is also safe to pass a\n\n/// `UnifiedPointer` through an FFI boundary to C code expecting a `*mut T`. It is\n\n/// thus possible to pass a `UnifiedPointer` to a CUDA kernel written in C.\n\n#[repr(transparent)]\n\n#[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct UnifiedPointer<T: DeviceCopy>(*mut T);\n\nunsafe impl<T: DeviceCopy> DeviceCopy for UnifiedPointer<T> {}\n\nimpl<T: DeviceCopy> UnifiedPointer<T> {\n\n /// Returns a null unified pointer.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n", "file_path": "rustacuda_core/src/memory/pointer.rs", "rank": 61, "score": 9.190038270234382 }, { "content": "//! 
Functions and types for working with CUDA modules.\n\n\n\nuse crate::error::{CudaResult, DropResult, ToResult};\n\nuse crate::function::Function;\n\nuse crate::memory::{CopyDestination, DeviceCopy, DevicePointer};\n\nuse cuda_sys::cuda;\n\nuse std::ffi::{c_void, CStr};\n\nuse std::fmt;\n\nuse std::marker::PhantomData;\n\nuse std::mem;\n\nuse std::ptr;\n\n\n\n/// A compiled CUDA module, loaded into a context.\n\n#[derive(Debug)]\n\npub struct Module {\n\n inner: cuda::CUmodule,\n\n}\n\nimpl Module {\n\n /// Load a module from the given file name into the current context.\n\n ///\n", "file_path": "src/module.rs", "rank": 62, "score": 9.175624628207789 }, { "content": " /// use rustacuda::module::Module;\n\n /// use std::ffi::CString;\n\n ///\n\n /// let ptx = CString::new(include_str!(\"../resources/add.ptx\"))?;\n\n /// let module = Module::load_from_string(&ptx)?;\n\n /// let name = CString::new(\"my_constant\")?;\n\n /// let symbol = module.get_global::<u32>(&name)?;\n\n /// let mut host_const = 0;\n\n /// symbol.copy_to(&mut host_const)?;\n\n /// assert_eq!(314, host_const);\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_global<'a, T: DeviceCopy>(&'a self, name: &CStr) -> CudaResult<Symbol<'a, T>> {\n\n unsafe {\n\n let mut ptr: DevicePointer<T> = DevicePointer::null();\n\n let mut size: usize = 0;\n\n\n\n cuda::cuModuleGetGlobal_v2(\n\n &mut ptr as *mut DevicePointer<T> as *mut cuda::CUdeviceptr,\n", "file_path": "src/module.rs", "rank": 63, "score": 9.12913642199655 }, { "content": "impl<T: Display + DeviceCopy> Display for UnifiedBox<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Display::fmt(&**self, f)\n\n }\n\n}\n\nimpl<T: DeviceCopy> Pointer for UnifiedBox<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Pointer::fmt(&self.ptr, f)\n\n }\n\n}\n\nimpl<T: DeviceCopy + PartialEq> PartialEq for UnifiedBox<T> {\n\n fn eq(&self, other: &UnifiedBox<T>) -> bool {\n\n PartialEq::eq(&**self, &**other)\n\n 
}\n\n}\n\nimpl<T: DeviceCopy + Eq> Eq for UnifiedBox<T> {}\n\nimpl<T: DeviceCopy + PartialOrd> PartialOrd for UnifiedBox<T> {\n\n fn partial_cmp(&self, other: &UnifiedBox<T>) -> Option<Ordering> {\n\n PartialOrd::partial_cmp(&**self, &**other)\n\n }\n", "file_path": "src/memory/unified.rs", "rank": 64, "score": 9.059761071621118 }, { "content": "#[doc(hidden)]\n\npub use rustacuda_derive::*;\n\n\n\npub mod context;\n\npub mod device;\n\npub mod error;\n\npub mod event;\n\npub mod function;\n\npub mod memory;\n\npub mod module;\n\npub mod prelude;\n\npub mod stream;\n\n\n\nmod derive_compile_fail;\n\n\n\nuse crate::context::{Context, ContextFlags};\n\nuse crate::device::Device;\n\nuse crate::error::{CudaResult, ToResult};\n\nuse cuda_sys::cuda::{cuDriverGetVersion, cuInit};\n\n\n", "file_path": "src/lib.rs", "rank": 65, "score": 9.052514899037032 }, { "content": " ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let mut buffer = UnifiedBuffer::new(&0u64, 5).unwrap();\n\n /// buffer[0] = 1;\n\n /// ```\n\n pub fn new(value: &T, size: usize) -> CudaResult<Self> {\n\n unsafe {\n\n let mut uninit = UnifiedBuffer::uninitialized(size)?;\n\n for x in 0..size {\n\n *uninit.get_unchecked_mut(x) = value.clone();\n\n }\n\n Ok(uninit)\n\n }\n\n }\n\n\n\n /// Allocate a new unified buffer of the same size as `slice`, initialized with a clone of\n", "file_path": "src/memory/unified.rs", "rank": 66, "score": 8.791955680723436 }, { "content": "//! guaranteed to be the same in different versions of RustaCUDA. If you need to pass them through\n\n//! an FFI boundary, you must convert them to FFI-safe primitives yourself. For example, with\n\n//! `UnifiedBuffer`, use the `as_unified_ptr()` and `len()` functions to get the primitives, and\n\n//! `mem::forget()` the Buffer so that it isn't dropped. Again, as with regular Rust, the caller is\n\n//! 
responsible for reconstructing the `UnifiedBuffer` using `from_raw_parts()` and dropping it to\n\n//! ensure that the memory allocation is safely cleaned up.\n\n\n\npub mod array;\n\n\n\nmod device;\n\nmod locked;\n\nmod malloc;\n\nmod unified;\n\n\n\npub use self::device::*;\n\npub use self::locked::*;\n\npub use self::malloc::*;\n\npub use self::unified::*;\n\npub use rustacuda_core::{DeviceCopy, DevicePointer, UnifiedPointer};\n", "file_path": "src/memory/mod.rs", "rank": 67, "score": 8.639277534361739 }, { "content": " /// ```\n\n /// # use rustacuda::device::Device;\n\n /// # use rustacuda::context::{Context, ContextFlags};\n\n /// # use std::error::Error;\n\n /// #\n\n /// # fn main () -> Result<(), Box<dyn Error>> {\n\n /// rustacuda::init(rustacuda::CudaFlags::empty())?;\n\n /// let device = Device::get_device(0)?;\n\n /// let context = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?;\n\n /// let version = context.get_api_version()?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_api_version(&self) -> CudaResult<CudaApiVersion> {\n\n unsafe {\n\n let mut api_version = 0u32;\n\n cuda::cuCtxGetApiVersion(self.inner, &mut api_version as *mut u32).to_result()?;\n\n Ok(CudaApiVersion {\n\n version: api_version as i32,\n\n })\n", "file_path": "src/context.rs", "rank": 68, "score": 8.605003312897802 }, { "content": " /// The least stream priority\n\n pub least: i32,\n\n /// The greatest stream priority\n\n pub greatest: i32,\n\n}\n\n\n\n/// Type representing the top context in the thread-local stack.\n\n#[derive(Debug)]\n\npub struct CurrentContext;\n\nimpl CurrentContext {\n\n /// Returns the preferred cache configuration for the current context.\n\n ///\n\n /// On devices where the L1 cache and shared memory use the same hardware resources, this\n\n /// function returns the preferred cache configuration for the current context. 
For devices\n\n /// where the size of the L1 cache and shared memory are fixed, this will always return\n\n /// `CacheConfig::PreferNone`.\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n", "file_path": "src/context.rs", "rank": 69, "score": 8.579775257376799 }, { "content": " /// let device = Device::get_device(0)?;\n\n /// println!(\"Device Name: {}\", device.name()?);\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_device(ordinal: u32) -> CudaResult<Device> {\n\n unsafe {\n\n let mut device = Device { device: 0 };\n\n cuDeviceGet(&mut device.device as *mut CUdevice, ordinal as i32).to_result()?;\n\n Ok(device)\n\n }\n\n }\n\n\n\n /// Return an iterator over all CUDA devices.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// # use rustacuda::*;\n\n /// # use std::error::Error;\n\n /// # fn main() -> Result<(), Box<dyn Error>> {\n", "file_path": "src/device.rs", "rank": 70, "score": 8.510924428158269 }, { "content": "use crate::error::CudaResult;\n\nuse crate::stream::Stream;\n\n\n\nmod device_box;\n\nmod device_buffer;\n\nmod device_slice;\n\n\n\npub use self::device_box::*;\n\npub use self::device_buffer::*;\n\npub use self::device_slice::*;\n\n\n\n/// Sealed trait implemented by types which can be the source or destination when copying data\n\n/// to/from the device or from one device allocation to another.\n", "file_path": "src/memory/device/mod.rs", "rank": 71, "score": 8.498091498266156 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Pointer::fmt(&self.0, f)\n\n }\n\n}\n\nimpl<T: DeviceCopy> Clone for UnifiedPointer<T> {\n\n fn clone(&self) -> Self {\n\n UnifiedPointer(self.0)\n\n }\n\n}\n\nimpl<T: DeviceCopy> Copy for UnifiedPointer<T> {}\n", "file_path": "rustacuda_core/src/memory/pointer.rs", "rank": 72, "score": 8.472612353262463 }, { "content": "use crate::stream::Stream;\n\n\n\nuse std::mem;\n\nuse std::ptr;\n\n\n\nbitflags! 
{\n\n /// Bit flags for configuring a CUDA Event.\n\n ///\n\n /// The CUDA documentation claims that setting `DISABLE_TIMING` and `BLOCKING_SYNC` provides\n\n /// the best performance for `query()` and `stream.wait_event()`.\n\n pub struct EventFlags: u32 {\n\n /// The default event creation flag.\n\n const DEFAULT = 0x0;\n\n\n\n /// Specify that the created event should busy-wait on blocking\n\n /// function calls.\n\n const BLOCKING_SYNC = 0x1;\n\n\n\n /// Specify that the created event does not need to record timing data.\n\n const DISABLE_TIMING = 0x2;\n", "file_path": "src/event.rs", "rank": 73, "score": 8.460454767488551 }, { "content": "pub struct UnifiedBox<T: DeviceCopy> {\n\n ptr: UnifiedPointer<T>,\n\n}\n\nimpl<T: DeviceCopy> UnifiedBox<T> {\n\n /// Allocate unified memory and place val into it.\n\n ///\n\n /// This doesn't actually allocate if `T` is zero-sized.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If a CUDA error occurs, returns that error.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let five = UnifiedBox::new(5).unwrap();\n\n /// ```\n\n pub fn new(val: T) -> CudaResult<Self> {\n", "file_path": "src/memory/unified.rs", "rank": 74, "score": 8.446717752598229 }, { "content": "}\n\nimpl Context {\n\n /// Create a CUDA context for the given device.\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// # use rustacuda::device::Device;\n\n /// # use rustacuda::context::{Context, ContextFlags};\n\n /// # use std::error::Error;\n\n /// #\n\n /// # fn main () -> Result<(), Box<dyn Error>> {\n\n /// rustacuda::init(rustacuda::CudaFlags::empty())?;\n\n /// let device = Device::get_device(0)?;\n\n /// let context = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn create_and_push(flags: ContextFlags, device: Device) -> CudaResult<Context> {\n\n unsafe {\n", 
"file_path": "src/context.rs", "rank": 75, "score": 8.414311025609816 }, { "content": "use crate::error::{CudaResult, DropResult, ToResult};\n\nuse crate::memory::device::{AsyncCopyDestination, CopyDestination, DeviceSlice};\n\nuse crate::memory::malloc::{cuda_free, cuda_malloc};\n\nuse crate::memory::DeviceCopy;\n\nuse crate::memory::DevicePointer;\n\nuse crate::stream::Stream;\n\nuse cuda_sys::cuda;\n\nuse std::mem;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse std::ptr;\n\n\n\n/// Fixed-size device-side buffer. Provides basic access to device memory.\n\n#[derive(Debug)]\n\npub struct DeviceBuffer<T> {\n\n buf: DevicePointer<T>,\n\n capacity: usize,\n\n}\n\nimpl<T> DeviceBuffer<T> {\n\n /// Allocate a new device buffer large enough to hold `size` `T`'s, but without\n", "file_path": "src/memory/device/device_buffer.rs", "rank": 76, "score": 8.357234160292146 }, { "content": " where\n\n T: Sized,\n\n {\n\n self.wrapping_offset((count as isize).wrapping_neg())\n\n }\n\n}\n\nimpl<T> fmt::Pointer for DevicePointer<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Pointer::fmt(&self.0, f)\n\n }\n\n}\n\nimpl<T> Clone for DevicePointer<T> {\n\n fn clone(&self) -> Self {\n\n DevicePointer(self.0)\n\n }\n\n}\n\nimpl<T> Copy for DevicePointer<T> {}\n\n\n\n/// A pointer to unified memory.\n\n///\n", "file_path": "rustacuda_core/src/memory/pointer.rs", "rank": 77, "score": 8.299371318005688 }, { "content": " Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[derive(Clone, Debug)]\n\n struct ZeroSizedType;\n\n unsafe impl DeviceCopy for ZeroSizedType {}\n\n\n\n #[test]\n\n fn test_cuda_malloc() {\n\n let _context = crate::quick_init().unwrap();\n\n unsafe {\n\n let device_mem = cuda_malloc::<u64>(1).unwrap();\n\n assert!(!device_mem.is_null());\n\n cuda_free(device_mem).unwrap();\n\n }\n\n }\n", "file_path": "src/memory/malloc.rs", "rank": 78, "score": 8.265463509625079 }, { "content": " (**self).hash(state);\n\n }\n\n}\n\n\n\n/// 
Fixed-size buffer in unified memory.\n\n///\n\n/// See the [`module-level documentation`](../memory/index.html) for more details on unified memory.\n\n#[derive(Debug)]\n\npub struct UnifiedBuffer<T: DeviceCopy> {\n\n buf: UnifiedPointer<T>,\n\n capacity: usize,\n\n}\n\nimpl<T: DeviceCopy + Clone> UnifiedBuffer<T> {\n\n /// Allocate a new unified buffer large enough to hold `size` `T`'s and initialized with\n\n /// clones of `value`.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If the allocation fails, returns the error from CUDA. If `size` is large enough that\n\n /// `size * mem::sizeof::<T>()` overflows usize, then returns InvalidMemoryAllocation.\n", "file_path": "src/memory/unified.rs", "rank": 79, "score": 8.115905067931854 }, { "content": " /// let mut host_buf = [0u64, 0, 0, 0, 0];\n\n /// slice.copy_to(&mut host_buf).unwrap();\n\n /// assert_eq!([0u64, 0, 2, 3, 0], host_buf);\n\n /// ```\n\n pub fn chunks_mut(&mut self, chunk_size: usize) -> DeviceChunksMut<T> {\n\n DeviceChunksMut(self.0.chunks_mut(chunk_size))\n\n }\n\n\n\n /// Private function used to transmute a CPU slice (which must have the device pointer as it's\n\n /// buffer pointer) to a DeviceSlice. Completely unsafe.\n\n pub(super) unsafe fn from_slice(slice: &[T]) -> &DeviceSlice<T> {\n\n &*(slice as *const [T] as *const DeviceSlice<T>)\n\n }\n\n\n\n /// Private function used to transmute a mutable CPU slice (which must have the device pointer\n\n /// as it's buffer pointer) to a mutable DeviceSlice. 
Completely unsafe.\n\n pub(super) unsafe fn from_slice_mut(slice: &mut [T]) -> &mut DeviceSlice<T> {\n\n &mut *(slice as *mut [T] as *mut DeviceSlice<T>)\n\n }\n\n\n", "file_path": "src/memory/device/device_slice.rs", "rank": 80, "score": 7.9354287336383145 }, { "content": " ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// unsafe {\n\n /// let mut unified_ptr = cuda_malloc_unified::<u64>(5).unwrap();\n\n /// let offset = unified_ptr.wrapping_add(4).wrapping_sub(3); // Points to the 2nd u64 in the buffer\n\n /// cuda_free_unified(unified_ptr); // Must free the buffer using the original pointer\n\n /// }\n\n /// ```\n\n pub fn wrapping_sub(self, count: usize) -> Self\n\n where\n\n T: Sized,\n\n {\n\n self.wrapping_offset((count as isize).wrapping_neg())\n\n }\n\n}\n\nimpl<T: DeviceCopy> fmt::Pointer for UnifiedPointer<T> {\n", "file_path": "rustacuda_core/src/memory/pointer.rs", "rank": 81, "score": 7.92035520426586 }, { "content": " /// the data in `slice`.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If the allocation fails, returns the error from CUDA.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let values = [0u64; 5];\n\n /// let mut buffer = UnifiedBuffer::from_slice(&values).unwrap();\n\n /// buffer[0] = 1;\n\n /// ```\n\n pub fn from_slice(slice: &[T]) -> CudaResult<Self> {\n\n unsafe {\n\n let mut uninit = UnifiedBuffer::uninitialized(slice.len())?;\n\n for (i, x) in slice.iter().enumerate() {\n\n *uninit.get_unchecked_mut(i) = x.clone();\n", "file_path": "src/memory/unified.rs", "rank": 82, "score": 7.839511861590573 }, { "content": " inner: CUcontext,\n\n}\n\nunsafe impl Send for UnownedContext {}\n\nunsafe impl Sync for UnownedContext {}\n\nimpl UnownedContext {\n\n /// Get the API version used to create this context.\n\n ///\n\n /// This is not necessarily the 
latest version supported by the driver.\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// # use rustacuda::device::Device;\n\n /// # use rustacuda::context::{Context, ContextFlags};\n\n /// # use std::error::Error;\n\n /// #\n\n /// # fn main () -> Result<(), Box<dyn Error>> {\n\n /// # rustacuda::init(rustacuda::CudaFlags::empty())?;\n\n /// # let device = Device::get_device(0)?;\n\n /// let context = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?;\n", "file_path": "src/context.rs", "rank": 83, "score": 7.766102755176645 }, { "content": " Ok(())\n\n }\n\n\n\n // Ensure that the two enums always stay aligned.\n\n #[test]\n\n fn test_enums_align() {\n\n assert_eq!(\n\n DeviceAttribute::__NonExhaustive as u32,\n\n CUdevice_attribute_enum::CU_DEVICE_ATTRIBUTE_MAX as u32\n\n );\n\n }\n\n}\n", "file_path": "src/device.rs", "rank": 84, "score": 7.752295292515367 }, { "content": " /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let buf = DeviceBuffer::from_slice(&[0u64, 1, 2, 3, 4, 5]).unwrap();\n\n /// let (left, right) = buf.split_at(3);\n\n /// let mut left_host = [0u64, 0, 0];\n\n /// let mut right_host = [0u64, 0, 0];\n\n /// left.copy_to(&mut left_host).unwrap();\n\n /// right.copy_to(&mut right_host).unwrap();\n\n /// assert_eq!([0u64, 1, 2], left_host);\n\n /// assert_eq!([3u64, 4, 5], right_host);\n\n /// ```\n\n pub fn split_at(&self, mid: usize) -> (&DeviceSlice<T>, &DeviceSlice<T>) {\n\n let (left, right) = self.0.split_at(mid);\n\n unsafe {\n\n (\n\n DeviceSlice::from_slice(left),\n\n DeviceSlice::from_slice(right),\n\n )\n\n }\n", "file_path": "src/memory/device/device_slice.rs", "rank": 85, "score": 7.655275222146266 }, { "content": "\n\n#[cfg(test)]\n\nmod test_device_box {\n\n use super::*;\n\n\n\n #[derive(Clone, Debug)]\n\n struct ZeroSizedType;\n\n unsafe impl DeviceCopy for ZeroSizedType {}\n\n\n\n #[test]\n\n fn test_allocate_and_free_device_box() 
{\n\n let _context = crate::quick_init().unwrap();\n\n let x = DeviceBox::new(&5u64).unwrap();\n\n drop(x);\n\n }\n\n\n\n #[test]\n\n fn test_device_box_allocates_for_non_zst() {\n\n let _context = crate::quick_init().unwrap();\n\n let x = DeviceBox::new(&5u64).unwrap();\n", "file_path": "src/memory/device/device_box.rs", "rank": 86, "score": 7.631864160465291 }, { "content": "//! This module is a dummy module. It contains doctests that should fail to compile. It's used for\n\n//! testing the DeriveCopy custom-derive macro and should not contain any actual code.\n\n//!\n\n//! ```compile_fail\n\n//! #[macro_use]\n\n//! extern crate rustacuda;\n\n//! extern crate rustacuda_core;\n\n//!\n\n//! #[derive(Clone, DeviceCopy)]\n\n//! struct ShouldFailTuple(Vec<u64>);\n\n//!\n\n//! fn main() {}\n\n//! ```\n\n//!\n\n//! ```compile_fail\n\n//! #[macro_use]\n\n//! extern crate rustacuda;\n\n//! extern crate rustacuda_core;\n\n//!\n\n//! #[derive(Clone, DeviceCopy)]\n", "file_path": "src/derive_compile_fail.rs", "rank": 87, "score": 7.593487296156065 }, { "content": "\n\n symbol.copy_from(&100)?;\n\n\n\n let mut constant_copy = 0u32;\n\n symbol.copy_to(&mut constant_copy)?;\n\n assert_eq!(100, constant_copy);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/module.rs", "rank": 88, "score": 7.563691473327677 }, { "content": " ///\n\n /// # Errors:\n\n ///\n\n /// If the allocation fails, returns the error from CUDA. 
If `size` is large enough that\n\n /// `size * mem::sizeof::<T>()` overflows usize, then returns InvalidMemoryAllocation.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let mut buffer = LockedBuffer::new(&0u64, 5).unwrap();\n\n /// buffer[0] = 1;\n\n /// ```\n\n pub fn new(value: &T, size: usize) -> CudaResult<Self> {\n\n unsafe {\n\n let mut uninit = LockedBuffer::uninitialized(size)?;\n\n for x in 0..size {\n\n *uninit.get_unchecked_mut(x) = value.clone();\n\n }\n", "file_path": "src/memory/locked.rs", "rank": 89, "score": 7.366013188966899 }, { "content": "}\n\n\n\n/// An iterator over a [`DeviceSlice`](struct.DeviceSlice.html) in (non-overlapping) chunks\n\n/// (`chunk_size` elements at a time).\n\n///\n\n/// When the slice len is not evenly divided by the chunk size, the last slice of the iteration will\n\n/// be the remainder.\n\n///\n\n/// This struct is created by the `chunks` method on `DeviceSlices`.\n\n#[derive(Debug, Clone)]\n\npub struct DeviceChunks<'a, T: 'a>(Chunks<'a, T>);\n\nimpl<'a, T> Iterator for DeviceChunks<'a, T> {\n\n type Item = &'a DeviceSlice<T>;\n\n\n\n fn next(&mut self) -> Option<&'a DeviceSlice<T>> {\n\n self.0\n\n .next()\n\n .map(|slice| unsafe { DeviceSlice::from_slice(slice) })\n\n }\n\n\n", "file_path": "src/memory/device/device_slice.rs", "rank": 90, "score": 7.349553954157687 }, { "content": " pub fn from_slice(slice: &[T]) -> CudaResult<Self> {\n\n unsafe {\n\n let mut uninit = LockedBuffer::uninitialized(slice.len())?;\n\n for (i, x) in slice.iter().enumerate() {\n\n *uninit.get_unchecked_mut(i) = x.clone();\n\n }\n\n Ok(uninit)\n\n }\n\n }\n\n}\n\nimpl<T: DeviceCopy> LockedBuffer<T> {\n\n /// Allocate a new page-locked buffer large enough to hold `size` `T`'s, but without\n\n /// initializing the contents.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If the allocation fails, returns the error from CUDA. 
If `size` is large enough that\n\n /// `size * mem::sizeof::<T>()` overflows usize, then returns InvalidMemoryAllocation.\n\n ///\n\n /// # Safety:\n", "file_path": "src/memory/locked.rs", "rank": 91, "score": 7.296470386770579 }, { "content": "\n\n // Kernel launches are asynchronous, so we wait for the kernels to finish executing.\n\n stream.synchronize()?;\n\n\n\n // Copy the results back to host memory\n\n let mut out_host = [0.0f32; 20];\n\n out_1.copy_to(&mut out_host[0..10])?;\n\n out_2.copy_to(&mut out_host[10..20])?;\n\n\n\n for x in out_host.iter() {\n\n assert_eq!(3.0 as u32, *x as u32);\n\n }\n\n\n\n println!(\"Launched kernel successfully.\");\n\n Ok(())\n\n}\n", "file_path": "examples/launch.rs", "rank": 92, "score": 7.296256257477442 }, { "content": " /// ```\n\n pub fn as_raw_mut(&mut self) -> *mut T {\n\n self.0\n\n }\n\n\n\n /// Returns true if the pointer is null.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// use std::ptr;\n\n /// unsafe {\n\n /// let null : *mut u64 = ptr::null_mut();\n\n /// assert!(UnifiedPointer::wrap(null).is_null());\n\n /// }\n\n /// ```\n\n pub fn is_null(self) -> bool {\n\n self.0.is_null()\n", "file_path": "rustacuda_core/src/memory/pointer.rs", "rank": 93, "score": 7.293876973730621 }, { "content": "impl<T: DeviceCopy> DeviceBox<T> {\n\n /// Allocate device memory and place val into it.\n\n ///\n\n /// This doesn't actually allocate if `T` is zero-sized.\n\n ///\n\n /// # Errors:\n\n ///\n\n /// If a CUDA error occurs, return the error.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let five = DeviceBox::new(&5).unwrap();\n\n /// ```\n\n pub fn new(val: &T) -> CudaResult<Self> {\n\n let mut dev_box = unsafe { DeviceBox::uninitialized()? 
};\n\n dev_box.copy_from(val)?;\n\n Ok(dev_box)\n", "file_path": "src/memory/device/device_box.rs", "rank": 94, "score": 7.274983784416144 }, { "content": " /// # Examples:\n\n ///\n\n /// ```\n\n /// # let _context = rustacuda::quick_init().unwrap();\n\n /// use rustacuda::memory::*;\n\n /// let values = [0u64; 5];\n\n /// let mut buffer = DeviceBuffer::from_slice(&values).unwrap();\n\n /// ```\n\n pub fn from_slice(slice: &[T]) -> CudaResult<Self> {\n\n unsafe {\n\n let mut uninit = DeviceBuffer::uninitialized(slice.len())?;\n\n uninit.copy_from(slice)?;\n\n Ok(uninit)\n\n }\n\n }\n\n\n\n /// Asynchronously allocate a new buffer of the same size as `slice`, initialized\n\n /// with a clone of the data in `slice`.\n\n ///\n\n /// For why this function is unsafe, see [AsyncCopyDestination](trait.AsyncCopyDestination.html)\n", "file_path": "src/memory/device/device_buffer.rs", "rank": 95, "score": 7.256442719016523 }, { "content": "\n\nbitflags! {\n\n /// Bit flags for configuring a CUDA Stream.\n\n pub struct StreamFlags: u32 {\n\n /// No flags set.\n\n const DEFAULT = 0x00;\n\n\n\n /// This stream does not synchronize with the NULL stream.\n\n ///\n\n /// Note that the name is chosen to correspond to CUDA documentation, but is nevertheless\n\n /// misleading. All work within a single stream is ordered and asynchronous regardless\n\n /// of whether this flag is set. All streams in RustaCUDA may execute work concurrently,\n\n /// regardless of the flag. However, for legacy reasons, CUDA has a notion of a NULL stream,\n\n /// which is used as the default when no other stream is provided. Work on other streams\n\n /// may not be executed concurrently with work on the NULL stream unless this flag is set.\n\n /// Since RustaCUDA does not provide access to the NULL stream, this flag has no effect in\n\n /// most circumstances. 
However, it is recommended to use it anyway, as some other crate\n\n /// in this binary may be using the NULL stream directly.\n\n const NON_BLOCKING = 0x01;\n\n }\n", "file_path": "src/stream.rs", "rank": 96, "score": 7.216808690374027 }, { "content": "bitflags! {\n\n /// Bit flags for initializing the CUDA driver. Currently, no flags are defined,\n\n /// so `CudaFlags::empty()` is the only valid value.\n\n pub struct CudaFlags: u32 {\n\n // We need to give bitflags at least one constant.\n\n #[doc(hidden)]\n\n const _ZERO = 0;\n\n }\n\n}\n\n\n\n/// Initialize the CUDA Driver API.\n\n///\n\n/// This must be called before any other RustaCUDA (or CUDA) function is called. Typically, this\n\n/// should be at the start of your program. All other functions will fail unless the API is\n\n/// initialized first.\n\n///\n\n/// The `flags` parameter is used to configure the CUDA API. Currently no flags are defined, so\n\n/// it must be `CudaFlags::empty()`.\n", "file_path": "src/lib.rs", "rank": 97, "score": 7.214732047020288 }, { "content": " .nth(n)\n\n .map(|slice| unsafe { DeviceSlice::from_slice_mut(slice) })\n\n }\n\n\n\n #[inline]\n\n fn last(self) -> Option<Self::Item> {\n\n self.0\n\n .last()\n\n .map(|slice| unsafe { DeviceSlice::from_slice_mut(slice) })\n\n }\n\n}\n\nimpl<'a, T> DoubleEndedIterator for DeviceChunksMut<'a, T> {\n\n #[inline]\n\n fn next_back(&mut self) -> Option<&'a mut DeviceSlice<T>> {\n\n self.0\n\n .next_back()\n\n .map(|slice| unsafe { DeviceSlice::from_slice_mut(slice) })\n\n }\n\n}\n\nimpl<'a, T> ExactSizeIterator for DeviceChunksMut<'a, T> {}\n", "file_path": "src/memory/device/device_slice.rs", "rank": 98, "score": 7.177726423037173 } ]
Rust
src/client/options/test.rs
judy2k/mongo-rust-driver
f39251fce6ede7ad712cca26e00f0b09fef6e085
use bson::{Bson, Document}; use pretty_assertions::assert_eq; use serde::Deserialize; use crate::{ client::options::{ClientOptions, StreamAddress}, error::ErrorKind, selection_criteria::{ReadPreference, SelectionCriteria}, test::run_spec_test, RUNTIME, }; #[derive(Debug, Deserialize)] struct TestFile { pub tests: Vec<TestCase>, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct TestCase { pub description: String, pub uri: String, pub valid: bool, pub warning: Option<bool>, pub hosts: Option<Vec<Document>>, pub auth: Option<Document>, pub options: Option<Document>, } fn document_from_client_options(mut options: ClientOptions) -> Document { let mut doc = Document::new(); if let Some(s) = options.app_name.take() { doc.insert("appname", s); } if let Some(mechanism) = options .credential .get_or_insert_with(Default::default) .mechanism .take() { doc.insert("authmechanism", mechanism.as_str().to_string()); } if let Some(d) = options .credential .get_or_insert_with(Default::default) .mechanism_properties .take() { doc.insert("authmechanismproperties", d); } if let Some(s) = options .credential .get_or_insert_with(Default::default) .source .take() { doc.insert("authsource", s); } if let Some(i) = options.connect_timeout.take() { doc.insert("connecttimeoutms", i.as_millis() as i64); } if let Some(b) = options.direct_connection.take() { doc.insert("directconnection", b); } if let Some(i) = options.heartbeat_freq.take() { doc.insert("heartbeatfrequencyms", i.as_millis() as i64); } if let Some(i) = options.local_threshold.take() { doc.insert("localthresholdms", i.as_millis() as i64); } if let Some(i) = options.max_idle_time.take() { doc.insert("maxidletimems", i.as_millis() as i64); } if let Some(s) = options.repl_set_name.take() { doc.insert("replicaset", s); } if let Some(SelectionCriteria::ReadPreference(read_pref)) = options.selection_criteria.take() { let (level, tag_sets, max_staleness) = match read_pref { ReadPreference::Primary => ("primary", 
None, None), ReadPreference::PrimaryPreferred { tag_sets, max_staleness, } => ("primaryPreferred", tag_sets, max_staleness), ReadPreference::Secondary { tag_sets, max_staleness, } => ("secondary", tag_sets, max_staleness), ReadPreference::SecondaryPreferred { tag_sets, max_staleness, } => ("secondaryPreferred", tag_sets, max_staleness), ReadPreference::Nearest { tag_sets, max_staleness, } => ("nearest", tag_sets, max_staleness), }; doc.insert("readpreference", level); if let Some(tag_sets) = tag_sets { let tags: Vec<Bson> = tag_sets .into_iter() .map(|tag_set| { let mut tag_set: Vec<_> = tag_set.into_iter().collect(); tag_set.sort(); Bson::Document(tag_set.into_iter().map(|(k, v)| (k, v.into())).collect()) }) .collect(); doc.insert("readpreferencetags", tags); } if let Some(i) = max_staleness { doc.insert("maxstalenessseconds", i.as_secs() as i64); } } if let Some(b) = options.retry_reads.take() { doc.insert("retryreads", b); } if let Some(b) = options.retry_writes.take() { doc.insert("retrywrites", b); } if let Some(i) = options.server_selection_timeout.take() { doc.insert("serverselectiontimeoutms", i.as_millis() as i64); } if let Some(i) = options.socket_timeout.take() { doc.insert("sockettimeoutms", i.as_millis() as i64); } if let Some(mut opt) = options.tls_options() { let ca_file_path = opt.ca_file_path.take(); let cert_key_file_path = opt.cert_key_file_path.take(); let allow_invalid_certificates = opt.allow_invalid_certificates.take(); if let Some(s) = ca_file_path { doc.insert("tls", true); doc.insert("tlscafile", s); } if let Some(s) = cert_key_file_path { doc.insert("tlscertificatekeyfile", s); } if let Some(b) = allow_invalid_certificates { doc.insert("tlsallowinvalidcertificates", b); } } if let Some(vec) = options.compressors.take() { doc.insert( "compressors", Bson::Array(vec.into_iter().map(Bson::String).collect()), ); } if let Some(s) = options.read_concern.take() { doc.insert("readconcernlevel", s.as_str()); } if let Some(i_or_s) = options 
.write_concern .get_or_insert_with(Default::default) .w .take() { doc.insert("w", i_or_s.to_bson()); } if let Some(i) = options .write_concern .get_or_insert_with(Default::default) .w_timeout .take() { doc.insert("wtimeoutms", i.as_millis() as i64); } if let Some(b) = options .write_concern .get_or_insert_with(Default::default) .journal .take() { doc.insert("journal", b); } if let Some(i) = options.zlib_compression.take() { doc.insert("zlibcompressionlevel", i64::from(i)); } doc } fn run_test(test_file: TestFile) { for mut test_case in test_file.tests { if test_case.description.contains("ipv6") || test_case.description.contains("IP literal") || test_case .description .contains("tlsCertificateKeyFilePassword") || test_case.description.contains("tlsAllowInvalidHostnames") || test_case.description.contains("single-threaded") || test_case.description.contains("serverSelectionTryOnce") || test_case.description.contains("Unix") || test_case.description.contains("relative path") { continue; } let warning = test_case.warning.take().unwrap_or(false); if test_case.valid && !warning { let mut is_unsupported_host_type = false; if let Some(mut json_hosts) = test_case.hosts.take() { is_unsupported_host_type = json_hosts.iter_mut().any(|h_json| { match h_json.remove("type").as_ref().and_then(Bson::as_str) { Some("ip_literal") | Some("unix") => true, _ => false, } }); if !is_unsupported_host_type { let options = RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .unwrap(); let hosts: Vec<_> = options .hosts .into_iter() .map(StreamAddress::into_document) .collect(); assert_eq!(hosts, json_hosts); } } if !is_unsupported_host_type { let options = RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .expect(&test_case.description); let mut options_doc = document_from_client_options(options); if let Some(json_options) = test_case.options { let mut json_options: Document = json_options .into_iter() .filter_map(|(k, v)| { if let Bson::Null = v { None } else { 
Some((k.to_lowercase(), v)) } }) .collect(); if !json_options.contains_key("tlsallowinvalidcertificates") { if let Some(val) = json_options.remove("tlsinsecure") { json_options .insert("tlsallowinvalidcertificates", !val.as_bool().unwrap()); } } options_doc = options_doc .into_iter() .filter(|(ref key, _)| json_options.contains_key(key)) .collect(); assert_eq!(options_doc, json_options, "{}", test_case.description) } if let Some(json_auth) = test_case.auth { let json_auth: Document = json_auth .into_iter() .filter_map(|(k, v)| { if let Bson::Null = v { None } else { Some((k.to_lowercase(), v)) } }) .collect(); let options = RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .unwrap(); let mut expected_auth = options.credential.unwrap_or_default().into_document(); expected_auth = expected_auth .into_iter() .filter(|(ref key, _)| json_auth.contains_key(key)) .collect(); assert_eq!(expected_auth, json_auth); } } } else { let expected_type = if warning { "warning" } else { "error" }; match RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .as_ref() .map_err(|e| e.as_ref()) { Ok(_) => panic!("expected {}", expected_type), Err(ErrorKind::ArgumentError { .. }) => {} Err(e) => panic!("expected ArgumentError, but got {:?}", e), } } } } #[cfg_attr(feature = "tokio-runtime", tokio::test(core_threads = 2))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn run_uri_options_spec_tests() { run_spec_test(&["uri-options"], run_test); } #[cfg_attr(feature = "tokio-runtime", tokio::test(core_threads = 2))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn run_connection_string_spec_tests() { run_spec_test(&["connection-string"], run_test); }
use bson::{Bson, Document}; use pretty_assertions::assert_eq; use serde::Deserialize; use crate::{ client::options::{ClientOptions, StreamAddress}, error::ErrorKind, selection_criteria::{ReadPreference, SelectionCriteria}, test::run_spec_test, RUNTIME, }; #[derive(Debug, Deserialize)] struct TestFile { pub tests: Vec<TestCase>, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct TestCase { pub description: String, pub uri: String, pub valid: bool, pub warning: Option<bool>, pub hosts: Option<Vec<Document>>, pub auth: Option<Document>, pub options: Option<Document>, } fn document_from_client_options(mut options: ClientOptions) -> Document { let mut doc = Document::new(); if let Some(s) = options.app_name.take() { doc.insert("appname", s); } if let Some(mechanism) = options .credential .get_or_insert_with(Default::default) .mechanism .take() { doc.insert("authmechanism", mechanism.as_str().to_string()); } if let Some(d) = options .credential .get_or_insert_with(Default::default) .mechanism_properties .take() { doc.insert("authmechanismproperties", d); } if let Some(s) = options .credential .get_or_insert_with(Default::default) .source .take() { doc.insert("authsource", s); } if let Some(i) = options.connect_timeout.take() { doc.insert("connecttimeoutms", i.as_millis() as i64); } if let Some(b) = options.direct_connection.take() { doc.insert("directconnection", b); } if let Some(i) = options.heartbeat_freq.take() { doc.insert("heartbeatfrequencyms", i.as_millis() as i64); } if let Some(i) = options.local_threshold.take() { doc.insert("localthresholdms", i.as_millis() as i64); } if let Some(i) = options.max_idle_time.take() { doc.insert("maxidletimems", i.as_millis() as i64); } if let Some(s) = options.repl_set_name.take() { doc.insert("replicaset", s); } if let Some(SelectionCriteria::ReadPreference(read_pref)) = options.selection_criteria.take() { let (level, tag_sets, max_staleness) = match read_pref { ReadPreference::Primary => ("primary", 
None, None), ReadPreference::PrimaryPreferred { tag_sets, max_staleness, } => ("primaryPreferred", tag_sets, max_staleness), ReadPreference::Secondary { tag_sets, max_staleness, } => ("secondary", tag_sets, max_staleness), ReadPreference::SecondaryPreferred { tag_sets, max_staleness, } => ("secondaryPreferred", tag_sets, max_staleness), ReadPreference::Nearest { tag_sets, max_staleness, } => ("nearest", tag_sets, max_staleness), }; doc.insert("readpreference", level); if let Some(tag_sets) = tag_sets { let tags: Vec<Bson> = tag_sets .into_iter() .map(|tag_set| { let mut tag_set: Vec<_> = tag_set.into_iter().collect(); tag_set.sort(); Bson::Document(tag_set.into_iter().map(|(k, v)| (k, v.into())).collect()) }) .collect(); doc.insert("re
.block_on(ClientOptions::parse(&test_case.uri)) .unwrap(); let hosts: Vec<_> = options .hosts .into_iter() .map(StreamAddress::into_document) .collect(); assert_eq!(hosts, json_hosts); } } if !is_unsupported_host_type { let options = RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .expect(&test_case.description); let mut options_doc = document_from_client_options(options); if let Some(json_options) = test_case.options { let mut json_options: Document = json_options .into_iter() .filter_map(|(k, v)| { if let Bson::Null = v { None } else { Some((k.to_lowercase(), v)) } }) .collect(); if !json_options.contains_key("tlsallowinvalidcertificates") { if let Some(val) = json_options.remove("tlsinsecure") { json_options .insert("tlsallowinvalidcertificates", !val.as_bool().unwrap()); } } options_doc = options_doc .into_iter() .filter(|(ref key, _)| json_options.contains_key(key)) .collect(); assert_eq!(options_doc, json_options, "{}", test_case.description) } if let Some(json_auth) = test_case.auth { let json_auth: Document = json_auth .into_iter() .filter_map(|(k, v)| { if let Bson::Null = v { None } else { Some((k.to_lowercase(), v)) } }) .collect(); let options = RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .unwrap(); let mut expected_auth = options.credential.unwrap_or_default().into_document(); expected_auth = expected_auth .into_iter() .filter(|(ref key, _)| json_auth.contains_key(key)) .collect(); assert_eq!(expected_auth, json_auth); } } } else { let expected_type = if warning { "warning" } else { "error" }; match RUNTIME .block_on(ClientOptions::parse(&test_case.uri)) .as_ref() .map_err(|e| e.as_ref()) { Ok(_) => panic!("expected {}", expected_type), Err(ErrorKind::ArgumentError { .. 
}) => {} Err(e) => panic!("expected ArgumentError, but got {:?}", e), } } } } #[cfg_attr(feature = "tokio-runtime", tokio::test(core_threads = 2))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn run_uri_options_spec_tests() { run_spec_test(&["uri-options"], run_test); } #[cfg_attr(feature = "tokio-runtime", tokio::test(core_threads = 2))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn run_connection_string_spec_tests() { run_spec_test(&["connection-string"], run_test); }
adpreferencetags", tags); } if let Some(i) = max_staleness { doc.insert("maxstalenessseconds", i.as_secs() as i64); } } if let Some(b) = options.retry_reads.take() { doc.insert("retryreads", b); } if let Some(b) = options.retry_writes.take() { doc.insert("retrywrites", b); } if let Some(i) = options.server_selection_timeout.take() { doc.insert("serverselectiontimeoutms", i.as_millis() as i64); } if let Some(i) = options.socket_timeout.take() { doc.insert("sockettimeoutms", i.as_millis() as i64); } if let Some(mut opt) = options.tls_options() { let ca_file_path = opt.ca_file_path.take(); let cert_key_file_path = opt.cert_key_file_path.take(); let allow_invalid_certificates = opt.allow_invalid_certificates.take(); if let Some(s) = ca_file_path { doc.insert("tls", true); doc.insert("tlscafile", s); } if let Some(s) = cert_key_file_path { doc.insert("tlscertificatekeyfile", s); } if let Some(b) = allow_invalid_certificates { doc.insert("tlsallowinvalidcertificates", b); } } if let Some(vec) = options.compressors.take() { doc.insert( "compressors", Bson::Array(vec.into_iter().map(Bson::String).collect()), ); } if let Some(s) = options.read_concern.take() { doc.insert("readconcernlevel", s.as_str()); } if let Some(i_or_s) = options .write_concern .get_or_insert_with(Default::default) .w .take() { doc.insert("w", i_or_s.to_bson()); } if let Some(i) = options .write_concern .get_or_insert_with(Default::default) .w_timeout .take() { doc.insert("wtimeoutms", i.as_millis() as i64); } if let Some(b) = options .write_concern .get_or_insert_with(Default::default) .journal .take() { doc.insert("journal", b); } if let Some(i) = options.zlib_compression.take() { doc.insert("zlibcompressionlevel", i64::from(i)); } doc } fn run_test(test_file: TestFile) { for mut test_case in test_file.tests { if test_case.description.contains("ipv6") || test_case.description.contains("IP literal") || test_case .description .contains("tlsCertificateKeyFilePassword") || 
test_case.description.contains("tlsAllowInvalidHostnames") || test_case.description.contains("single-threaded") || test_case.description.contains("serverSelectionTryOnce") || test_case.description.contains("Unix") || test_case.description.contains("relative path") { continue; } let warning = test_case.warning.take().unwrap_or(false); if test_case.valid && !warning { let mut is_unsupported_host_type = false; if let Some(mut json_hosts) = test_case.hosts.take() { is_unsupported_host_type = json_hosts.iter_mut().any(|h_json| { match h_json.remove("type").as_ref().and_then(Bson::as_str) { Some("ip_literal") | Some("unix") => true, _ => false, } }); if !is_unsupported_host_type { let options = RUNTIME
random
[ { "content": "fn parse_i64_ext_json(doc: &Document) -> Option<i64> {\n\n let number_string = doc.get(\"$numberLong\").and_then(Bson::as_str)?;\n\n number_string.parse::<i64>().ok()\n\n}\n\n\n", "file_path": "src/test/util/matchable.rs", "rank": 0, "score": 304714.2147247278 }, { "content": "fn normalize_write_concern_doc(mut write_concern_doc: Document) -> Document {\n\n if let Some(&Bson::I32(i)) = write_concern_doc.get(\"w\") {\n\n write_concern_doc.insert(\"w\", i64::from(i));\n\n }\n\n\n\n if let Some(w_timeout) = write_concern_doc.remove(\"wtimeout\") {\n\n write_concern_doc.insert(\"wtimeoutMS\", w_timeout);\n\n }\n\n\n\n if let Some(j) = write_concern_doc.remove(\"j\") {\n\n write_concern_doc.insert(\"journal\", j);\n\n }\n\n\n\n write_concern_doc\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 1, "score": 254540.72693461902 }, { "content": "fn return_document_to_bool(return_document: Option<ReturnDocument>) -> Option<bool> {\n\n if let Some(return_document) = return_document {\n\n return match return_document {\n\n ReturnDocument::After => Some(true),\n\n ReturnDocument::Before => Some(false),\n\n };\n\n }\n\n None\n\n}\n", "file_path": "src/operation/find_and_modify/options.rs", "rank": 3, "score": 231763.6166143772 }, { "content": "pub fn doc_size_bytes(doc: &Document) -> usize {\n\n // \n\n // * i32 length prefix (4 bytes)\n\n // * for each element:\n\n // * type (1 byte)\n\n // * number of UTF-8 bytes in key\n\n // * null terminator for the key (1 byte)\n\n // * size of the value\n\n // * null terminator (1 byte)\n\n 4 + doc\n\n .into_iter()\n\n .map(|(key, val)| 1 + key.len() + 1 + size_bytes(val))\n\n .sum::<usize>()\n\n + 1\n\n}\n\n\n", "file_path": "src/bson_util/mod.rs", "rank": 4, "score": 226508.95844685423 }, { "content": "pub fn find_all(coll: &Collection) -> Vec<Document> {\n\n coll.find(None, None).unwrap().map(Result::unwrap).collect()\n\n}\n", "file_path": "src/test/spec/crud/mod.rs", "rank": 5, 
"score": 218591.31453369663 }, { "content": "fn write_concern_from_document(write_concern_doc: Document) -> Option<WriteConcern> {\n\n let mut write_concern = WriteConcern::default();\n\n\n\n for (key, value) in write_concern_doc {\n\n match (&key[..], value) {\n\n (\"w\", Bson::I64(i)) => {\n\n write_concern.w = Some(Acknowledgment::from(i as i32));\n\n }\n\n (\"w\", Bson::String(s)) => {\n\n write_concern.w = Some(Acknowledgment::from(s));\n\n }\n\n (\"journal\", Bson::Boolean(b)) => {\n\n write_concern.journal = Some(b);\n\n }\n\n (\"wtimeoutMS\", Bson::I64(i)) if i > 0 => {\n\n write_concern.w_timeout = Some(Duration::from_millis(i as u64));\n\n }\n\n (\"wtimeoutMS\", Bson::I64(_)) => {\n\n // WriteConcern has an unsigned integer for the wtimeout field, so this is\n\n // impossible to test.\n\n return None;\n\n }\n\n _ => {}\n\n };\n\n }\n\n\n\n Some(write_concern)\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 6, "score": 216300.8915736191 }, { "content": "fn build_test(db_name: &str, list_collections: ListCollections, mut expected_body: Document) {\n\n let mut cmd = list_collections\n\n .build(&StreamDescription::new_testing())\n\n .expect(\"build should succeed\");\n\n assert_eq!(cmd.name, \"listCollections\");\n\n assert_eq!(cmd.target_db, db_name);\n\n assert_eq!(cmd.read_pref, None);\n\n\n\n bson_util::sort_document(&mut cmd.body);\n\n bson_util::sort_document(&mut expected_body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test(core_threads = 2))]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build() {\n\n let list_collections = ListCollections::new(\"test_db\".to_string(), None, false, None);\n\n let expected_body = doc! 
{\n\n \"listCollections\": 1,\n", "file_path": "src/operation/list_collections/test.rs", "rank": 7, "score": 213197.5583233925 }, { "content": "fn normalize_write_concern_doc(write_concern_doc: Document) -> Document {\n\n write_concern_doc\n\n .into_iter()\n\n .map(|(key, mut val)| {\n\n if key == \"w\" {\n\n if let Bson::I32(i) = val {\n\n val = Bson::I64(i64::from(i));\n\n }\n\n }\n\n\n\n (key, val)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 8, "score": 194782.86879790865 }, { "content": "fn get_int(value: &Bson) -> Option<i64> {\n\n bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json))\n\n}\n", "file_path": "src/test/util/matchable.rs", "rank": 9, "score": 193874.300617886 }, { "content": "pub fn drop_collection(coll: &Collection) {\n\n match coll.drop(None).as_ref().map_err(|e| e.as_ref()) {\n\n Err(ErrorKind::CommandError(CommandError { code: 26, .. })) | Ok(_) => {}\n\n e @ Err(_) => {\n\n e.unwrap();\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/test/util/mod.rs", "rank": 10, "score": 192373.80843998777 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestCredential {\n\n pub username: Option<String>,\n\n pub password: Option<String>,\n\n pub source: Option<String>,\n\n pub mechanism: Option<String>,\n\n pub mechanism_properties: Option<Document>,\n\n}\n\n\n\nimpl Into<Credential> for TestCredential {\n\n fn into(self) -> Credential {\n\n Credential {\n\n username: self.username,\n\n password: self.password,\n\n source: self.source,\n\n mechanism: self\n\n .mechanism\n\n .and_then(|s| AuthMechanism::from_str(s.as_str()).ok()),\n\n mechanism_properties: self.mechanism_properties,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/spec/auth.rs", "rank": 11, "score": 188815.65296652142 }, { "content": "fn is_auth_error(error: Error) -> bool {\n\n match error.kind.as_ref() {\n\n ErrorKind::AuthenticationError { .. 
} => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n/// Performs an operation that requires authentication and verifies that it either succeeded or\n\n/// failed with an authentication error according to the `should_succeed` parameter.\n\nasync fn auth_test(client: Client, should_succeed: bool) {\n\n let result = client.list_database_names(None).await;\n\n if should_succeed {\n\n result.expect(\"operation should have succeeded\");\n\n } else {\n\n assert!(is_auth_error(result.unwrap_err()));\n\n }\n\n}\n\n\n\n/// Attempts to authenticate using the given username/password, optionally specifying a mechanism\n\n/// via the `ClientOptions` api.\n", "file_path": "src/test/client.rs", "rank": 12, "score": 185136.97239704058 }, { "content": "fn validate_command_success(response: &Document) -> Result<()> {\n\n let ok = response\n\n .get(\"ok\")\n\n .ok_or_else(|| Error::invalid_authentication_response(\"SCRAM\"))?;\n\n match bson_util::get_int(ok) {\n\n Some(1) => Ok(()),\n\n Some(_) => Err(Error::authentication_error(\n\n \"SCRAM\",\n\n response\n\n .get_str(\"errmsg\")\n\n .unwrap_or(\"Authentication failure\"),\n\n )),\n\n _ => Err(Error::invalid_authentication_response(\"SCRAM\")),\n\n }\n\n}\n\n\n", "file_path": "src/client/auth/scram.rs", "rank": 13, "score": 178603.0500950794 }, { "content": "fn filter_servers_by_tag_sets(servers: &mut Vec<&ServerDescription>, tag_sets: &[TagSet]) {\n\n if tag_sets.is_empty() {\n\n return;\n\n }\n\n\n\n for tag_set in tag_sets {\n\n let matches_tag_set = |server: &&ServerDescription| server.matches_tag_set(tag_set);\n\n\n\n if servers.iter().any(matches_tag_set) {\n\n servers.retain(matches_tag_set);\n\n\n\n return;\n\n }\n\n }\n\n\n\n servers.clear();\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 14, "score": 168449.61708550266 }, { "content": "/// Returns a vector of documents that cannot be sent in one batch (35000 documents).\n\n/// Includes duplicate _id's across different batches.\n\nfn 
multibatch_documents_with_duplicate_keys() -> Vec<Document> {\n\n let large_doc = LARGE_DOC.clone();\n\n\n\n let mut docs: Vec<Document> = Vec::new();\n\n docs.extend(vec![large_doc.clone(); 7498]);\n\n\n\n docs.push(doc! { \"_id\": 1 });\n\n docs.push(doc! { \"_id\": 1 }); // error in first batch, index 7499\n\n\n\n docs.extend(vec![large_doc.clone(); 14999]);\n\n docs.push(doc! { \"_id\": 1 }); // error in second batch, index 22499\n\n\n\n docs.extend(vec![large_doc.clone(); 9999]);\n\n docs.push(doc! { \"_id\": 1 }); // error in third batch, index 32499\n\n\n\n docs.extend(vec![large_doc; 2500]);\n\n\n\n assert_eq!(docs.len(), 35000);\n\n docs\n\n}\n", "file_path": "src/test/coll.rs", "rank": 15, "score": 161442.9512485517 }, { "content": "fn server_type_from_str(s: &str) -> Option<ServerType> {\n\n let t = match s {\n\n \"Standalone\" => ServerType::Standalone,\n\n \"Mongos\" => ServerType::Mongos,\n\n \"RSPrimary\" => ServerType::RSPrimary,\n\n \"RSSecondary\" => ServerType::RSSecondary,\n\n \"RSArbiter\" => ServerType::RSArbiter,\n\n \"RSOther\" => ServerType::RSOther,\n\n \"RSGhost\" => ServerType::RSGhost,\n\n \"Unknown\" | \"PossiblePrimary\" => ServerType::Unknown,\n\n _ => return None,\n\n };\n\n\n\n Some(t)\n\n}\n\n\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 16, "score": 160438.48107187063 }, { "content": "fn convert_read_preference(test_read_pref: TestReadPreference) -> Option<ReadPreference> {\n\n let max_staleness = test_read_pref\n\n .max_staleness_seconds\n\n .map(Duration::from_secs);\n\n\n\n let read_pref = match &test_read_pref.mode.as_ref()?[..] 
{\n\n \"Primary\" => ReadPreference::Primary,\n\n \"Secondary\" => ReadPreference::Secondary {\n\n tag_sets: test_read_pref.tag_sets,\n\n max_staleness,\n\n },\n\n \"PrimaryPreferred\" => ReadPreference::PrimaryPreferred {\n\n tag_sets: test_read_pref.tag_sets,\n\n max_staleness,\n\n },\n\n \"SecondaryPreferred\" => ReadPreference::SecondaryPreferred {\n\n tag_sets: test_read_pref.tag_sets,\n\n max_staleness,\n\n },\n\n \"Nearest\" => ReadPreference::Nearest {\n\n tag_sets: test_read_pref.tag_sets,\n\n max_staleness,\n\n },\n\n _ => panic!(\"invalid read preference: {:?}\", test_read_pref),\n\n };\n\n\n\n Some(read_pref)\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 17, "score": 160259.64134321135 }, { "content": "fn utc_datetime_from_millis(millis: i64) -> UtcDateTime {\n\n let seconds_portion = millis / 1000;\n\n let nanos_portion = (millis % 1000) * 1_000_000;\n\n\n\n let naive_datetime = NaiveDateTime::from_timestamp(seconds_portion, nanos_portion as u32);\n\n let datetime = DateTime::from_utc(naive_datetime, Utc);\n\n\n\n UtcDateTime(datetime)\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 18, "score": 158056.12251461763 }, { "content": "pub fn assert_matches<A: Matchable + Debug, E: Matchable + Debug>(\n\n actual: &A,\n\n expected: &E,\n\n description: Option<&str>,\n\n) {\n\n assert!(\n\n actual.matches(expected),\n\n \"{}\\n{:?}\\n did not MATCH \\n{:?}\",\n\n description.unwrap_or(\"\"),\n\n actual,\n\n expected\n\n );\n\n}\n\n\n", "file_path": "src/test/util/matchable.rs", "rank": 19, "score": 157581.5942437302 }, { "content": "fn run_test(name: &str, test: impl Fn(EventClient, Database, Collection)) {\n\n // TODO RUST-51: Disable retryable writes once they're implemented.\n\n let client = RUNTIME.block_on(EventClient::new());\n\n\n\n if client.options.repl_set_name.is_none() {\n\n return;\n\n }\n\n\n\n let name = format!(\"step-down-{}\", name);\n\n\n\n let db = 
client.database(&name);\n\n let coll = db.collection(&name);\n\n\n\n let wc_majority = WriteConcern::builder().w(Acknowledgment::Majority).build();\n\n\n\n let _ = coll.drop(Some(\n\n DropCollectionOptions::builder()\n\n .write_concern(wc_majority.clone())\n\n .build(),\n\n ));\n", "file_path": "src/test/spec/connection_stepdown.rs", "rank": 20, "score": 151158.37682708973 }, { "content": "/// Tests connection string parsing of authentication options.\n\nfn run_auth_test(test_file: TestFile) {\n\n for mut test_case in test_file.tests {\n\n test_case.description = test_case.description.replace('$', \"%\");\n\n\n\n let skipped_mechanisms = [\"GSSAPI\", \"MONGODB-X509\", \"PLAIN\", \"MONGODB-CR\"];\n\n\n\n // TODO: X509 (RUST-147)\n\n // TODO: GSSAPI (RUST-196)\n\n // TODO: PLAIN (RUST-197)\n\n if skipped_mechanisms\n\n .iter()\n\n .any(|mech| test_case.description.contains(mech))\n\n {\n\n continue;\n\n }\n\n\n\n match RUNTIME.block_on(ClientOptions::parse(test_case.uri.as_str())) {\n\n Ok(options) => {\n\n assert!(test_case.valid, \"{}\", test_case.description);\n\n match test_case.credential {\n", "file_path": "src/test/spec/auth.rs", "rank": 21, "score": 150324.1944116889 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n pub tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/test/spec/auth.rs", "rank": 22, "score": 150111.9020869988 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct TestCase {\n\n pub description: String,\n\n pub uri: String,\n\n pub valid: bool,\n\n pub credential: Option<TestCredential>,\n\n}\n\n\n", "file_path": "src/test/spec/auth.rs", "rank": 23, "score": 150111.65017442653 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestTopologyDescription {\n\n #[serde(rename = \"type\")]\n\n topology_type: TopologyType,\n\n servers: Vec<TestServerDescription>,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 26, "score": 
148066.0846230077 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct TestServerDescription {\n\n address: String,\n\n #[serde(rename = \"avg_rtt_ms\")]\n\n avg_rtt_ms: Option<f64>,\n\n #[serde(rename = \"type\")]\n\n server_type: TestServerType,\n\n tags: Option<TagSet>,\n\n last_update_time: Option<i64>,\n\n last_write: Option<LastWriteDate>,\n\n max_wire_version: Option<i32>,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 27, "score": 148065.8327104354 }, { "content": "fn deserialize_i64_from_ext_json<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<i64>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let document = Option::<Document>::deserialize(deserializer)?;\n\n match document {\n\n Some(document) => {\n\n let number_string = document\n\n .get(\"$numberLong\")\n\n .and_then(Bson::as_str)\n\n .ok_or_else(|| de::Error::custom(\"missing $numberLong field\"))?;\n\n let parsed = number_string\n\n .parse::<i64>()\n\n .map_err(|_| de::Error::custom(\"failed to parse to i64\"))?;\n\n Ok(Some(parsed))\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n// This struct is necessary because the command monitoring tests specify the options in a very old\n\n// way (SPEC-1519).\n", "file_path": "src/test/spec/command_monitoring/operation.rs", "rank": 28, "score": 147068.646866471 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct CollectionInfo {\n\n pub name: String,\n\n #[serde(rename = \"type\")]\n\n pub coll_type: String,\n\n pub options: Document,\n\n pub info: Info,\n\n}\n\n\n", "file_path": "src/test/db.rs", "rank": 29, "score": 144018.8796763331 }, { "content": "fn run_document_test(test_file: TestFile) {\n\n for test_case in test_file.tests {\n\n if let Some(specified_write_concern) = test_case.write_concern {\n\n let wc = write_concern_from_document(specified_write_concern).map(|write_concern| {\n\n 
write_concern.validate().map_err(Error::from).and_then(|_| {\n\n let doc = bson::to_bson(&write_concern)?;\n\n\n\n Ok(doc)\n\n })\n\n });\n\n\n\n let actual_write_concern = match wc {\n\n Some(Ok(Bson::Document(write_concern))) => {\n\n assert!(test_case.valid, \"{}\", &test_case.description);\n\n write_concern\n\n }\n\n Some(Ok(x)) => panic!(\"wat: {:?}\", x),\n\n Some(Err(_)) => {\n\n assert!(!test_case.valid, \"{}\", &test_case.description);\n\n continue;\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 30, "score": 143281.83803515547 }, { "content": "fn kill_cursors_sent(client: &EventClient) -> bool {\n\n client\n\n .command_events\n\n .read()\n\n .unwrap()\n\n .iter()\n\n .any(|event| match event {\n\n CommandEvent::CommandStartedEvent(CommandStartedEvent { command_name, .. }) => {\n\n command_name == \"killCursors\"\n\n }\n\n _ => false,\n\n })\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test(core_threads = 2))]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\n#[function_name::named]\n\nasync fn kill_cursors_on_drop() {\n\n let _guard = LOCK.run_concurrently();\n\n\n", "file_path": "src/test/coll.rs", "rank": 31, "score": 142657.8499394711 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n pub tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 33, "score": 139731.23381375492 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct TestCase {\n\n pub description: String,\n\n pub valid: bool,\n\n pub write_concern: Option<Document>,\n\n pub write_concern_document: Option<Document>,\n\n pub read_concern: Option<Document>,\n\n pub read_concern_document: Option<Document>,\n\n pub is_acknowledged: Option<bool>,\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 34, "score": 139730.98190118262 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n 
#[serde(rename = \"heartbeatFrequencyMS\")]\n\n heartbeat_frequency_ms: Option<u64>,\n\n topology_description: TestTopologyDescription,\n\n read_preference: TestReadPreference,\n\n suitable_servers: Option<Vec<TestServerDescription>>,\n\n in_latency_window: Option<Vec<TestServerDescription>>,\n\n error: Option<bool>,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 35, "score": 139355.0604100468 }, { "content": "fn run_connection_string_test(test_file: TestFile) {\n\n for test_case in test_file.tests {\n\n match RUNTIME.block_on(ClientOptions::parse(&test_case.uri)) {\n\n Ok(options) => {\n\n assert!(test_case.valid);\n\n\n\n if let Some(ref expected_read_concern) = test_case.read_concern {\n\n let mut actual_read_concern = Document::new();\n\n\n\n if let Some(client_read_concern) = options.read_concern {\n\n actual_read_concern.insert(\"level\", client_read_concern.as_str());\n\n }\n\n\n\n assert_eq!(\n\n &actual_read_concern, expected_read_concern,\n\n \"{}\",\n\n test_case.description\n\n );\n\n }\n\n\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 36, "score": 138958.49889187605 }, { "content": "fn convert_server_description(\n\n test_server_desc: TestServerDescription,\n\n) -> Option<ServerDescription> {\n\n let server_type = match test_server_desc.server_type.into_server_type() {\n\n Some(server_type) => server_type,\n\n None => return None,\n\n };\n\n\n\n let mut command_response = is_master_response_from_server_type(server_type);\n\n command_response.tags = test_server_desc.tags;\n\n command_response.last_write = test_server_desc.last_write.map(|last_write| {\n\n let millis: i64 = last_write.last_write_date.number.parse().unwrap();\n\n\n\n LastWrite {\n\n last_write_date: utc_datetime_from_millis(millis),\n\n }\n\n });\n\n\n\n let is_master = IsMasterReply {\n\n command_response,\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 37, "score": 
137487.85667828502 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n pub tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 38, "score": 136638.71013111924 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct TestCase {\n\n pub description: String,\n\n pub uri: String,\n\n pub valid: bool,\n\n pub read_concern: Option<Document>,\n\n pub write_concern: Option<Document>,\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 39, "score": 136638.45821854693 }, { "content": "fn run_test(test_file: TestFile) {\n\n let options = RUNTIME\n\n .block_on(ClientOptions::parse(&test_file.uri))\n\n .expect(&test_file.description);\n\n\n\n let test_description = &test_file.description;\n\n let mut topology_description = TopologyDescription::new(options).expect(test_description);\n\n\n\n for (i, phase) in test_file.phases.into_iter().enumerate() {\n\n for Response(address, command_response) in phase.responses {\n\n let is_master_reply = if command_response == Default::default() {\n\n Err(ErrorKind::OperationError {\n\n message: \"dummy error\".to_string(),\n\n }\n\n .into())\n\n } else {\n\n Ok(IsMasterReply {\n\n command_response,\n\n round_trip_time: Some(Duration::from_millis(1234)), // Doesn't matter for tests.\n\n })\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 40, "score": 135598.1712087535 }, { "content": "fn run_test(test_file: TestFile) {\n\n let avg_rtt_ms = match test_file.avg_rtt_ms {\n\n AverageRtt::F(f) => Some(f),\n\n AverageRtt::S(ref s) if s == \"NULL\" => None,\n\n AverageRtt::S(ref s) => panic!(\"invalid average round trip time: {}\", s),\n\n };\n\n\n\n // The address is not used, so it doesn't matter.\n\n let mut old_server_desc = ServerDescription::new(Default::default(), None);\n\n let mut new_server_desc = old_server_desc.clone();\n\n\n\n 
old_server_desc.average_round_trip_time = avg_rtt_ms.map(f64_ms_as_duration);\n\n new_server_desc.average_round_trip_time = Some(f64_ms_as_duration(test_file.new_rtt_ms));\n\n\n\n let topology = TopologyDescription {\n\n single_seed: false,\n\n topology_type: TopologyType::ReplicaSetNoPrimary,\n\n set_name: None,\n\n max_set_version: None,\n\n max_election_id: None,\n", "file_path": "src/sdam/description/topology/test/rtt.rs", "rank": 41, "score": 135598.1712087535 }, { "content": "fn write_concern_to_document(write_concern: &WriteConcern) -> Result<Document> {\n\n match bson::to_bson(&write_concern)? {\n\n Bson::Document(doc) => Ok(doc),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/test/spec/read_write_concern/mod.rs", "rank": 42, "score": 135527.0700412582 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Options {\n\n pub ordered: bool,\n\n}\n\n\n", "file_path": "src/test/spec/crud/insert_many.rs", "rank": 43, "score": 135093.98868390685 }, { "content": "fn verify_max_await_time(max_await_time: Option<Duration>, cursor_type: Option<CursorType>) {\n\n let ns = Namespace::empty();\n\n let address = StreamAddress {\n\n hostname: \"localhost\".to_string(),\n\n port: None,\n\n };\n\n let find = Find::new(\n\n ns,\n\n None,\n\n Some(FindOptions {\n\n cursor_type,\n\n max_await_time,\n\n ..Default::default()\n\n }),\n\n );\n\n\n\n let response = CommandResponse::with_document_and_address(\n\n address,\n\n doc! 
{\n\n \"cursor\": {\n", "file_path": "src/operation/find/test.rs", "rank": 44, "score": 133843.31871689117 }, { "content": "pub fn parse_version(version: &str) -> (u64, u64) {\n\n let parts: Vec<u64> = version.split('.').map(|s| s.parse().unwrap()).collect();\n\n if parts.len() != 2 {\n\n panic!(\"not two part version string: {:?}\", parts);\n\n }\n\n (parts[0], parts[1])\n\n}\n\n\n", "file_path": "src/test/util/mod.rs", "rank": 45, "score": 133311.29522600194 }, { "content": "fn run_test(test_file: TestFile) {\n\n let read_pref = match convert_read_preference(test_file.read_preference) {\n\n Some(read_pref) => read_pref,\n\n None => return,\n\n };\n\n\n\n let servers: Option<Vec<ServerDescription>> = test_file\n\n .topology_description\n\n .servers\n\n .into_iter()\n\n // The driver doesn't support server versions low enough not to support max staleness, so we\n\n // just manually filter them out here.\n\n .filter(|server| server.max_wire_version.map(|version| version >= 5).unwrap_or(true))\n\n .map(convert_server_description)\n\n .collect();\n\n\n\n let servers = match servers {\n\n Some(servers) => servers,\n\n None => return,\n\n };\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 46, "score": 133151.28615173005 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n pub matched_count: i64,\n\n pub modified_count: i64,\n\n pub upserted_count: Option<i64>,\n\n pub upserted_id: Option<Bson>,\n\n}\n\n\n", "file_path": "src/test/spec/crud/update_many.rs", "rank": 47, "score": 132234.27269345155 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n pub matched_count: i64,\n\n pub modified_count: i64,\n\n pub upserted_count: Option<i64>,\n\n pub upserted_id: Option<Bson>,\n\n}\n\n\n", "file_path": "src/test/spec/crud/replace_one.rs", "rank": 48, "score": 132234.27269345155 }, { "content": "#[derive(Debug, 
Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n pub deleted_count: i64,\n\n}\n\n\n", "file_path": "src/test/spec/crud/delete_one.rs", "rank": 49, "score": 132234.27269345155 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n pub matched_count: i64,\n\n pub modified_count: i64,\n\n pub upserted_count: Option<i64>,\n\n pub upserted_id: Option<Bson>,\n\n}\n\n\n", "file_path": "src/test/spec/crud/update_one.rs", "rank": 50, "score": 132234.27269345155 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n pub deleted_count: i64,\n\n}\n\n\n", "file_path": "src/test/spec/crud/delete_many.rs", "rank": 51, "score": 132234.27269345155 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n inserted_ids: Option<Document>,\n\n}\n\n\n", "file_path": "src/test/spec/crud/insert_many.rs", "rank": 52, "score": 132234.27269345155 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResultDoc {\n\n inserted_id: Bson,\n\n}\n\n\n", "file_path": "src/test/spec/crud/insert_one.rs", "rank": 53, "score": 132234.27269345155 }, { "content": "fn verify_max_staleness(max_staleness: Option<Duration>) -> Result<()> {\n\n if max_staleness\n\n .map(|staleness| staleness > Duration::from_secs(0) && staleness < Duration::from_secs(90))\n\n .unwrap_or(false)\n\n {\n\n return Err(ErrorKind::ArgumentError {\n\n message: \"max staleness cannot be both positive and below 90 seconds\".into(),\n\n }\n\n .into());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 54, "score": 131962.2274202171 }, { "content": "fn percent_decode(s: &str, err_message: &str) -> Result<String> {\n\n match percent_encoding::percent_decode_str(s).decode_utf8() {\n\n Ok(result) => Ok(result.to_string()),\n\n Err(_) => 
Err(ErrorKind::ArgumentError {\n\n message: err_message.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 55, "score": 129853.25493548441 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct NumberLong {\n\n #[serde(rename = \"$numberLong\")]\n\n number: String,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 56, "score": 128399.91412286766 }, { "content": "#[derive(Debug, Deserialize, Default, PartialEq)]\n\nstruct ParsedOptions {\n\n user: Option<String>,\n\n password: Option<String>,\n\n db: Option<String>,\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test(core_threads = 2))]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn run() {\n\n let run_test = |mut test_file: TestFile| {\n\n // TODO DRIVERS-796: unskip this test\n\n if test_file.uri == \"mongodb+srv://test5.test.build.10gen.cc/?authSource=otherDB\" {\n\n return;\n\n }\n\n\n\n let result = RUNTIME.block_on(ClientOptions::parse(&test_file.uri));\n\n\n\n if let Some(true) = test_file.error {\n\n assert!(matches!(result, Err(_)), test_file.comment.unwrap());\n\n return;\n", "file_path": "src/test/spec/initial_dns_seedlist_discovery.rs", "rank": 57, "score": 128031.39930441818 }, { "content": "#[derive(Debug, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ResolvedOptions {\n\n replica_set: Option<String>,\n\n auth_source: Option<String>,\n\n ssl: bool,\n\n}\n\n\n", "file_path": "src/test/spec/initial_dns_seedlist_discovery.rs", "rank": 58, "score": 128031.22187482411 }, { "content": "/// Parses a string slice of the form \"<expected_key>=<body>\" into \"<body>\", if possible.\n\nfn parse_kvp(str: &str, expected_key: char) -> Result<String> {\n\n if !str.starts_with(expected_key) || str.chars().nth(1) != Some('=') {\n\n Err(Error::invalid_authentication_response(\"SCRAM\"))\n\n } else {\n\n Ok(str.chars().skip(2).collect())\n\n }\n\n}\n\n\n", 
"file_path": "src/client/auth/scram.rs", "rank": 59, "score": 127619.00290954206 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct LastWriteDate {\n\n last_write_date: NumberLong,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test.rs", "rank": 60, "score": 125192.47403574467 }, { "content": "fn deserialize_checkout_failed<'de, D>(\n\n deserializer: D,\n\n) -> Result<ConnectionCheckoutFailedEvent, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let helper = ConnectionCheckoutFailedHelper::deserialize(deserializer)?;\n\n\n\n // The driver doesn't have a concept of a \"closed pool\", instead having the pool closed when the\n\n // pool is dropped. Because of this, the driver doesn't implement the \"poolClosed\" reason for a\n\n // connection checkout failure. While we skip over the corresponding tests in our spec test\n\n // runner, we still need to be able to deserialize the \"poolClosed\" reason to avoid the test\n\n // harness panicking, so we arbitrarily map the \"poolClosed\" to \"connectionError\".\n\n let reason = match helper.reason {\n\n CheckoutFailedReasonHelper::PoolClosed | CheckoutFailedReasonHelper::ConnectionError => {\n\n ConnectionCheckoutFailedReason::ConnectionError\n\n }\n\n CheckoutFailedReasonHelper::Timeout => ConnectionCheckoutFailedReason::Timeout,\n\n };\n\n\n\n Ok(ConnectionCheckoutFailedEvent {\n\n address: StreamAddress {\n\n hostname: Default::default(),\n\n port: None,\n\n },\n\n reason,\n\n })\n\n}\n", "file_path": "src/cmap/test/event.rs", "rank": 61, "score": 124386.64650748069 }, { "content": "/// Splits a string into a section before a given index and a section exclusively after the index.\n\n/// Empty portions are returned as `None`.\n\nfn exclusive_split_at(s: &str, i: usize) -> (Option<&str>, Option<&str>) {\n\n let (l, r) = s.split_at(i);\n\n\n\n let lout = if !l.is_empty() { Some(l) } else { None };\n\n let rout = if r.len() > 1 { Some(&r[1..]) } 
else { None };\n\n\n\n (lout, rout)\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 62, "score": 120353.28336515561 }, { "content": "#[derive(Copy, Clone)]\n\nstruct AsyncRuntimeProvider;\n\n\n\nimpl trust_dns_resolver::name_server::RuntimeProvider for AsyncRuntimeProvider {\n\n type Handle = AsyncRuntime;\n\n type Tcp = AsyncTcpStream;\n\n type Timer = AsyncRuntime;\n\n type Udp = AsyncUdpSocket;\n\n}\n\n\n\n// Below are implementations of the various trait requirements that trustdns imposes to use a\n\n// custom runtime. We define them for both async-std and tokio.\n\n\n\n#[async_trait]\n\nimpl trust_dns_proto::Time for AsyncRuntime {\n\n async fn delay_for(duration: Duration) {\n\n Delay::new(duration).await\n\n }\n\n\n\n async fn timeout<F: 'static + Future + Send>(\n\n timeout: Duration,\n", "file_path": "src/runtime/resolver.rs", "rank": 63, "score": 115352.3572391459 }, { "content": "fn validate_userinfo(s: &str, userinfo_type: &str) -> Result<()> {\n\n if s.chars().any(|c| USERINFO_RESERVED_CHARACTERS.contains(&c)) {\n\n return Err(ErrorKind::ArgumentError {\n\n message: format!(\"{} must be URL encoded\", userinfo_type),\n\n }\n\n .into());\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl ClientOptionsParser {\n\n fn parse(s: &str) -> Result<Self> {\n\n let end_of_scheme = match s.find(\"://\") {\n\n Some(index) => index,\n\n None => {\n\n return Err(ErrorKind::ArgumentError {\n\n message: \"connection string contains no scheme\".to_string(),\n\n }\n\n .into())\n\n }\n", "file_path": "src/client/options/mod.rs", "rank": 64, "score": 115130.2143606388 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n filter: Option<Document>,\n\n options: Option<FindOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let find = Find::new(ns.clone(), filter, options);\n\n\n\n let mut cmd = find.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"find\");\n\n assert_eq!(cmd.target_db.as_str(), ns.db.as_str());\n\n\n\n 
bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test(core_threads = 2))]\n", "file_path": "src/operation/find/test.rs", "rank": 65, "score": 112170.29392837506 }, { "content": "fn build_test(\n\n target: impl Into<AggregateTarget>,\n\n pipeline: Vec<Document>,\n\n options: Option<AggregateOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let target = target.into();\n\n\n\n let aggregate = Aggregate::new(target.clone(), pipeline, options);\n\n\n\n let mut cmd = aggregate.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"aggregate\");\n\n assert_eq!(cmd.target_db.as_str(), target.db_name());\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n", "file_path": "src/operation/aggregate/test.rs", "rank": 66, "score": 112170.29392837506 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n cursor_id: i64,\n\n address: StreamAddress,\n\n batch_size: Option<u32>,\n\n max_time: Option<Duration>,\n\n mut expected_body: Document,\n\n) {\n\n let get_more = GetMore::new(ns.clone(), cursor_id, address, batch_size, max_time);\n\n\n\n let build_result = get_more.build(&StreamDescription::new_testing());\n\n assert!(build_result.is_ok());\n\n\n\n let mut cmd = build_result.unwrap();\n\n assert_eq!(cmd.name, \"getMore\".to_string());\n\n assert_eq!(cmd.read_pref, None);\n\n assert_eq!(cmd.target_db, ns.db);\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n", "file_path": "src/operation/get_more/test.rs", "rank": 67, "score": 112170.29392837506 }, { "content": "#[derive(Debug, Default, PartialEq)]\n\nstruct ClientOptionsParser {\n\n pub hosts: Vec<StreamAddress>,\n\n pub srv: bool,\n\n pub app_name: Option<String>,\n\n pub tls: Option<Tls>,\n\n pub 
heartbeat_freq: Option<Duration>,\n\n pub local_threshold: Option<Duration>,\n\n pub read_concern: Option<ReadConcern>,\n\n pub selection_criteria: Option<SelectionCriteria>,\n\n pub repl_set_name: Option<String>,\n\n pub write_concern: Option<WriteConcern>,\n\n pub server_selection_timeout: Option<Duration>,\n\n pub max_pool_size: Option<u32>,\n\n pub min_pool_size: Option<u32>,\n\n pub max_idle_time: Option<Duration>,\n\n pub wait_queue_timeout: Option<Duration>,\n\n pub compressors: Option<Vec<String>>,\n\n pub connect_timeout: Option<Duration>,\n\n pub retry_reads: Option<bool>,\n\n pub retry_writes: Option<bool>,\n", "file_path": "src/client/options/mod.rs", "rank": 68, "score": 111935.65977267778 }, { "content": "struct TestFixtures {\n\n op: Insert,\n\n documents: Vec<Document>,\n\n options: InsertManyOptions,\n\n}\n\n\n", "file_path": "src/operation/insert/test.rs", "rank": 69, "score": 111689.60597292034 }, { "content": "// Removes all items in the `VecDeque` that fulfill the predicate and return them in order as a new\n\n// `Vec`.\n\nfn remove_by<T, F>(vec: &mut VecDeque<T>, pred: F) -> Vec<T>\n\nwhere\n\n F: Fn(&T) -> bool,\n\n{\n\n let mut i = 0;\n\n let mut removed = Vec::new();\n\n\n\n while i < vec.len() {\n\n if pred(&vec[i]) {\n\n removed.push(vec.remove(i).unwrap());\n\n } else {\n\n i += 1;\n\n }\n\n }\n\n\n\n removed\n\n}\n", "file_path": "src/cmap/test/file.rs", "rank": 70, "score": 110838.29927512555 }, { "content": "fn is_not_master(code: i32, message: &str) -> bool {\n\n if NOTMASTER_CODES.contains(&code) {\n\n return true;\n\n } else if is_recovering(code, message) {\n\n return false;\n\n }\n\n message.contains(\"not master\")\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 71, "score": 109812.73707008682 }, { "content": "fn is_recovering(code: i32, message: &str) -> bool {\n\n if RECOVERING_CODES.contains(&code) {\n\n return true;\n\n }\n\n message.contains(\"not master or secondary\") || message.contains(\"node is 
recovering\")\n\n}\n\n\n\n/// An error that occurred due to a database command failing.\n\n#[derive(Clone, Debug, Deserialize)]\n\npub struct CommandError {\n\n /// Identifies the type of error.\n\n pub code: i32,\n\n\n\n /// The name associated with the error code.\n\n #[serde(rename = \"codeName\", default)]\n\n pub code_name: String,\n\n\n\n /// A description of the error that occurred.\n\n #[serde(rename = \"errmsg\")]\n\n pub message: String,\n", "file_path": "src/error.rs", "rank": 72, "score": 109812.73707008682 }, { "content": "pub fn size_bytes(val: &Bson) -> usize {\n\n match val {\n\n Bson::FloatingPoint(_) => 8,\n\n // \n\n // * length prefix (4 bytes)\n\n // * number of UTF-8 bytes\n\n // * null terminator (1 byte)\n\n Bson::String(s) => 4 + s.len() + 1,\n\n // An array is serialized as a document with the keys \"0\", \"1\", \"2\", etc., so the size of\n\n // an array is:\n\n //\n\n // * length prefix (4 bytes)\n\n // * for each element:\n\n // * type (1 byte)\n\n // * number of decimal digits in key\n\n // * null terminator for the key (1 byte)\n\n // * size of value\n\n // * null terminator (1 byte)\n\n Bson::Array(arr) => {\n\n 4 + arr\n", "file_path": "src/bson_util/mod.rs", "rank": 73, "score": 108016.02094222448 }, { "content": "fn deserialize_pool_created<'de, D>(deserializer: D) -> Result<PoolCreatedEvent, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let helper = PoolCreatedEventHelper::deserialize(deserializer)?;\n\n\n\n // The CMAP spec tests use \"42\" as a placeholder in the expected events to indicate that the\n\n // driver should assert that a value is present without any constraints on the value itself.\n\n // This idiom is used for the connection pool creation options even when no options are\n\n // specified, meaning that there isn't any useful assertion we can do based on this value.\n\n // Because of this, we deserialize the value `42` into `None` for the options, which prevents\n\n // deserialization failure due to an 
unexpected type. For other integer values, we raise an\n\n // error indicating that we expect `42` instead.\n\n let options = match helper.options {\n\n Some(PoolOptionsHelper::Options(opts)) => Some(opts),\n\n Some(PoolOptionsHelper::Number(42)) | None => None,\n\n Some(PoolOptionsHelper::Number(other)) => {\n\n return Err(serde::de::Error::invalid_value(\n\n Unexpected::Unsigned(other),\n\n &\"42\",\n", "file_path": "src/cmap/test/event.rs", "rank": 74, "score": 107514.46634353764 }, { "content": "/// Get an Insert operation and the documents/options used to construct it.\n\nfn fixtures() -> TestFixtures {\n\n let documents = vec![\n\n Document::new(),\n\n doc! {\"_id\": 1234, \"a\": 1},\n\n doc! {\"a\": 123, \"b\": \"hello world\" },\n\n ];\n\n\n\n let options = InsertManyOptions {\n\n ordered: Some(true),\n\n write_concern: Some(WriteConcern::builder().journal(true).build()),\n\n ..Default::default()\n\n };\n\n\n\n let op = Insert::new(\n\n Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n },\n\n documents.clone(),\n\n Some(options.clone()),\n", "file_path": "src/operation/insert/test.rs", "rank": 75, "score": 107081.64998138878 }, { "content": "#[derive(Deserialize)]\n\nstruct TestCase {\n\n description: String,\n\n #[serde(rename = \"ignore_if_server_version_greater_than\", default)]\n\n max_version: Option<String>,\n\n #[serde(rename = \"ignore_if_server_version_less_than\", default)]\n\n min_version: Option<String>,\n\n operation: Document,\n\n expectations: Vec<TestEvent>,\n\n}\n\n\n", "file_path": "src/test/spec/command_monitoring/mod.rs", "rank": 76, "score": 107031.05967837057 }, { "content": "#[derive(Deserialize)]\n\nstruct TestFile {\n\n data: Vec<Document>,\n\n collection_name: String,\n\n database_name: String,\n\n tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/test/spec/command_monitoring/mod.rs", "rank": 77, "score": 107031.05967837057 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Metadata 
{\n\n #[serde(rename = \"clientMetadata\")]\n\n pub client: ClientMetadata,\n\n}\n\n\n", "file_path": "src/test/client.rs", "rank": 78, "score": 106998.33143415742 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Info {\n\n pub read_only: bool,\n\n pub uuid: Bson,\n\n}\n\n\n", "file_path": "src/test/db.rs", "rank": 79, "score": 106998.07952158514 }, { "content": "#[derive(Debug)]\n\nstruct CollectionInner {\n\n client: Client,\n\n db: Database,\n\n name: String,\n\n selection_criteria: Option<SelectionCriteria>,\n\n read_concern: Option<ReadConcern>,\n\n write_concern: Option<WriteConcern>,\n\n}\n\n\n\nimpl Collection {\n\n pub(crate) fn new(db: Database, name: &str, options: Option<CollectionOptions>) -> Self {\n\n let options = options.unwrap_or_default();\n\n let selection_criteria = options\n\n .selection_criteria\n\n .or_else(|| db.selection_criteria().cloned());\n\n\n\n let read_concern = options.read_concern.or_else(|| db.read_concern().cloned());\n\n\n\n let write_concern = options\n\n .write_concern\n", "file_path": "src/coll/mod.rs", "rank": 80, "score": 106959.84945709168 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n uri: String,\n\n seeds: Vec<String>,\n\n hosts: Vec<String>,\n\n options: Option<ResolvedOptions>,\n\n parsed_options: Option<ParsedOptions>,\n\n error: Option<bool>,\n\n comment: Option<String>,\n\n}\n\n\n", "file_path": "src/test/spec/initial_dns_seedlist_discovery.rs", "rank": 81, "score": 104894.52151606895 }, { "content": "#[function_name::named]\n\nfn run_aggregate_test(test_file: TestFile) {\n\n let client = RUNTIME.block_on(TestClient::new());\n\n\n\n let data = test_file.data;\n\n\n\n for test_case in test_file.tests {\n\n if test_case.operation.name != \"aggregate\" {\n\n continue;\n\n }\n\n\n\n let _guard = LOCK.run_concurrently();\n\n\n\n let coll = client.init_db_and_coll(\n\n function_name!(),\n\n &test_case.description.replace('$', \"%\").replace(' ', 
\"_\"),\n\n );\n\n coll.insert_many(data.clone(), None)\n\n .expect(&test_case.description);\n\n\n\n let arguments: Arguments = bson::from_bson(Bson::Document(test_case.operation.arguments))\n", "file_path": "src/test/spec/crud/aggregate.rs", "rank": 82, "score": 104218.27768041815 }, { "content": "#[function_name::named]\n\nfn run_distinct_test(test_file: TestFile) {\n\n let client = RUNTIME.block_on(TestClient::new());\n\n let data = test_file.data;\n\n\n\n for mut test_case in test_file.tests {\n\n if test_case.operation.name != \"distinct\" {\n\n continue;\n\n }\n\n\n\n let _guard = LOCK.run_concurrently();\n\n\n\n test_case.description = test_case.description.replace('$', \"%\");\n\n\n\n let coll = client.init_db_and_coll(function_name!(), &test_case.description);\n\n coll.insert_many(data.clone(), None)\n\n .expect(&test_case.description);\n\n\n\n let arguments: Arguments = bson::from_bson(Bson::Document(test_case.operation.arguments))\n\n .expect(&test_case.description);\n\n let outcome: Outcome<Vec<Bson>> =\n", "file_path": "src/test/spec/crud/distinct.rs", "rank": 83, "score": 104218.27768041815 }, { "content": "#[function_name::named]\n\nfn run_count_test(test_file: TestFile) {\n\n let client = RUNTIME.block_on(TestClient::new());\n\n let data = test_file.data;\n\n\n\n for mut test_case in test_file.tests {\n\n let lower_description = test_case.description.to_lowercase();\n\n\n\n // old `count` not implemented, collation not implemented\n\n if !test_case.operation.name.contains(\"count\") || lower_description.contains(\"deprecated\") {\n\n continue;\n\n }\n\n\n\n let _guard = LOCK.run_concurrently();\n\n\n\n test_case.description = test_case.description.replace('$', \"%\");\n\n\n\n let coll = client.init_db_and_coll(function_name!(), &test_case.description);\n\n\n\n if !data.is_empty() {\n\n coll.insert_many(data.clone(), None)\n", "file_path": "src/test/spec/crud/count.rs", "rank": 84, "score": 104218.27768041815 }, { "content": 
"#[function_name::named]\n\nfn run_find_test(test_file: TestFile) {\n\n let client = RUNTIME.block_on(TestClient::new());\n\n let data = test_file.data;\n\n\n\n for mut test_case in test_file.tests {\n\n if test_case.operation.name != \"find\" {\n\n continue;\n\n }\n\n\n\n let _guard = LOCK.run_concurrently();\n\n\n\n test_case.description = test_case.description.replace('$', \"%\");\n\n\n\n let coll = client.init_db_and_coll(function_name!(), &test_case.description);\n\n coll.insert_many(data.clone(), None)\n\n .expect(&test_case.description);\n\n\n\n let arguments: Arguments = bson::from_bson(Bson::Document(test_case.operation.arguments))\n\n .expect(&test_case.description);\n\n let outcome: Outcome<Vec<Document>> =\n", "file_path": "src/test/spec/crud/find.rs", "rank": 85, "score": 104218.27768041815 }, { "content": "#[derive(Deserialize)]\n\nstruct IsMasterReply {\n\n ismaster: bool,\n\n ok: f64,\n\n}\n\n\n\nasync fn get_coll_info(db: &Database, filter: Option<Document>) -> Vec<CollectionInfo> {\n\n let colls: Result<Vec<Document>, _> =\n\n db.list_collections(filter, None).await.unwrap().collect();\n\n let mut colls: Vec<CollectionInfo> = colls\n\n .unwrap()\n\n .into_iter()\n\n .map(|doc| bson::from_bson(Bson::Document(doc)).unwrap())\n\n .collect();\n\n colls.sort_by(|c1, c2| c1.name.cmp(&c2.name));\n\n\n\n colls\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test(core_threads = 2))]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n", "file_path": "src/test/db.rs", "rank": 86, "score": 103993.24199773138 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct ClientMetadata {\n\n pub driver: DriverMetadata,\n\n pub os: OsMetadata,\n\n}\n\n\n", "file_path": "src/test/client.rs", "rank": 87, "score": 103993.17486145181 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct OsMetadata {\n\n #[serde(rename = \"type\")]\n\n pub os_type: String,\n\n pub architecture: String,\n\n}\n\n\n\n// This test currently doesn't pass on replica 
sets and sharded clusters consistently due to\n\n// `currentOp` sometimes detecting heartbeats between the server. Eventually we can test this using\n\n// APM or coming up with something more clever, but for now, we're just disabling it.\n\n//\n\n// #[cfg_attr(feature = \"tokio-runtime\", tokio::test(core_threads = 2))]\n\n// #[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\n#[allow(unused)]\n\nasync fn metadata_sent_in_handshake() {\n\n let client = TestClient::new().await;\n\n let db = client.database(\"admin\");\n\n let result = db.run_command(doc! { \"currentOp\": 1 }, None).await.unwrap();\n\n\n\n let in_prog = match result.get(\"inprog\") {\n\n Some(Bson::Array(in_prog)) => in_prog,\n", "file_path": "src/test/client.rs", "rank": 88, "score": 103993.17486145181 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct DriverMetadata {\n\n pub name: String,\n\n pub version: String,\n\n}\n\n\n", "file_path": "src/test/client.rs", "rank": 89, "score": 103993.17486145181 }, { "content": "#[derive(Debug)]\n\nstruct Executor {\n\n description: String,\n\n operations: Vec<Operation>,\n\n error: Option<self::file::Error>,\n\n events: Vec<Event>,\n\n state: Arc<State>,\n\n}\n\n\n", "file_path": "src/cmap/test/mod.rs", "rank": 90, "score": 103988.09071109802 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n handler: Arc<EventHandler>,\n\n connections: RwLock<HashMap<String, Connection>>,\n\n threads: RwLock<HashMap<String, AsyncJoinHandle<Result<()>>>>,\n\n\n\n // In order to drop the pool when performing a `close` operation, we use an `Option` so that we\n\n // can replace it with `None`. 
Since none of the tests should use the pool after its closed\n\n // (besides the ones we manually skip over), it's fine for us to `unwrap` the pool during these\n\n // tests, as panicking is sufficient to exit any aberrant test with a failure.\n\n pool: RwLock<Option<ConnectionPool>>,\n\n}\n\n\n\nimpl State {\n\n fn count_all_events(&self) -> usize {\n\n self.handler.events.read().unwrap().len()\n\n }\n\n\n\n // Counts the number of events of the given type that have occurred so far.\n\n fn count_events(&self, event_type: &str) -> usize {\n\n self.handler\n", "file_path": "src/cmap/test/mod.rs", "rank": 91, "score": 103988.09071109802 }, { "content": "/// Model of the final message sent by the client.\n\n///\n\n/// Contains the \"AuthMessage\" mentioned in the RFC used in computing the client and server\n\n/// signatures.\n\nstruct ClientFinal {\n\n message: String,\n\n auth_message: String,\n\n conversation_id: Bson,\n\n}\n\n\n\nimpl ClientFinal {\n\n fn new(\n\n salted_password: &[u8],\n\n client_first: &ClientFirst,\n\n server_first: &ServerFirst,\n\n scram: &ScramVersion,\n\n ) -> Result<Self> {\n\n let client_key = scram.hmac(salted_password, b\"Client Key\")?;\n\n let stored_key = scram.h(client_key.as_slice());\n\n\n\n let without_proof = format!(\n\n \"{}={},{}={}\",\n\n CHANNEL_BINDING_KEY,\n\n base64::encode(client_first.gs2_header()),\n", "file_path": "src/client/auth/scram.rs", "rank": 92, "score": 103917.65318344411 }, { "content": "/// Model of the final message received from the server.\n\n///\n\n/// This MUST be validated before sending the final no-op message to the server.\n\nstruct ServerFinal {\n\n conversation_id: Bson,\n\n done: bool,\n\n body: ServerFinalBody,\n\n}\n\n\n\nimpl ServerFinal {\n\n fn parse(response: Document) -> Result<Self> {\n\n validate_command_success(&response)?;\n\n\n\n let conversation_id = response\n\n .get(\"conversationId\")\n\n .ok_or_else(|| Error::invalid_authentication_response(\"SCRAM\"))?;\n\n let done = 
response\n\n .get_bool(\"done\")\n\n .or_else(|_| Err(Error::invalid_authentication_response(\"SCRAM\")))?;\n\n let payload = response\n\n .get_binary_generic(\"payload\")\n\n .or_else(|_| Err(Error::invalid_authentication_response(\"SCRAM\")))?;\n\n let message = str::from_utf8(payload)\n", "file_path": "src/client/auth/scram.rs", "rank": 93, "score": 103913.4707004283 }, { "content": "/// Model of the first message received from the server.\n\n///\n\n/// This MUST be validated before sending the `ClientFinal` message back to the server.\n\nstruct ServerFirst {\n\n conversation_id: Bson,\n\n done: bool,\n\n message: String,\n\n nonce: String,\n\n salt: Vec<u8>,\n\n i: usize,\n\n}\n\n\n\nimpl ServerFirst {\n\n fn parse(response: Document) -> Result<Self> {\n\n validate_command_success(&response)?;\n\n\n\n let conversation_id = response\n\n .get(\"conversationId\")\n\n .ok_or_else(|| Error::authentication_error(\"SCRAM\", \"mismatched conversationId's\"))?;\n\n let payload = match response.get_binary_generic(\"payload\") {\n\n Ok(p) => p,\n\n Err(_) => return Err(Error::invalid_authentication_response(\"SCRAM\")),\n\n };\n", "file_path": "src/client/auth/scram.rs", "rank": 94, "score": 103913.41745721089 }, { "content": "#[derive(Hash, Eq, PartialEq)]\n\nstruct CacheEntry {\n\n password: String,\n\n salt: Vec<u8>,\n\n i: usize,\n\n mechanism: ScramVersion,\n\n}\n\n\n\n/// The versions of SCRAM supported by the driver (classified according to hash function used).\n\n#[derive(Hash, Eq, PartialEq, Clone)]\n\npub(crate) enum ScramVersion {\n\n Sha1,\n\n Sha256,\n\n}\n\n\n\nimpl ScramVersion {\n\n /// Perform SCRAM authentication for a given stream.\n\n pub(crate) async fn authenticate_stream(\n\n &self,\n\n conn: &mut Connection,\n\n credential: &Credential,\n", "file_path": "src/client/auth/scram.rs", "rank": 95, "score": 103908.85297970416 }, { "content": "/// Model of the first message sent by the client.\n\nstruct ClientFirst {\n\n message: String,\n\n\n\n 
gs2_header: Range<usize>,\n\n\n\n bare: Range<usize>,\n\n}\n\n\n\nimpl ClientFirst {\n\n fn new(username: &str, nonce: &str) -> Self {\n\n let gs2_header = format!(\"{},,\", NO_CHANNEL_BINDING);\n\n let bare = format!(\"{}={},{}={}\", USERNAME_KEY, username, NONCE_KEY, nonce);\n\n let full = format!(\"{}{}\", &gs2_header, &bare);\n\n let end = full.len();\n\n ClientFirst {\n\n message: full,\n\n gs2_header: Range {\n\n start: 0,\n\n end: gs2_header.len(),\n\n },\n", "file_path": "src/client/auth/scram.rs", "rank": 96, "score": 103908.85297970416 }, { "content": "struct NoCertVerifier {}\n\n\n\nimpl ServerCertVerifier for NoCertVerifier {\n\n fn verify_server_cert(\n\n &self,\n\n _: &RootCertStore,\n\n _: &[Certificate],\n\n _: webpki::DNSNameRef,\n\n _: &[u8],\n\n ) -> std::result::Result<ServerCertVerified, TLSError> {\n\n Ok(ServerCertVerified::assertion())\n\n }\n\n}\n\n\n\nimpl TlsOptions {\n\n pub fn into_rustls_config(self) -> Result<rustls::ClientConfig> {\n\n let mut config = rustls::ClientConfig::new();\n\n\n\n if let Some(true) = self.allow_invalid_certificates {\n\n config\n", "file_path": "src/client/options/mod.rs", "rank": 97, "score": 103295.12196391137 }, { "content": "fn run_command_monitoring_test(test_file: TestFile) {\n\n let client = RUNTIME.block_on(TestClient::new());\n\n\n\n let skipped_tests = vec![\n\n // uses old count\n\n \"A successful command\",\n\n \"A failed command event\",\n\n \"A successful command with a non-primary read preference\",\n\n // bulk write not implemented\n\n \"A successful mixed bulk write\",\n\n \"A successful unordered bulk write with an unacknowledged write concern\",\n\n // We can't pass this test since it relies on old OP_QUERY behavior (SPEC-1519)\n\n \"A successful find event with a getmore and the server kills the cursor\",\n\n ];\n\n\n\n for test_case in test_file.tests {\n\n if skipped_tests.iter().any(|st| st == &test_case.description) {\n\n println!(\"Skipping {}\", test_case.description);\n\n 
continue;\n\n }\n", "file_path": "src/test/spec/command_monitoring/mod.rs", "rank": 98, "score": 101207.36320382441 }, { "content": "#[function_name::named]\n\nfn run_delete_many_test(test_file: TestFile) {\n\n let client = RUNTIME.block_on(TestClient::new());\n\n let data = test_file.data;\n\n\n\n for mut test_case in test_file.tests {\n\n if test_case.operation.name != \"deleteMany\" {\n\n continue;\n\n }\n\n\n\n let _guard = LOCK.run_concurrently();\n\n\n\n test_case.description = test_case.description.replace('$', \"%\");\n\n\n\n let coll = client.init_db_and_coll(function_name!(), &test_case.description);\n\n coll.insert_many(data.clone(), None)\n\n .expect(&test_case.description);\n\n\n\n let arguments: Arguments = bson::from_bson(Bson::Document(test_case.operation.arguments))\n\n .expect(&test_case.description);\n\n let outcome: Outcome<ResultDoc> =\n", "file_path": "src/test/spec/crud/delete_many.rs", "rank": 99, "score": 101207.36320382441 } ]
Rust
tremor-pipeline/src/op/runtime/tremor.rs
0xd34b33f/tremor-runtime
73af8033509e224e4cbf078559f27bec4c12cf3d
use crate::op::prelude::*; use crate::FN_REGISTRY; use simd_json::borrowed::Value; use tremor_script::highlighter::Dumb as DumbHighlighter; use tremor_script::path::load as load_module_path; use tremor_script::prelude::*; use tremor_script::{self, AggrType, EventContext, Return, Script}; op!(TremorFactory(node) { if let Some(map) = &node.config { let config: Config = Config::new(map)?; match tremor_script::Script::parse( &load_module_path(), "<operator>", config.script.clone(), &*FN_REGISTRY.lock()?) { Ok(runtime) => Ok(Box::new(Tremor { runtime, config, id: node.id.clone().to_string(), })), Err(e) => { let mut h = DumbHighlighter::new(); if let Err(e) = tremor_script::Script::format_error_from_script(&config.script, &mut h, &e) { error!("{}", e.to_string()); } else { error!("{}", h.to_string()); }; Err(e.error().into()) } } } else { Err(ErrorKind::MissingOpConfig(node.id.clone().to_string()).into()) } }); #[derive(Debug, Clone, Deserialize)] struct Config { script: String, } impl ConfigImpl for Config {} #[derive(Debug)] pub struct Tremor { config: Config, runtime: Script, id: String, } impl Operator for Tremor { fn on_event( &mut self, _in_port: &str, state: &mut Value<'static>, mut event: Event, ) -> Result<Vec<(Cow<'static, str>, Event)>> { let out_port = { let context = EventContext::new(event.ingest_ns, event.origin_uri); let (unwind_event, event_meta) = event.data.parts(); let value = self.runtime.run( &context, AggrType::Emit, unwind_event, state, event_meta, ); event.origin_uri = context.origin_uri; match value { Ok(Return::EmitEvent { port }) => port.map_or_else(|| "out".into(), Cow::Owned), Ok(Return::Emit { value, port }) => { *unwind_event = value; port.map_or_else(|| "out".into(), Cow::Owned) } Ok(Return::Drop) => return Ok(vec![]), Err(ref e) => { let mut o = Value::from(hashmap! 
{ "error".into() => Value::from(self.runtime.format_error(&e)), }); std::mem::swap(&mut o, unwind_event); if let Some(error) = unwind_event.as_object_mut() { error.insert("event".into(), o); } else { unreachable!(); }; "error".into() } } }; Ok(vec![(out_port, event)]) } } #[cfg(test)] mod test { use super::*; use crate::FN_REGISTRY; use simd_json::json; use tremor_script::path::ModulePath; #[test] fn mutate() { let config = Config { script: r#"match event.a of case 1 => let event.snot = "badger" end; event;"# .to_string(), }; let runtime = Script::parse( &ModulePath { mounts: vec![] }, "<test>", config.script.clone(), &*FN_REGISTRY.lock().expect("could not claim lock"), ) .expect("failed to parse script"); let mut op = Tremor { config, runtime, id: "badger".into(), }; let event = Event { origin_uri: None, is_batch: false, id: 1, ingest_ns: 1, data: Value::from(json!({"a": 1})).into(), kind: None, }; let mut state = Value::null(); let (out, event) = op .on_event("in", &mut state, event) .expect("failed to run pipeline") .pop() .expect("no event returned"); assert_eq!("out", out); assert_eq!( *event.data.suffix().value(), Value::from(json!({"snot": "badger", "a": 1})) ) } #[test] pub fn test_how_it_handles_errors() { let config = Config { script: r#"match this is invalid code so no match case"#.to_string(), }; let _runtime = Script::parse( &ModulePath { mounts: vec![] }, "<test>", config.script, &*FN_REGISTRY.lock().expect("could not claim lock"), ); } }
use crate::op::prelude::*; use crate::FN_REGISTRY; use simd_json::borrowed::Value; use tremor_script::highlighter::Dumb as DumbHighlighter; use tremor_script::path::load as load_module_path; use tremor_script::prelude::*; use tremor_script::{self, AggrType, EventContext, Return, Script}; op!(TremorFactory(node) { if let Some(map) = &node.config { let config: Config = Config::new(map)?; match tremor_script::Script::parse( &load_module_path(), "<operator>", config.script.clone(), &*FN_REGISTRY.lock()?) { Ok(runtime) => Ok(Box::new(Tremor { runtime, config, id: node.id.clone().to_string(), })), Err(e) => { let mut h = DumbHighlighter::new(); if let Err(e) = tremor_script::Script::format_error_from_script(&config.script, &mut h, &e) { error!("{}", e.to_string()); } else { error!("{}", h.to_string()); }; Err(e.error().into()) } } } else { Err(ErrorKind::MissingOpConfig(node
t_eq!("out", out); assert_eq!( *event.data.suffix().value(), Value::from(json!({"snot": "badger", "a": 1})) ) } #[test] pub fn test_how_it_handles_errors() { let config = Config { script: r#"match this is invalid code so no match case"#.to_string(), }; let _runtime = Script::parse( &ModulePath { mounts: vec![] }, "<test>", config.script, &*FN_REGISTRY.lock().expect("could not claim lock"), ); } }
.id.clone().to_string()).into()) } }); #[derive(Debug, Clone, Deserialize)] struct Config { script: String, } impl ConfigImpl for Config {} #[derive(Debug)] pub struct Tremor { config: Config, runtime: Script, id: String, } impl Operator for Tremor { fn on_event( &mut self, _in_port: &str, state: &mut Value<'static>, mut event: Event, ) -> Result<Vec<(Cow<'static, str>, Event)>> { let out_port = { let context = EventContext::new(event.ingest_ns, event.origin_uri); let (unwind_event, event_meta) = event.data.parts(); let value = self.runtime.run( &context, AggrType::Emit, unwind_event, state, event_meta, ); event.origin_uri = context.origin_uri; match value { Ok(Return::EmitEvent { port }) => port.map_or_else(|| "out".into(), Cow::Owned), Ok(Return::Emit { value, port }) => { *unwind_event = value; port.map_or_else(|| "out".into(), Cow::Owned) } Ok(Return::Drop) => return Ok(vec![]), Err(ref e) => { let mut o = Value::from(hashmap! { "error".into() => Value::from(self.runtime.format_error(&e)), }); std::mem::swap(&mut o, unwind_event); if let Some(error) = unwind_event.as_object_mut() { error.insert("event".into(), o); } else { unreachable!(); }; "error".into() } } }; Ok(vec![(out_port, event)]) } } #[cfg(test)] mod test { use super::*; use crate::FN_REGISTRY; use simd_json::json; use tremor_script::path::ModulePath; #[test] fn mutate() { let config = Config { script: r#"match event.a of case 1 => let event.snot = "badger" end; event;"# .to_string(), }; let runtime = Script::parse( &ModulePath { mounts: vec![] }, "<test>", config.script.clone(), &*FN_REGISTRY.lock().expect("could not claim lock"), ) .expect("failed to parse script"); let mut op = Tremor { config, runtime, id: "badger".into(), }; let event = Event { origin_uri: None, is_batch: false, id: 1, ingest_ns: 1, data: Value::from(json!({"a": 1})).into(), kind: None, }; let mut state = Value::null(); let (out, event) = op .on_event("in", &mut state, event) .expect("failed to run pipeline") .pop() .expect("no 
event returned"); asser
random
[]
Rust
node/src/components/rpc_server/rpcs/docs.rs
sacherjj/casper-node
2569ebd51923d0353542c93c5d47480246170cbb
#![allow(clippy::field_reassign_with_default)] use futures::{future::BoxFuture, FutureExt}; use http::Response; use hyper::Body; use once_cell::sync::Lazy; use schemars::{ gen::{SchemaGenerator, SchemaSettings}, schema::Schema, JsonSchema, Map, MapEntry, }; use semver::Version; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use warp_json_rpc::Builder; use super::{ account::PutDeploy, chain::{GetBlock, GetBlockTransfers, GetStateRootHash}, info::{GetDeploy, GetPeers, GetStatus}, state::{GetAuctionInfo, GetBalance, GetItem}, Error, ReactorEventT, RpcWithOptionalParams, RpcWithParams, RpcWithoutParams, RpcWithoutParamsExt, }; use crate::{effect::EffectBuilder, rpcs::chain::GetEraInfoBySwitchBlock}; pub(crate) static DOCS_EXAMPLE_PROTOCOL_VERSION: Lazy<Version> = Lazy::new(|| Version::new(1, 1, 2)); const DEFINITIONS_PATH: &str = "#/components/schemas/"; static OPEN_RPC_SCHEMA: Lazy<OpenRpcSchema> = Lazy::new(|| { let contact = OpenRpcContactField { name: "CasperLabs".to_string(), url: "https://casperlabs.io".to_string(), }; let license = OpenRpcLicenseField { name: "CasperLabs Open Source License Version 1.0".to_string(), url: "https://raw.githubusercontent.com/CasperLabs/casper-node/master/LICENSE".to_string(), }; let info = OpenRpcInfoField { version: DOCS_EXAMPLE_PROTOCOL_VERSION.to_string(), title: "Client API of Casper Node".to_string(), description: "This describes the JSON-RPC 2.0 API of a node on the Casper network." 
.to_string(), contact, license, }; let server = OpenRpcServerEntry { name: "any Casper Network node".to_string(), url: "http://IP:PORT/rpc/".to_string(), }; let mut schema = OpenRpcSchema { openrpc: "1.0.0-rc1".to_string(), info, servers: vec![server], methods: vec![], components: Components { schemas: Map::new(), }, }; schema.push_with_params::<PutDeploy>("receives a Deploy to be executed by the network"); schema.push_with_params::<GetDeploy>("returns a Deploy from the network"); schema.push_without_params::<GetPeers>("returns a list of peers connected to the node"); schema.push_without_params::<GetStatus>("returns the current status of the node"); schema.push_with_optional_params::<GetBlock>("returns a Block from the network"); schema.push_with_optional_params::<GetBlockTransfers>( "returns all transfers for a Block from the network", ); schema.push_with_optional_params::<GetStateRootHash>( "returns a state root hash at a given Block", ); schema.push_with_params::<GetItem>("returns a stored value from the network"); schema.push_with_params::<GetBalance>("returns a purse's balance from the network"); schema.push_with_optional_params::<GetEraInfoBySwitchBlock>( "returns an EraInfo from the network", ); schema.push_without_params::<GetAuctionInfo>( "returns the bids and validators as of the most recently added Block", ); schema }); static LIST_RPCS_RESULT: Lazy<ListRpcsResult> = Lazy::new(|| ListRpcsResult { api_version: DOCS_EXAMPLE_PROTOCOL_VERSION.clone(), name: "OpenRPC Schema".to_string(), schema: OPEN_RPC_SCHEMA.clone(), }); pub trait DocExample { fn doc_example() -> &'static Self; } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcSchema { openrpc: String, info: OpenRpcInfoField, servers: Vec<OpenRpcServerEntry>, methods: Vec<Method>, components: Components, } impl OpenRpcSchema { fn new_generator() -> SchemaGenerator { let settings = SchemaSettings::default().with(|settings| { settings.definitions_path = DEFINITIONS_PATH.to_string(); }); 
settings.into_generator() } fn push_with_params<T: RpcWithParams>(&mut self, summary: &str) { let mut generator = Self::new_generator(); let params_schema = T::RequestParams::json_schema(&mut generator); let params = Self::make_params(params_schema); let result_schema = T::ResponseResult::json_schema(&mut generator); let result = ResponseResult { name: format!("{}_result", T::METHOD), schema: result_schema, }; let examples = vec![Example::from_rpc_with_params::<T>()]; let method = Method { name: T::METHOD.to_string(), summary: summary.to_string(), params, result, examples, }; self.methods.push(method); self.update_schemas::<T::RequestParams>(); self.update_schemas::<T::ResponseResult>(); } fn push_without_params<T: RpcWithoutParams>(&mut self, summary: &str) { let mut generator = Self::new_generator(); let result_schema = T::ResponseResult::json_schema(&mut generator); let result = ResponseResult { name: format!("{}_result", T::METHOD), schema: result_schema, }; let examples = vec![Example::from_rpc_without_params::<T>()]; let method = Method { name: T::METHOD.to_string(), summary: summary.to_string(), params: vec![], result, examples, }; self.methods.push(method); self.update_schemas::<T::ResponseResult>(); } fn push_with_optional_params<T: RpcWithOptionalParams>(&mut self, summary: &str) { let mut generator = Self::new_generator(); let params_schema = T::OptionalRequestParams::json_schema(&mut generator); let params = Self::make_params(params_schema); let result_schema = T::ResponseResult::json_schema(&mut generator); let result = ResponseResult { name: format!("{}_result", T::METHOD), schema: result_schema, }; let examples = vec![Example::from_rpc_with_optional_params::<T>()]; let method = Method { name: T::METHOD.to_string(), summary: summary.to_string(), params, result, examples, }; self.methods.push(method); self.update_schemas::<T::OptionalRequestParams>(); self.update_schemas::<T::ResponseResult>(); } fn make_params(schema: Schema) -> Vec<SchemaParam> { let 
schema_object = schema.into_object().object.expect("should be object"); let mut required_params = schema_object .properties .iter() .filter(|(name, _)| schema_object.required.contains(*name)) .map(|(name, schema)| SchemaParam { name: name.clone(), schema: schema.clone(), required: true, }) .collect::<Vec<_>>(); let optional_params = schema_object .properties .iter() .filter(|(name, _)| !schema_object.required.contains(*name)) .map(|(name, schema)| SchemaParam { name: name.clone(), schema: schema.clone(), required: false, }) .collect::<Vec<_>>(); required_params.extend(optional_params); required_params } fn update_schemas<S: JsonSchema>(&mut self) { let generator = Self::new_generator(); let mut root_schema = generator.into_root_schema_for::<S>(); for (key, value) in root_schema.definitions.drain(..) { match self.components.schemas.entry(key) { MapEntry::Occupied(current_value) => { assert_eq!( current_value.get().clone().into_object().metadata, value.into_object().metadata ) } MapEntry::Vacant(vacant) => { let _ = vacant.insert(value); } } } } } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcInfoField { version: String, title: String, description: String, contact: OpenRpcContactField, license: OpenRpcLicenseField, } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcContactField { name: String, url: String, } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcLicenseField { name: String, url: String, } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcServerEntry { name: String, url: String, } #[derive(Clone, Serialize, Deserialize, Debug)] pub struct Method { name: String, summary: String, params: Vec<SchemaParam>, result: ResponseResult, examples: Vec<Example>, } #[derive(Clone, Serialize, Deserialize, Debug)] struct SchemaParam { name: String, schema: Schema, required: bool, } #[derive(Clone, Serialize, Deserialize, Debug)] struct ResponseResult { name: String, schema: Schema, } #[derive(Clone, Serialize, 
Deserialize, Debug)] pub struct Example { name: String, params: Vec<ExampleParam>, result: ExampleResult, } impl Example { fn new(method_name: &str, maybe_params_obj: Option<Value>, result_value: Value) -> Self { let params = match maybe_params_obj { Some(params_obj) => params_obj .as_object() .unwrap() .iter() .map(|(name, value)| ExampleParam { name: name.clone(), value: value.clone(), }) .collect(), None => vec![], }; Example { name: format!("{}_example", method_name), params, result: ExampleResult { name: format!("{}_example_result", method_name), value: result_value, }, } } fn from_rpc_with_params<T: RpcWithParams>() -> Self { Self::new( T::METHOD, Some(json!(T::RequestParams::doc_example())), json!(T::ResponseResult::doc_example()), ) } fn from_rpc_without_params<T: RpcWithoutParams>() -> Self { Self::new(T::METHOD, None, json!(T::ResponseResult::doc_example())) } fn from_rpc_with_optional_params<T: RpcWithOptionalParams>() -> Self { Self::new( T::METHOD, Some(json!(T::OptionalRequestParams::doc_example())), json!(T::ResponseResult::doc_example()), ) } } #[derive(Clone, Serialize, Deserialize, Debug)] struct ExampleParam { name: String, value: Value, } #[derive(Clone, Serialize, Deserialize, Debug)] struct ExampleResult { name: String, value: Value, } #[derive(Clone, Serialize, Deserialize, Debug)] struct Components { schemas: Map<String, Schema>, } #[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] #[serde(deny_unknown_fields)] pub struct ListRpcsResult { #[schemars(with = "String")] api_version: Version, name: String, #[schemars(skip)] schema: OpenRpcSchema, } impl DocExample for ListRpcsResult { fn doc_example() -> &'static Self { &*LIST_RPCS_RESULT } } #[derive(Clone, Serialize, Deserialize, Debug)] pub struct ListRpcs {} impl RpcWithoutParams for ListRpcs { const METHOD: &'static str = "rpc.discover"; type ResponseResult = ListRpcsResult; } impl RpcWithoutParamsExt for ListRpcs { fn handle_request<REv: ReactorEventT>( _effect_builder: 
EffectBuilder<REv>, response_builder: Builder, _api_version: Version, ) -> BoxFuture<'static, Result<Response<Body>, Error>> { async move { Ok(response_builder.success(ListRpcsResult::doc_example().clone())?) }.boxed() } } #[cfg(test)] mod tests { use crate::{types::Chainspec, utils::Loadable}; use super::*; #[test] fn check_docs_example_version() { let chainspec = Chainspec::from_resources("production"); assert_eq!( *DOCS_EXAMPLE_PROTOCOL_VERSION, chainspec.protocol_config.version, "DOCS_EXAMPLE_VERSION needs to be updated to match the [protocol.version] in \ 'resources/production/chainspec.toml'" ); } }
#![allow(clippy::field_reassign_with_default)] use futures::{future::BoxFuture, FutureExt}; use http::Response; use hyper::Body; use once_cell::sync::Lazy; use schemars::{ gen::{SchemaGenerator, SchemaSettings}, schema::Schema, JsonSchema, Map, MapEntry, }; use semver::Version; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use warp_json_rpc::Builder; use super::{ account::PutDeploy, chain::{GetBlock, GetBlockTransfers, GetStateRootHash}, info::{GetDeploy, GetPeers, GetStatus}, state::{GetAuctionInfo, GetBalance, GetItem}, Error, ReactorEventT, RpcWithOptionalParams, RpcWithParams, RpcWithoutParams, RpcWithoutParamsExt, }; use crate::{effect::EffectBuilder, rpcs::chain::GetEraInfoBySwitchBlock}; pub(crate) static DOCS_EXAMPLE_PROTOCOL_VERSION: Lazy<Version> = Lazy::new(|| Version::new(1, 1, 2)); const DEFINITIONS_PATH: &str = "#/components/schemas/"; static OPEN_RPC_SCHEMA: Lazy<OpenRpcSchema> = Lazy::new(|| { let contact = OpenRpcContactField { name: "CasperLabs".to_string(), url: "https://casperlabs.io".to_string(), }; let license = OpenRpcLicenseField { name: "CasperLabs Open Source License Version 1.0".to_string(), url: "https://raw.githubusercontent.com/CasperLabs/casper-node/master/LICENSE".to_string(), }; let info = OpenRpcInfoField { version: DOCS_EXAMPLE_PROTOCOL_VERSION.to_string(), title: "Client API of Casper Node".to_string(), description: "This describes the JSON-RPC 2.0 API of a node on the Casper network." 
.to_string(), contact, license, }; let server = OpenRpcServerEntry { name: "any Casper Network node".to_string(), url: "http://IP:PORT/rpc/".to_string(), }; let mut schema = OpenRpcSchema { openrpc: "1.0.0-rc1".to_string(), info, servers: vec![server], methods: vec![], components: Components { schemas: Map::new(), }, }; schema.push_with_params::<PutDeploy>("receives a Deploy to be executed by the network"); schema.push_with_params::<GetDeploy>("returns a Deploy from the network"); schema.push_without_params::<GetPeers>("returns a list of peers connected to the node"); schema.push_without_params::<GetStatus>("returns the current status of the node"); schema.push_with_optional_params::<GetBlock>("returns a Block from the network"); schema.push_with_optional_params::<GetBlockTransfers>( "returns all transfers for a Block from the network", ); schema.push_with_optional_params::<GetStateRootHash>( "returns a state root hash at a given Block", ); schema.push_with_params::<GetItem>("returns a stored value from the network"); schema.push_with_params::<GetBalance>("returns a purse's balance from the network"); schema.push_with_optional_params::<GetEraInfoBySwitchBlock>( "returns an EraInfo from the network", ); schema.push_without_params::<GetAuctionInfo>( "returns the bids and validators as of the most recently added Block", ); schema }); static LIST_RPCS_RESULT: Lazy<ListRpcsResult> = Lazy::new(|| ListRpcsResult { api_version: DOCS_EXAMPLE_PROTOCOL_VERSION.clone(), name: "OpenRPC Schema".to_string(), schema: OPEN_RPC_SCHEMA.clone(), }); pub trait DocExample { fn doc_example() -> &'static Self; } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcSchema { openrpc: String, info: OpenRpcInfoField, servers: Vec<OpenRpcServerEntry>, methods: Vec<Method>, components: Components, } impl OpenRpcSchema { fn new_generator() -> SchemaGenerator { let settings = SchemaSettings::default().with(|settings| { settings.definitions_path = DEFINITIONS_PATH.to_string(); }); 
settings.into_generator() } fn push_with_params<T: RpcWithParams>(&mut self, summary: &str) { let mut generator = Self::new_generator(); let params_schema = T::RequestParams::json_schema(&mut generator); let params = Self::make_params(params_schema); let result_schema = T::ResponseResult::json_schema(&mut generator); let result = ResponseResult { name: format!("{}_result", T::METHOD), schema: result_schema, }; let examples = vec![Example::from_rpc_with_params::<T>()]; let method = Method { name: T::METHOD.to_string(), summary: summary.to_string(), params, result, examples, }; self.methods.push(method); self.update_schemas::<T::RequestParams>(); self.update_schemas::<T::ResponseResult>(); } fn push_without_params<T: RpcWithoutParams>(&mut self, summary: &str) { let mut generator = Self::new_generator(); let result_schema = T::ResponseResult::json_schema(&mut generator); let result = ResponseResult { name: format!("{}_result", T::METHOD), schema: result_schema, }; let examples = vec![Example::from_rpc_without_params::<T>()]; let method = Method { name: T::METHOD.to_string(), summary: summary.to_string(), params: vec![], result, examples, }; self.methods.push(method); self.update_schemas::<T::ResponseResult>(); } fn push_with_optional_params<T: RpcWithOptionalParams>(&mut self, summary: &str) { let mut generator = Self::new_generator(); let params_schema = T::OptionalRequestParams::json_schema(&mut generator); let params = Self::make_params(params_schema); let result_schema = T::ResponseResult::json_schema(&mut generator); let result = ResponseResult { name: format!("{}_result", T::METHOD), schema: result_schema, }; let examples = vec![Example::from_rpc_with_optional_params::<T>()]; let method = Method { name: T::METHOD.to_string(), summary: summary.to_string(), params, result, examples, }; self.methods.push(method); self.update_schemas::<T::OptionalRequestParams>(); self.update_schemas::<T::ResponseResult>(); } fn make_params(schema: Schema) -> Vec<SchemaParam> { let 
schema_object = schema.into_object().object.expect("should be object"); let mut required_params = schema_object .properties .iter() .filter(|(name, _)| schema_object.required.contains(*name)) .map(|(name, schema)| SchemaParam { name: name.clone(), schema: schema.clone(), required: true, }) .collect::<Vec<_>>(); let optional_params = schema_object .properties .iter() .filter(|(name, _)| !schema_object.required.contains(*name)) .map(|(name, schema)| SchemaParam { name: name.clone(), schema: schema.clone(), required: false, }) .collect::<Vec<_>>(); required_params.extend(optional_params); required_params } fn update_schemas<S: JsonSchema>(&mut self) { let generator = Self::new_generator(); let mut root_schema = generator.into_root_schema_for::<S>(); for (key, value) in root_schema.definitions.drain(..) { match self.components.schemas.entry(key) { MapEntry::Occupied(current_value) => { assert_eq!( current_value.get().clone().into_object().metadata, value.into_object().metadata ) } MapEntry::Vacant(vacant) => { let _ = vacant.insert(value); } } } } } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcInfoField { version: String, title: String, description: String, contact: OpenRpcContactField, license: OpenRpcLicenseField, } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcContactField { name: String, url: String, } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcLicenseField { name: String, url: String, } #[derive(Clone, Serialize, Deserialize, Debug)] struct OpenRpcServerEntry { name: String, url: String, } #[derive(Clone, Serialize, Deserialize, Debug)] pub struct Method { name: String, summary: String, params: Vec<SchemaParam>, result: ResponseResult, examples: Vec<Example>, } #[derive(Clone, Serialize, Deserialize, Debug)] struct SchemaParam { name: String, schema: Schema, required: bool, } #[derive(Clone, Serialize, Deserialize, Debug)] struct ResponseResult { name: String, schema: Schema, } #[derive(Clone, Serialize, 
Deserialize, Debug)] pub struct Example { name: String, params: Vec<ExampleParam>, result: ExampleResult, } impl Example { fn new(method_name: &str, maybe_params_obj: Option<Value>, result_value: Value) -> Self { let params =
; Example { name: format!("{}_example", method_name), params, result: ExampleResult { name: format!("{}_example_result", method_name), value: result_value, }, } } fn from_rpc_with_params<T: RpcWithParams>() -> Self { Self::new( T::METHOD, Some(json!(T::RequestParams::doc_example())), json!(T::ResponseResult::doc_example()), ) } fn from_rpc_without_params<T: RpcWithoutParams>() -> Self { Self::new(T::METHOD, None, json!(T::ResponseResult::doc_example())) } fn from_rpc_with_optional_params<T: RpcWithOptionalParams>() -> Self { Self::new( T::METHOD, Some(json!(T::OptionalRequestParams::doc_example())), json!(T::ResponseResult::doc_example()), ) } } #[derive(Clone, Serialize, Deserialize, Debug)] struct ExampleParam { name: String, value: Value, } #[derive(Clone, Serialize, Deserialize, Debug)] struct ExampleResult { name: String, value: Value, } #[derive(Clone, Serialize, Deserialize, Debug)] struct Components { schemas: Map<String, Schema>, } #[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] #[serde(deny_unknown_fields)] pub struct ListRpcsResult { #[schemars(with = "String")] api_version: Version, name: String, #[schemars(skip)] schema: OpenRpcSchema, } impl DocExample for ListRpcsResult { fn doc_example() -> &'static Self { &*LIST_RPCS_RESULT } } #[derive(Clone, Serialize, Deserialize, Debug)] pub struct ListRpcs {} impl RpcWithoutParams for ListRpcs { const METHOD: &'static str = "rpc.discover"; type ResponseResult = ListRpcsResult; } impl RpcWithoutParamsExt for ListRpcs { fn handle_request<REv: ReactorEventT>( _effect_builder: EffectBuilder<REv>, response_builder: Builder, _api_version: Version, ) -> BoxFuture<'static, Result<Response<Body>, Error>> { async move { Ok(response_builder.success(ListRpcsResult::doc_example().clone())?) 
}.boxed() } } #[cfg(test)] mod tests { use crate::{types::Chainspec, utils::Loadable}; use super::*; #[test] fn check_docs_example_version() { let chainspec = Chainspec::from_resources("production"); assert_eq!( *DOCS_EXAMPLE_PROTOCOL_VERSION, chainspec.protocol_config.version, "DOCS_EXAMPLE_VERSION needs to be updated to match the [protocol.version] in \ 'resources/production/chainspec.toml'" ); } }
match maybe_params_obj { Some(params_obj) => params_obj .as_object() .unwrap() .iter() .map(|(name, value)| ExampleParam { name: name.clone(), value: value.clone(), }) .collect(), None => vec![], }
if_condition
[ { "content": "fn dependencies(values: &[&str]) -> Result<Vec<DeployHash>> {\n\n let mut hashes = Vec::with_capacity(values.len());\n\n for value in values {\n\n let digest = Digest::from_hex(value).map_err(|error| Error::CryptoError {\n\n context: \"dependencies\",\n\n error,\n\n })?;\n\n hashes.push(DeployHash::new(digest))\n\n }\n\n Ok(hashes)\n\n}\n\n\n\n/// Handles providing the arg for and retrieval of simple session and payment args.\n\nmod arg_simple {\n\n use super::*;\n\n\n\n const ARG_VALUE_NAME: &str = r#\"\"NAME:TYPE='VALUE'\" OR \"NAME:TYPE=null\"\"#;\n\n\n\n pub(crate) mod session {\n\n use super::*;\n", "file_path": "client/lib/parsing.rs", "rank": 0, "score": 472230.20512735454 }, { "content": "/// Returns `true` if `name` exists in the current context's named keys.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn has_key(name: &str) -> bool {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n let result = unsafe { ext_ffi::casper_has_key(name_ptr, name_size) };\n\n result == 0\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 1, "score": 444429.0952582777 }, { "content": "/// Takes the known_addresses of a node and returns if it is isolated.\n\n///\n\n/// An isolated node has no chance of recovering a connection to the network and is not\n\n/// connected to any peer.\n\nfn network_is_isolated(known_addresses: &HashMap<Multiaddr, ConnectionState>) -> bool {\n\n known_addresses\n\n .values()\n\n .all(|state| *state == ConnectionState::Failed)\n\n}\n\n\n\nasync fn handle_one_way_messaging_event<REv: ReactorEventT<P>, P: PayloadT>(\n\n swarm: &mut Swarm<Behavior>,\n\n event_queue: EventQueueHandle<REv>,\n\n event: RequestResponseEvent<Vec<u8>, ()>,\n\n) {\n\n match event {\n\n RequestResponseEvent::Message {\n\n peer,\n\n message: 
RequestResponseMessage::Request { request, .. },\n\n } => {\n\n // We've received a one-way request from a peer: announce it via the reactor on the\n\n // `NetworkIncoming` queue.\n\n let sender = NodeId::from(peer);\n\n match bincode::deserialize::<P>(&request) {\n", "file_path": "node/src/components/network.rs", "rank": 2, "score": 439315.75576678506 }, { "content": "/// Create a state storage key for block proposer deploy sets based on a chainspec.\n\n///\n\n/// We namespace based on a chainspec to prevent validators from loading data for a different chain\n\n/// if they forget to clear their state.\n\npub fn create_storage_key(chainspec: &Chainspec) -> Vec<u8> {\n\n format!(\n\n \"block_proposer_deploy_sets:version={},chain_name={}\",\n\n chainspec.protocol_config.version, chainspec.network_config.name\n\n )\n\n .into()\n\n}\n\n\n\nimpl BlockProposerDeploySets {\n\n /// Prunes expired deploy information from the BlockProposerState, returns the total deploys\n\n /// pruned\n\n pub(crate) fn prune(&mut self, current_instant: Timestamp) -> usize {\n\n let pending = prune_pending_deploys(&mut self.pending, current_instant);\n\n let finalized = prune_deploys(&mut self.finalized_deploys, current_instant);\n\n pending + finalized\n\n }\n\n}\n\n\n\n/// Prunes expired deploy information from an individual deploy collection, returns the total\n\n/// deploys pruned\n", "file_path": "node/src/components/block_proposer/deploy_sets.rs", "rank": 3, "score": 439231.88133397966 }, { "content": "/// Serializes `t` into a `Vec<u8>`.\n\npub fn serialize(t: impl ToBytes) -> Result<Vec<u8>, Error> {\n\n t.into_bytes()\n\n}\n\n\n\npub(crate) fn safe_split_at(bytes: &[u8], n: usize) -> Result<(&[u8], &[u8]), Error> {\n\n if n > bytes.len() {\n\n Err(Error::EarlyEndOfStream)\n\n } else {\n\n Ok(bytes.split_at(n))\n\n }\n\n}\n\n\n\nimpl ToBytes for () {\n\n fn to_bytes(&self) -> Result<Vec<u8>, Error> {\n\n Ok(Vec::new())\n\n }\n\n\n\n fn serialized_length(&self) -> usize {\n\n 
UNIT_SERIALIZED_LENGTH\n\n }\n", "file_path": "types/src/bytesrepr.rs", "rank": 4, "score": 437505.52063355787 }, { "content": "fn unsafe_str_arg(arg: *const c_char, arg_name: &'static str) -> Result<&'static str> {\n\n unsafe {\n\n // Strings are never required to be passed at this level, instead we return \"\" if the ptr ==\n\n // null and let the library deal with parsing values.\n\n if arg.is_null() {\n\n return Ok(Default::default());\n\n }\n\n CStr::from_ptr(arg).to_str()\n\n }\n\n .map_err(|error| {\n\n Error::InvalidArgument(\n\n arg_name,\n\n format!(\n\n \"invalid utf8 value passed for arg '{}': {:?}\",\n\n stringify!($arg),\n\n error,\n\n ),\n\n )\n\n })\n\n}\n\n\n", "file_path": "client/lib/ffi.rs", "rank": 5, "score": 436357.9893616072 }, { "content": "pub fn set_bids<P>(provider: &mut P, validators: Bids) -> Result<(), Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n{\n\n for (_, bid) in validators.into_iter() {\n\n let account_hash = AccountHash::from(bid.validator_public_key());\n\n provider.write_bid(account_hash, bid)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 6, "score": 429102.07432587433 }, { "content": "/// Helper to call TryInto::try_into on a *const ptr of our rust type implementing it.\n\n/// This is used for\n\nfn unsafe_try_into<T, I>(value: *const I, field_name: &'static str) -> Result<T>\n\nwhere\n\n I: Clone,\n\n I: TryInto<T, Error = Error>,\n\n{\n\n if value.is_null() {\n\n Err(Error::FFIPtrNullButRequired(field_name))\n\n } else {\n\n let value: T = unsafe { (*value).clone().try_into()? 
};\n\n Ok(value)\n\n }\n\n}\n\n\n", "file_path": "client/lib/ffi.rs", "rank": 7, "score": 424709.71179639234 }, { "content": "/// Parse string using arglang.\n\npub fn parse(input: &str) -> Result<Value, Error> {\n\n let mut tokens = tokenize(input)?.into_iter().peekable();\n\n let value = parse_stream(&mut tokens)?;\n\n\n\n // Check if there is trailing input.\n\n if let Some(trailing) = tokens.next() {\n\n return Err(Error::TrailingInput(trailing));\n\n }\n\n\n\n Ok(value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use toml::Value;\n\n\n\n use super::{parse, tokenize, Error, Token};\n\n\n\n #[test]\n\n fn tokenize_single() {\n", "file_path": "node/src/app/cli/arglang.rs", "rank": 8, "score": 421206.5869487814 }, { "content": "/// Returns an account value paired with its key\n\npub fn mocked_account(account_hash: AccountHash) -> Vec<(Key, StoredValue)> {\n\n let purse = URef::new([0u8; 32], AccessRights::READ_ADD_WRITE);\n\n let account = Account::create(account_hash, NamedKeys::new(), purse);\n\n vec![(Key::Account(account_hash), StoredValue::Account(account))]\n\n}\n", "file_path": "execution_engine/src/shared/test_utils.rs", "rank": 9, "score": 417683.9212713215 }, { "content": "/// Stores the given [`Key`] under `name` in the current context's named keys.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn put_key(name: &str, key: Key) {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n let (key_ptr, key_size, _bytes2) = contract_api::to_ptr(key);\n\n unsafe { ext_ffi::casper_put_key(name_ptr, name_size, key_ptr, key_size) };\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 10, "score": 411880.65890783776 }, { "content": "fn name(value: &str) -> Option<String> {\n\n none_if_empty(value).map(str::to_string)\n\n}\n\n\n", "file_path": 
"client/lib/parsing.rs", "rank": 11, "score": 404082.51239491126 }, { "content": "fn version(value: &str) -> Result<u32> {\n\n value\n\n .parse::<u32>()\n\n .map_err(|error| Error::FailedToParseInt(\"version\", error))\n\n}\n\n\n", "file_path": "client/lib/parsing.rs", "rank": 12, "score": 403855.6116089878 }, { "content": "/// Iterates the given path, returning the subdir representing the immediate next SemVer version\n\n/// after `current_version`.\n\n///\n\n/// Subdir names should be semvers with dots replaced with underscores.\n\nfn next_installed_version(dir: &Path, current_version: &Version) -> Result<Version, Error> {\n\n let max_version = Version::new(u64::max_value(), u64::max_value(), u64::max_value());\n\n\n\n let mut next_version = max_version.clone();\n\n let mut read_version = false;\n\n for entry in fs::read_dir(dir).map_err(|error| Error::ReadDir {\n\n dir: dir.to_path_buf(),\n\n error,\n\n })? {\n\n let path = match entry {\n\n Ok(dir_entry) => dir_entry.path(),\n\n Err(error) => {\n\n debug!(dir=%dir.display(), %error, \"bad entry while reading dir\");\n\n continue;\n\n }\n\n };\n\n\n\n let subdir_name = match path.file_name() {\n\n Some(name) => name.to_string_lossy().replace(\"_\", \".\"),\n\n None => continue,\n", "file_path": "node/src/components/chainspec_loader.rs", "rank": 13, "score": 403301.72081010626 }, { "content": "/// Returns the requested named [`Key`] from the current context.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn get_key(name: &str) -> Option<Key> {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n let mut key_bytes = vec![0u8; Key::max_serialized_length()];\n\n let mut total_bytes: usize = 0;\n\n let ret = unsafe {\n\n ext_ffi::casper_get_key(\n\n name_ptr,\n\n name_size,\n\n key_bytes.as_mut_ptr(),\n\n key_bytes.len(),\n\n &mut total_bytes as *mut 
usize,\n\n )\n\n };\n\n match api_error::result_from(ret) {\n\n Ok(_) => {}\n\n Err(ApiError::MissingKey) => return None,\n\n Err(e) => revert(e),\n\n }\n\n key_bytes.truncate(total_bytes);\n\n let key: Key = bytesrepr::deserialize(key_bytes).unwrap_or_revert();\n\n Some(key)\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 14, "score": 391132.2846277052 }, { "content": "/// Converts an `i32` to a `Result<(), ApiError>`, where `0` represents `Ok(())`, and all other\n\n/// inputs are mapped to `Err(ApiError::<variant>)`. The full list of mappings can be found in the\n\n/// [docs for `ApiError`](ApiError#mappings).\n\npub fn result_from(value: i32) -> Result<(), ApiError> {\n\n match value {\n\n 0 => Ok(()),\n\n _ => Err(ApiError::from(value as u32)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{i32, u16, u8};\n\n\n\n use super::*;\n\n\n\n fn round_trip(result: Result<(), ApiError>) {\n\n let code = i32_from(result);\n\n assert_eq!(result, result_from(code));\n\n }\n\n\n\n #[test]\n\n fn error_values() {\n", "file_path": "types/src/api_error.rs", "rank": 15, "score": 390920.5777161135 }, { "content": "pub trait NodeIdT: Clone + Display + Debug + Send + Eq + Hash + DataSize + 'static {}\n\nimpl<I> NodeIdT for I where I: Clone + Display + Debug + Send + Eq + Hash + DataSize + 'static {}\n\n\n\n/// A validator identifier.\n\npub(crate) trait ValidatorIdT: Eq + Ord + Clone + Debug + Hash + Send + DataSize {}\n\nimpl<VID> ValidatorIdT for VID where VID: Eq + Ord + Clone + Debug + Hash + Send + DataSize {}\n\n\n\n/// The consensus value type, e.g. 
a list of transactions.\n\npub(crate) trait ConsensusValueT:\n\n Eq + Clone + Debug + Hash + Serialize + DeserializeOwned + Send + DataSize\n\n{\n\n type Hash: HashT;\n\n\n\n /// Returns hash of self.\n\n fn hash(&self) -> Self::Hash;\n\n\n\n /// Returns whether the consensus value needs validation.\n\n fn needs_validation(&self) -> bool;\n\n\n\n /// Returns the value's timestamp.\n", "file_path": "node/src/components/consensus/traits.rs", "rank": 16, "score": 390438.69833027385 }, { "content": "/// Reads a previously-saved `Deploy` from a file, cryptographically signs it, and outputs it to a\n\n/// file or stdout.\n\n///\n\n/// * `input_path` specifies the path to the previously-saved `Deploy` file.\n\n/// * `secret_key` specifies the path to the secret key with which to sign the `Deploy`.\n\n/// * `maybe_output_path` specifies the output file, or if empty, will print it to `stdout`. If the\n\n/// file already exists, it will be overwritten.\n\npub fn sign_deploy_file(input_path: &str, secret_key: &str, maybe_output_path: &str) -> Result<()> {\n\n let secret_key = parsing::secret_key(secret_key)?;\n\n let maybe_output_path = parsing::output(maybe_output_path);\n\n\n\n let output = deploy::output_or_stdout(maybe_output_path).map_err(|error| Error::IoError {\n\n context: format!(\n\n \"unable to get file or stdout, provided '{:?}'\",\n\n maybe_output_path\n\n ),\n\n error,\n\n })?;\n\n\n\n let input = File::open(&input_path).map_err(|error| Error::IoError {\n\n context: format!(\"unable to read deploy file at '{}'\", input_path),\n\n error,\n\n })?;\n\n\n\n Deploy::sign_and_write_deploy(input, secret_key, output)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 17, "score": 389946.8125634068 }, { "content": "#[test]\n\nfn store_execution_results_twice_for_same_block_deploy_pair() {\n\n let mut harness = ComponentHarness::default();\n\n let mut storage = storage_fixture(&harness);\n\n\n\n let block_hash = BlockHash::random(&mut harness.rng);\n\n let deploy_hash 
= DeployHash::random(&mut harness.rng);\n\n\n\n let mut exec_result_1 = HashMap::new();\n\n exec_result_1.insert(deploy_hash, harness.rng.gen());\n\n\n\n let mut exec_result_2 = HashMap::new();\n\n exec_result_2.insert(deploy_hash, harness.rng.gen());\n\n\n\n put_execution_results(&mut harness, &mut storage, block_hash, exec_result_1);\n\n\n\n // Storing a second execution result for the same deploy on the same block should panic.\n\n put_execution_results(&mut harness, &mut storage, block_hash, exec_result_2);\n\n}\n\n\n", "file_path": "node/src/components/storage/tests.rs", "rank": 18, "score": 387419.8472795974 }, { "content": "/// Removes the [`Key`] stored under `name` in the current context's named keys.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn remove_key(name: &str) {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n unsafe { ext_ffi::casper_remove_key(name_ptr, name_size) }\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 19, "score": 382979.1019749909 }, { "content": "/// Retrieves information and examples for all currently supported RPCs.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. 
When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\npub fn list_rpcs(maybe_rpc_id: &str, node_address: &str, verbosity_level: u64) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).list_rpcs()\n\n}\n\n\n\n/// Container for `Deploy` construction options.\n\n#[derive(Default, Debug)]\n\npub struct DeployStrParams<'a> {\n\n /// Path to secret key file.\n\n pub secret_key: &'a str,\n\n /// RFC3339-like formatted timestamp. e.g. `2018-02-16T00:31:37Z`.\n\n ///\n\n /// If `timestamp` is empty, the current time will be used. Note that timestamp is UTC, not\n\n /// local.\n\n ///\n\n /// See\n\n /// [the `humantime` docs](https://docs.rs/humantime/latest/humantime/fn.parse_rfc3339_weak.html)\n\n /// for more information.\n\n pub timestamp: &'a str,\n\n /// Time that the `Deploy` will remain valid for.\n\n ///\n", "file_path": "client/lib/lib.rs", "rank": 20, "score": 379752.63242391066 }, { "content": "/// Generates a new asymmetric key pair using the specified algorithm, and writes them to files in\n\n/// the specified directory.\n\n///\n\n/// The secret key is written to \"secret_key.pem\", and the public key is written to \"public_key.pem\"\n\n/// and also in hex format to \"public_key_hex\". For the hex format, the algorithm's tag is\n\n/// prepended, e.g. `01` for Ed25519, `02` for secp256k1.\n\n///\n\n/// If `force` is true, existing files will be overwritten. 
If `force` is false and any of the\n\n/// files exist, [`Error::FileAlreadyExists`](../enum.Error.html#variant.FileAlreadyExists) is\n\n/// returned and no files are written.\n\npub fn generate_files(output_dir: &str, algorithm: &str, force: bool) -> Result<()> {\n\n if output_dir.is_empty() {\n\n return Err(Error::InvalidArgument(\n\n \"generate_files\",\n\n \"empty output_dir provided, must be a valid path\".to_string(),\n\n ));\n\n }\n\n let _ = fs::create_dir_all(output_dir).map_err(move |error| Error::IoError {\n\n context: format!(\"unable to create directory at '{}'\", output_dir),\n\n error,\n\n })?;\n\n let output_dir = Path::new(output_dir)\n\n .canonicalize()\n\n .map_err(|error| Error::IoError {\n\n context: format!(\"unable get canonical path at '{}'\", output_dir),\n\n error,\n\n })?;\n\n\n\n if !force {\n\n for file in FILES.iter().map(|filename| output_dir.join(filename)) {\n", "file_path": "client/lib/keygen.rs", "rank": 21, "score": 378565.5698153174 }, { "content": "pub fn trie_arb() -> impl Strategy<Value = Trie<Key, StoredValue>> {\n\n prop_oneof![\n\n (key_arb(), stored_value_arb()).prop_map(|(key, value)| Trie::Leaf { key, value }),\n\n trie_pointer_block_arb().prop_map(|pointer_block| Trie::Node {\n\n pointer_block: Box::new(pointer_block)\n\n }),\n\n (vec(any::<u8>(), 0..32), trie_pointer_arb()).prop_map(|(affix, pointer)| {\n\n Trie::Extension {\n\n affix: affix.into(),\n\n pointer,\n\n }\n\n })\n\n ]\n\n}\n", "file_path": "execution_engine/src/storage/trie/gens.rs", "rank": 22, "score": 378407.306120428 }, { "content": "/// Tokenizes a stream of characters.\n\nfn tokenize(input: &str) -> Result<Vec<Token>, Error> {\n\n let mut chars = input.chars();\n\n let mut tokens = Vec::new();\n\n\n\n let mut buffer = String::new();\n\n\n\n loop {\n\n let ch = chars.next();\n\n\n\n // Check if we need to complete a token.\n\n if !buffer.is_empty() {\n\n match ch {\n\n Some(' ') | Some('\"') | Some('[') | Some(']') | Some(',') | None => {\n\n // Try 
to parse as number or bool first.\n\n if let Ok(value) = i64::from_str(&buffer) {\n\n tokens.push(Token::I64(value));\n\n } else if let Ok(value) = bool::from_str(&buffer) {\n\n tokens.push(Token::Boolean(value));\n\n } else {\n\n tokens.push(Token::String(buffer.clone()))\n", "file_path": "node/src/app/cli/arglang.rs", "rank": 23, "score": 377342.70083561854 }, { "content": "// TODO: Get rid of the `Arc<Mutex<_>>` ASAP.\n\nfn estimate_known_addresses(map: &Arc<Mutex<HashMap<Multiaddr, ConnectionState>>>) -> usize {\n\n ds::hash_map_fixed_size(&*(map.lock().expect(\"lock poisoned\")))\n\n}\n\n\n\n#[derive(DataSize)]\n\npub struct Network<REv, P> {\n\n #[data_size(skip)]\n\n network_identity: NetworkIdentity,\n\n our_id: NodeId,\n\n /// The set of peers which are current connected to our node. Kept in sync with libp2p\n\n /// internals.\n\n // DataSize note: Connected point contains `Arc`'ed Vecs internally, this is better than\n\n // skipping at least.\n\n #[data_size(with = ds::hash_map_fixed_size)]\n\n peers: HashMap<NodeId, ConnectedPoint>,\n\n /// The set of peers whose address we currently know. 
Kept in sync with the internal Kademlia\n\n /// routing table.\n\n // DataSize note: `PeerId`s can likely be estimated using `mem::size_of`.\n\n #[data_size(with = ds::hash_set_fixed_size)]\n\n seen_peers: HashSet<PeerId>,\n", "file_path": "node/src/components/network.rs", "rank": 24, "score": 374645.12266034144 }, { "content": "fn write_to<P, T>(provider: &mut P, name: &str, value: T) -> Result<(), Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n T: ToBytes + CLTyped,\n\n{\n\n let key = provider.named_keys_get(name).ok_or(Error::MissingKey)?;\n\n let uref = key.into_uref().ok_or(Error::InvalidKeyVariant)?;\n\n provider.write(uref, value)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 25, "score": 374016.2765884392 }, { "content": "fn lmdb_roundtrip_succeeds(inputs: Vec<Trie<Key, StoredValue>>) -> bool {\n\n use crate::storage::{\n\n transaction_source::lmdb::LmdbEnvironment, trie_store::lmdb::LmdbTrieStore,\n\n };\n\n\n\n let tmp_dir = tempdir().unwrap();\n\n let env = LmdbEnvironment::new(\n\n &tmp_dir.path().to_path_buf(),\n\n DEFAULT_TEST_MAX_DB_SIZE,\n\n DEFAULT_TEST_MAX_READERS,\n\n )\n\n .unwrap();\n\n let store = LmdbTrieStore::new(&env, None, DatabaseFlags::empty()).unwrap();\n\n\n\n let inputs: BTreeMap<Blake2bHash, Trie<Key, StoredValue>> = inputs\n\n .into_iter()\n\n .map(|trie| (Blake2bHash::new(&trie.to_bytes().unwrap()), trie))\n\n .collect();\n\n\n\n let ret = store_tests::roundtrip_succeeds(&env, &store, inputs).unwrap();\n", "file_path": "execution_engine/src/storage/trie_store/tests/proptests.rs", "rank": 26, "score": 372744.48725395044 }, { "content": "fn in_memory_roundtrip_succeeds(inputs: Vec<Trie<Key, StoredValue>>) -> bool {\n\n use crate::storage::{\n\n transaction_source::in_memory::InMemoryEnvironment,\n\n trie_store::in_memory::InMemoryTrieStore,\n\n };\n\n\n\n let env = InMemoryEnvironment::new();\n\n let store = InMemoryTrieStore::new(&env, None);\n\n\n\n let inputs: 
BTreeMap<Blake2bHash, Trie<Key, StoredValue>> = inputs\n\n .into_iter()\n\n .map(|trie| (Blake2bHash::new(&trie.to_bytes().unwrap()), trie))\n\n .collect();\n\n\n\n store_tests::roundtrip_succeeds(&env, &store, inputs).unwrap()\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie_store/tests/proptests.rs", "rank": 27, "score": 372744.48725395044 }, { "content": "fn account(value: &str) -> Result<PublicKey> {\n\n PublicKey::from_hex(value).map_err(|error| Error::CryptoError {\n\n context: \"account\",\n\n error: error.into(),\n\n })\n\n}\n\n\n\npub(crate) fn transfer_id(value: &str) -> Result<Option<u64>> {\n\n if str::is_empty(value) {\n\n return Ok(None);\n\n }\n\n let value = value\n\n .parse::<u64>()\n\n .map_err(|error| Error::FailedToParseInt(\"transfer_id\", error))?;\n\n Ok(Some(value))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{convert::TryFrom, result::Result as StdResult};\n", "file_path": "client/lib/parsing.rs", "rank": 30, "score": 367738.47506456857 }, { "content": "/// A helper function to change NamedKeys into a Vec<NamedKey>\n\npub fn vectorize(keys: &NamedKeys) -> Vec<NamedKey> {\n\n let named_keys = keys\n\n .iter()\n\n .map(|(name, key)| NamedKey {\n\n name: name.clone(),\n\n key: key.to_formatted_string(),\n\n })\n\n .collect();\n\n named_keys\n\n}\n", "file_path": "node/src/types/json_compatibility.rs", "rank": 31, "score": 367719.490875542 }, { "content": "fn store(value: String) {\n\n // Store `value` under a new unforgeable reference.\n\n let value_ref: URef = storage::new_uref(value);\n\n\n\n // Wrap the unforgeable reference in a value of type `Key`.\n\n let value_key: Key = value_ref.into();\n\n\n\n // Store this key under the name \"special_value\" in context-local storage.\n\n runtime::put_key(KEY, value_key);\n\n}\n\n\n\n// All session code must have a `call` entrypoint.\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n // Get the optional first argument supplied to the argument.\n\n let value: String = 
runtime::get_named_arg(ARG_MESSAGE);\n\n store(value);\n\n}\n\n\"#;\n\n\n", "file_path": "execution_engine_testing/cargo_casper/src/contract_package.rs", "rank": 32, "score": 367541.9072550743 }, { "content": "/// Retrieves a state root hash at a given `Block`.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. 
If empty, the latest `Block` will be used.\n\npub fn get_state_root_hash(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_state_root_hash(maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 33, "score": 367371.739638865 }, { "content": "/// Logs the metrics associated with the specified host function.\n\npub fn log_host_function_metrics(_host_function: &str, _properties: BTreeMap<&str, String>) {\n\n // TODO: Metrics story https://casperlabs.atlassian.net/browse/NDRS-120\n\n}\n", "file_path": "execution_engine/src/shared/logging/mod.rs", "rank": 34, "score": 367094.87753156497 }, { "content": "pub fn get_bids<P>(provider: &mut P) -> Result<Bids, Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n{\n\n let bids_keys = provider.get_keys(&KeyTag::Bid)?;\n\n\n\n let mut ret = BTreeMap::new();\n\n\n\n for key in bids_keys {\n\n let account_hash = match key {\n\n Key::Bid(account_ash) => account_ash,\n\n _ => return Err(Error::InvalidKeyVariant),\n\n };\n\n let bid = match provider.read_bid(&account_hash)? 
{\n\n Some(bid) => bid,\n\n None => return Err(Error::ValidatorNotFound),\n\n };\n\n ret.insert(*bid.validator_public_key(), bid);\n\n }\n\n\n\n Ok(ret)\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 36, "score": 360532.8802111208 }, { "content": "#[test]\n\nfn store_execution_results_for_two_blocks() {\n\n let mut harness = ComponentHarness::default();\n\n let mut storage = storage_fixture(&harness);\n\n\n\n let deploy = Deploy::random(&mut harness.rng);\n\n\n\n let block_hash_a = BlockHash::random(&mut harness.rng);\n\n let block_hash_b = BlockHash::random(&mut harness.rng);\n\n\n\n // Store the deploy.\n\n put_deploy(&mut harness, &mut storage, Box::new(deploy.clone()));\n\n\n\n // Ensure deploy exists.\n\n assert_eq!(\n\n get_deploys(&mut harness, &mut storage, smallvec![*deploy.id()]),\n\n vec![Some(deploy.clone())]\n\n );\n\n\n\n // Put first execution result.\n\n let first_result: ExecutionResult = harness.rng.gen();\n", "file_path": "node/src/components/storage/tests.rs", "rank": 37, "score": 359895.61749922193 }, { "content": "/// Serializes a `Duration` as milliseconds.\n\n///\n\n/// Limited to 64 bit.\n\npub fn serialize<S>(value: &Duration, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let ms = u64::try_from(value.as_millis()).map_err(|_err| {\n\n S::Error::custom(format!(\n\n \"duration {:?} is too large to be convert down to 64-bit milliseconds\",\n\n value\n\n ))\n\n })?;\n\n serializer.serialize_u64(ms)\n\n}\n\n\n", "file_path": "node/src/utils/milliseconds.rs", "rank": 38, "score": 359590.0138303173 }, { "content": "/// A JSON-RPC requiring the \"params\" field to be present.\n\npub trait RpcWithParams {\n\n /// The JSON-RPC \"method\" name.\n\n const METHOD: &'static str;\n\n\n\n /// The JSON-RPC request's \"params\" type.\n\n type RequestParams: Serialize\n\n + for<'de> Deserialize<'de>\n\n + JsonSchema\n\n + DocExample\n\n + Send\n\n + 'static;\n\n\n\n /// The JSON-RPC response's 
\"result\" type.\n\n type ResponseResult: Serialize\n\n + for<'de> Deserialize<'de>\n\n + JsonSchema\n\n + DocExample\n\n + Send\n\n + 'static;\n\n}\n", "file_path": "node/src/components/rpc_server/rpcs.rs", "rank": 39, "score": 359276.18354288756 }, { "content": "pub fn parse_hash(encoded_hash: &str) -> Vec<u8> {\n\n base16::decode(encoded_hash).expect(\"Expected a valid, hex-encoded hash\")\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/profiling/mod.rs", "rank": 40, "score": 358171.5178190914 }, { "content": "// Computationally expensive validity check for a given deploy instance, including\n\n// asymmetric_key signing verification.\n\nfn validate_deploy(deploy: &Deploy) -> Result<(), DeployValidationFailure> {\n\n let serialized_body = serialize_body(&deploy.payment, &deploy.session);\n\n let body_hash = hash::hash(&serialized_body);\n\n if body_hash != deploy.header.body_hash {\n\n warn!(?deploy, ?body_hash, \"invalid deploy body hash\");\n\n return Err(DeployValidationFailure::InvalidBodyHash);\n\n }\n\n\n\n let serialized_header = serialize_header(&deploy.header);\n\n let hash = DeployHash::new(hash::hash(&serialized_header));\n\n if hash != deploy.hash {\n\n warn!(?deploy, ?hash, \"invalid deploy hash\");\n\n return Err(DeployValidationFailure::InvalidDeployHash);\n\n }\n\n\n\n // We don't need to check for an empty set here. 
EE checks that the correct number and weight of\n\n // signatures are provided when executing the deploy, so all we need to do here is check that\n\n // any provided signatures are valid.\n\n for (index, approval) in deploy.approvals.iter().enumerate() {\n\n if let Err(error) = crypto::verify(&deploy.hash, &approval.signature, &approval.signer) {\n", "file_path": "node/src/types/deploy.rs", "rank": 41, "score": 357441.85152267857 }, { "content": "pub fn contract_version_key_arb() -> impl Strategy<Value = ContractVersionKey> {\n\n (1..32u32, 1..1000u32)\n\n .prop_map(|(major, contract_ver)| ContractVersionKey::new(major, contract_ver))\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 42, "score": 356204.1733273072 }, { "content": "fn create_0_leaf_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let root = HashedTrie::new(Trie::node(&[]))?;\n\n\n\n let root_hash: Blake2bHash = root.hash;\n\n\n\n let parents: Vec<HashedTestTrie> = vec![root];\n\n\n\n let tries: Vec<HashedTestTrie> = {\n\n let mut ret = Vec::new();\n\n ret.extend(parents);\n\n ret\n\n };\n\n\n\n Ok((root_hash, tries))\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 43, "score": 355797.72207641124 }, { "content": "fn create_1_leaf_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES[..1])?;\n\n\n\n let root = HashedTrie::new(Trie::node(&[(0, Pointer::LeafPointer(leaves[0].hash))]))?;\n\n\n\n let root_hash: Blake2bHash = root.hash;\n\n\n\n let parents: Vec<HashedTestTrie> = vec![root];\n\n\n\n let tries: Vec<HashedTestTrie> = {\n\n let mut ret = Vec::new();\n\n ret.extend(leaves);\n\n ret.extend(parents);\n\n ret\n\n };\n\n\n\n Ok((root_hash, tries))\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 44, "score": 355797.72207641124 }, { "content": "fn create_4_leaf_trie() -> Result<(Blake2bHash, 
Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES[..4])?;\n\n\n\n let node_1 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::LeafPointer(leaves[0].hash)),\n\n (1, Pointer::LeafPointer(leaves[1].hash)),\n\n ]))?;\n\n\n\n let node_2 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(node_1.hash)),\n\n (255, Pointer::LeafPointer(leaves[3].hash)),\n\n ]))?;\n\n\n\n let ext_1 = HashedTrie::new(Trie::extension(\n\n vec![0u8],\n\n Pointer::NodePointer(node_2.hash),\n\n ))?;\n\n\n\n let node_3 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(ext_1.hash)),\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 45, "score": 355797.72207641124 }, { "content": "fn create_3_leaf_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES[..3])?;\n\n\n\n let node_1 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::LeafPointer(leaves[0].hash)),\n\n (1, Pointer::LeafPointer(leaves[1].hash)),\n\n ]))?;\n\n\n\n let ext_1 = HashedTrie::new(Trie::extension(\n\n vec![0u8, 0],\n\n Pointer::NodePointer(node_1.hash),\n\n ))?;\n\n\n\n let node_2 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(ext_1.hash)),\n\n (2, Pointer::LeafPointer(leaves[2].hash)),\n\n ]))?;\n\n\n\n let ext_2 = HashedTrie::new(Trie::extension(\n\n vec![0u8, 0],\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 46, "score": 355797.72207641124 }, { "content": "fn create_5_leaf_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES[..5])?;\n\n\n\n let node_1 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::LeafPointer(leaves[0].hash)),\n\n (1, Pointer::LeafPointer(leaves[1].hash)),\n\n ]))?;\n\n\n\n let node_2 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(node_1.hash)),\n\n (255, Pointer::LeafPointer(leaves[3].hash)),\n\n ]))?;\n\n\n\n 
let ext_1 = HashedTrie::new(Trie::extension(\n\n vec![0u8],\n\n Pointer::NodePointer(node_2.hash),\n\n ))?;\n\n\n\n let node_3 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(ext_1.hash)),\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 47, "score": 355797.72207641124 }, { "content": "fn create_2_leaf_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES[..2])?;\n\n\n\n let node = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::LeafPointer(leaves[0].hash)),\n\n (1, Pointer::LeafPointer(leaves[1].hash)),\n\n ]))?;\n\n\n\n let ext = HashedTrie::new(Trie::extension(\n\n vec![0u8, 0, 0, 0, 0],\n\n Pointer::NodePointer(node.hash),\n\n ))?;\n\n\n\n let root = HashedTrie::new(Trie::node(&[(0, Pointer::NodePointer(ext.hash))]))?;\n\n\n\n let root_hash = root.hash;\n\n\n\n let parents: Vec<HashedTestTrie> = vec![root, ext, node];\n\n\n\n let tries: Vec<HashedTestTrie> = {\n\n let mut ret = Vec::new();\n\n ret.extend(leaves);\n\n ret.extend(parents);\n\n ret\n\n };\n\n\n\n Ok((root_hash, tries))\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 48, "score": 355797.72207641124 }, { "content": "fn create_6_leaf_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES)?;\n\n\n\n let node_1 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::LeafPointer(leaves[0].hash)),\n\n (1, Pointer::LeafPointer(leaves[1].hash)),\n\n ]))?;\n\n\n\n let node_2 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(node_1.hash)),\n\n (255, Pointer::LeafPointer(leaves[3].hash)),\n\n ]))?;\n\n\n\n let ext = HashedTrie::new(Trie::extension(\n\n vec![0u8],\n\n Pointer::NodePointer(node_2.hash),\n\n ))?;\n\n\n\n let node_3 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(ext.hash)),\n", "file_path": 
"execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 49, "score": 355797.72207641124 }, { "content": "fn parse_contract_hash(value: &str) -> Result<Option<HashAddr>> {\n\n if value.is_empty() {\n\n return Ok(None);\n\n }\n\n if let Ok(digest) = Digest::from_hex(value) {\n\n return Ok(Some(digest.to_array()));\n\n }\n\n if let Ok(Key::Hash(hash)) = Key::from_formatted_str(value) {\n\n return Ok(Some(hash));\n\n }\n\n Err(Error::FailedToParseKey)\n\n}\n\n\n", "file_path": "client/lib/parsing.rs", "rank": 52, "score": 354126.2649934335 }, { "content": "/// Creates an ASN1 name from string components.\n\n///\n\n/// If `c` or `o` are empty string, they are omitted from the result.\n\nfn mkname(c: &str, o: &str, cn: &str) -> Result<X509Name, ErrorStack> {\n\n let mut builder = X509NameBuilder::new()?;\n\n\n\n if !c.is_empty() {\n\n builder.append_entry_by_text(\"C\", c)?;\n\n }\n\n\n\n if !o.is_empty() {\n\n builder.append_entry_by_text(\"O\", o)?;\n\n }\n\n\n\n builder.append_entry_by_text(\"CN\", cn)?;\n\n Ok(builder.build())\n\n}\n\n\n", "file_path": "node/src/tls.rs", "rank": 53, "score": 354056.5758767445 }, { "content": "/// Returns the named keys of the current context.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn list_named_keys() -> NamedKeys {\n\n let (total_keys, result_size) = {\n\n let mut total_keys = MaybeUninit::uninit();\n\n let mut result_size = 0;\n\n let ret = unsafe {\n\n ext_ffi::casper_load_named_keys(total_keys.as_mut_ptr(), &mut result_size as *mut usize)\n\n };\n\n api_error::result_from(ret).unwrap_or_revert();\n\n let total_keys = unsafe { total_keys.assume_init() };\n\n (total_keys, result_size)\n\n };\n\n if total_keys == 0 {\n\n return NamedKeys::new();\n\n }\n\n let bytes = read_host_buffer(result_size).unwrap_or_revert();\n\n 
bytesrepr::deserialize(bytes).unwrap_or_revert()\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 54, "score": 352885.87848519185 }, { "content": "/// Deserializes `bytes` into an instance of `T`.\n\n///\n\n/// Returns an error if the bytes cannot be deserialized into `T` or if not all of the input bytes\n\n/// are consumed in the operation.\n\npub fn deserialize<T: FromBytes>(bytes: Vec<u8>) -> Result<T, Error> {\n\n let (t, remainder) = T::from_vec(bytes)?;\n\n if remainder.is_empty() {\n\n Ok(t)\n\n } else {\n\n Err(Error::LeftOverBytes)\n\n }\n\n}\n\n\n", "file_path": "types/src/bytesrepr.rs", "rank": 55, "score": 351685.2225635298 }, { "content": "/// A JSON-RPC requiring the \"params\" field to be absent.\n\npub trait RpcWithoutParams {\n\n /// The JSON-RPC \"method\" name.\n\n const METHOD: &'static str;\n\n\n\n /// The JSON-RPC response's \"result\" type.\n\n type ResponseResult: Serialize\n\n + for<'de> Deserialize<'de>\n\n + JsonSchema\n\n + DocExample\n\n + Send\n\n + 'static;\n\n}\n\n\n\n/// A trait for creating a JSON-RPC filter where the request is not required to have \"params\".\n\npub(super) trait RpcWithoutParamsExt: RpcWithoutParams {\n\n /// Creates the warp filter for this particular RPC.\n\n fn create_filter<REv: ReactorEventT>(\n\n effect_builder: EffectBuilder<REv>,\n\n api_version: Version,\n\n ) -> BoxedFilter<(Response<Body>,)> {\n", "file_path": "node/src/components/rpc_server/rpcs.rs", "rank": 57, "score": 351425.9298597588 }, { "content": "/// A JSON-RPC with the \"params\" field optional.\n\npub trait RpcWithOptionalParams {\n\n /// The JSON-RPC \"method\" name.\n\n const METHOD: &'static str;\n\n\n\n /// The JSON-RPC request's \"params\" type. 
This will be passed to the handler wrapped in an\n\n /// `Option`.\n\n type OptionalRequestParams: Serialize\n\n + for<'de> Deserialize<'de>\n\n + JsonSchema\n\n + DocExample\n\n + Send\n\n + 'static;\n\n\n\n /// The JSON-RPC response's \"result\" type.\n\n type ResponseResult: Serialize\n\n + for<'de> Deserialize<'de>\n\n + JsonSchema\n\n + DocExample\n\n + Send\n\n + 'static;\n", "file_path": "node/src/components/rpc_server/rpcs.rs", "rank": 58, "score": 351419.639310788 }, { "content": "pub fn bincode_roundtrip<T: Serialize + DeserializeOwned + Eq + Debug>(value: &T) {\n\n let serialized = bincode::serialize(value).unwrap();\n\n let deserialized = bincode::deserialize(serialized.as_slice()).unwrap();\n\n assert_eq!(*value, deserialized);\n\n}\n\n\n\n/// Create an unused port on localhost.\n\n#[allow(clippy::assertions_on_constants)]\n\npub(crate) fn unused_port_on_localhost() -> u16 {\n\n // Prime used for the LCG.\n\n const PRIME: u16 = 54101;\n\n // Generating member of prime group.\n\n const GENERATOR: u16 = 35892;\n\n\n\n // This assertion can never fail, but the compiler should output a warning if the constants\n\n // combined exceed the valid values of `u16`.\n\n assert!(PORT_LOWER_BOUND + PRIME + 10 < u16::MAX);\n\n\n\n // Poor man's linear congurential random number generator:\n\n static RNG_STATE: AtomicU16 = AtomicU16::new(GENERATOR);\n", "file_path": "node/src/testing.rs", "rank": 59, "score": 350965.04742489307 }, { "content": "fn create_6_leaf_corrupt_trie() -> Result<(Blake2bHash, Vec<HashedTestTrie>), bytesrepr::Error> {\n\n let leaves = hash_test_tries(&TEST_LEAVES)?;\n\n\n\n let node_1 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::LeafPointer(leaves[0].hash)),\n\n (1, Pointer::LeafPointer(leaves[1].hash)),\n\n ]))?;\n\n\n\n let node_2 = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(node_1.hash)),\n\n (255, Pointer::LeafPointer(leaves[3].hash)),\n\n ]))?;\n\n\n\n let ext = HashedTrie::new(Trie::extension(\n\n vec![0u8],\n\n 
Pointer::NodePointer(node_2.hash),\n\n ))?;\n\n\n\n let node_3: HashedTestTrie = HashedTrie::new(Trie::node(&[\n\n (0, Pointer::NodePointer(ext.hash)),\n", "file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 60, "score": 350611.52679510706 }, { "content": "/// Creates a TOML-formatted string from a given configuration.\n\npub fn to_string<C: Serialize>(cfg: &C) -> anyhow::Result<String> {\n\n toml::to_string_pretty(cfg).with_context(|| \"Failed to serialize default configuration\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use casper_node::reactor::validator::Config;\n\n\n\n #[test]\n\n fn example_config_should_parse() {\n\n let config_path = format!(\n\n \"{}/../resources/local/config.toml\",\n\n env!(\"CARGO_MANIFEST_DIR\")\n\n );\n\n let _config: Config = super::load_from_file(config_path).unwrap();\n\n }\n\n}\n", "file_path": "node/src/app/config.rs", "rank": 61, "score": 350562.481152784 }, { "content": "/// A queue of contents of blocks that we know have been finalized, but we are still missing\n\n/// notifications about finalization of some of their ancestors. 
It maps block height to the\n\n/// deploys contained in the corresponding block.\n\ntype FinalizationQueue = HashMap<BlockHeight, Vec<DeployHash>>;\n\n\n", "file_path": "node/src/components/block_proposer.rs", "rank": 62, "score": 350126.0176492381 }, { "content": "/// A helper trait capturing all of this components Request type dependencies.\n\npub trait ReactorEventT:\n\n From<Event>\n\n + From<RpcRequest<NodeId>>\n\n + From<RpcServerAnnouncement>\n\n + From<ChainspecLoaderRequest>\n\n + From<ContractRuntimeRequest>\n\n + From<ConsensusRequest>\n\n + From<LinearChainRequest<NodeId>>\n\n + From<MetricsRequest>\n\n + From<NetworkInfoRequest<NodeId>>\n\n + From<StorageRequest>\n\n + Send\n\n{\n\n}\n\n\n\nimpl<REv> ReactorEventT for REv where\n\n REv: From<Event>\n\n + From<RpcRequest<NodeId>>\n\n + From<RpcServerAnnouncement>\n\n + From<ChainspecLoaderRequest>\n", "file_path": "node/src/components/rpc_server.rs", "rank": 63, "score": 348534.1905569912 }, { "content": "/// A helper trait capturing all of this components Request type dependencies.\n\npub trait ReactorEventT:\n\n From<Event>\n\n + From<RestRequest<NodeId>>\n\n + From<NetworkInfoRequest<NodeId>>\n\n + From<StorageRequest>\n\n + From<ChainspecLoaderRequest>\n\n + From<ConsensusRequest>\n\n + From<MetricsRequest>\n\n + Send\n\n{\n\n}\n\n\n\nimpl<REv> ReactorEventT for REv where\n\n REv: From<Event>\n\n + From<RestRequest<NodeId>>\n\n + From<NetworkInfoRequest<NodeId>>\n\n + From<StorageRequest>\n\n + From<ChainspecLoaderRequest>\n\n + From<ConsensusRequest>\n\n + From<MetricsRequest>\n", "file_path": "node/src/components/rest_server.rs", "rank": 64, "score": 348534.1905569912 }, { "content": "/// A helper trait whose bounds represent the requirements for a reactor event that `BlockExecutor`\n\n/// can work with.\n\npub trait ReactorEventT:\n\n From<Event>\n\n + From<StorageRequest>\n\n + From<LinearChainRequest<NodeId>>\n\n + From<ContractRuntimeRequest>\n\n + From<BlockExecutorAnnouncement>\n\n + 
From<ConsensusRequest>\n\n + Send\n\n{\n\n}\n\n\n\nimpl<REv> ReactorEventT for REv where\n\n REv: From<Event>\n\n + From<StorageRequest>\n\n + From<LinearChainRequest<NodeId>>\n\n + From<ContractRuntimeRequest>\n\n + From<BlockExecutorAnnouncement>\n\n + From<ConsensusRequest>\n\n + Send\n\n{\n\n}\n\n\n", "file_path": "node/src/components/block_executor.rs", "rank": 65, "score": 348475.8366352902 }, { "content": "/// A helper trait constraining `DeployAcceptor` compatible reactor events.\n\npub trait ReactorEventT:\n\n From<Event>\n\n + From<DeployAcceptorAnnouncement<NodeId>>\n\n + From<StorageRequest>\n\n + From<ContractRuntimeRequest>\n\n + Send\n\n{\n\n}\n\n\n\nimpl<REv> ReactorEventT for REv where\n\n REv: From<Event>\n\n + From<DeployAcceptorAnnouncement<NodeId>>\n\n + From<StorageRequest>\n\n + From<ContractRuntimeRequest>\n\n + Send\n\n{\n\n}\n\n\n\n/// The `DeployAcceptor` is the component which handles all new `Deploy`s immediately after they're\n\n/// received by this node, regardless of whether they were provided by a peer or a client.\n", "file_path": "node/src/components/deploy_acceptor.rs", "rank": 66, "score": 348408.07800802204 }, { "content": "#[derive(DataSize, Debug)]\n\nstruct ExecutedBlockSummary {\n\n hash: BlockHash,\n\n state_root_hash: Digest,\n\n accumulated_seed: Digest,\n\n}\n\n\n", "file_path": "node/src/components/block_executor.rs", "rank": 67, "score": 348269.2920303991 }, { "content": "fn version_string(color: bool) -> String {\n\n let mut version = format!(\"{}-{}\", env!(\"CARGO_PKG_VERSION\"), env!(\"VERGEN_SHA_SHORT\"));\n\n\n\n // Add a `@DEBUG` (or similar) tag to release string on non-release builds.\n\n if env!(\"NODE_BUILD_PROFILE\") != \"release\" {\n\n version += \"@\";\n\n let profile = env!(\"NODE_BUILD_PROFILE\").to_uppercase();\n\n version.push_str(&if color {\n\n Red.paint(&profile).to_string()\n\n } else {\n\n profile\n\n });\n\n }\n\n\n\n version\n\n}\n\n\n\n/// Color version string for the compiled node. 
Filled in at build time, output allocated at\n\n/// runtime.\n\npub static VERSION_STRING_COLOR: Lazy<String> = Lazy::new(|| version_string(true));\n", "file_path": "node/src/lib.rs", "rank": 68, "score": 346940.74816100835 }, { "content": "/// Initializes the global logger using the given settings.\n\n///\n\n/// The logger will write all log messages from crates prefixed with \"casper_\" to stdout, and\n\n/// can also log internal metrics generated by the Execution Engine.\n\n///\n\n/// Returns an error if the global logger has already been set in this process.\n\npub fn initialize(settings: Settings) -> Result<(), SetLoggerError> {\n\n let logger = Box::new(TerminalLogger::new(&settings));\n\n initialize_with_logger(logger, settings)\n\n}\n\n\n\n/// This and the `TerminalLogger` are public but undocumented to allow functional testing of this\n\n/// crate, e.g. by passing a logger composed of a `TerminalLogger`.\n", "file_path": "execution_engine/src/shared/logging/mod.rs", "rank": 69, "score": 346894.3550046456 }, { "content": "pub fn key_arb() -> impl Strategy<Value = Key> {\n\n prop_oneof![\n\n account_hash_arb().prop_map(Key::Account),\n\n u8_slice_32().prop_map(Key::Hash),\n\n uref_arb().prop_map(Key::URef),\n\n transfer_addr_arb().prop_map(Key::Transfer),\n\n deploy_hash_arb().prop_map(Key::DeployInfo),\n\n any::<u64>().prop_map(Key::EraInfo),\n\n uref_arb().prop_map(|uref| Key::Balance(uref.addr())),\n\n account_hash_arb().prop_map(Key::Bid),\n\n account_hash_arb().prop_map(Key::Withdraw),\n\n ]\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 70, "score": 346389.24519486085 }, { "content": "fn read_from<P, T>(provider: &mut P, name: &str) -> Result<T, Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n T: FromBytes + CLTyped,\n\n{\n\n let key = provider.named_keys_get(name).ok_or(Error::MissingKey)?;\n\n let uref = key.into_uref().ok_or(Error::InvalidKeyVariant)?;\n\n let value: T = 
provider.read(uref)?.ok_or(Error::MissingValue)?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 71, "score": 346120.5371900052 }, { "content": "pub fn named_keys_arb(depth: usize) -> impl Strategy<Value = NamedKeys> {\n\n btree_map(\"\\\\PC*\", key_arb(), depth)\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 72, "score": 345901.75410950684 }, { "content": "/// Returns a `Vec<u8>` initialized with sufficient capacity to hold `to_be_serialized` after\n\n/// serialization, or an error if the capacity would exceed `u32::max_value()`.\n\npub fn allocate_buffer<T: ToBytes>(to_be_serialized: &T) -> Result<Vec<u8>, Error> {\n\n let serialized_length = to_be_serialized.serialized_length();\n\n if serialized_length > u32::max_value() as usize {\n\n return Err(Error::OutOfMemory);\n\n }\n\n Ok(Vec::with_capacity(serialized_length))\n\n}\n\n\n\n/// Serialization and deserialization errors.\n\n#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]\n\n#[cfg_attr(feature = \"std\", derive(Error))]\n\n#[repr(u8)]\n\npub enum Error {\n\n /// Early end of stream while deserializing.\n\n #[cfg_attr(feature = \"std\", error(\"Deserialization error: early end of stream\"))]\n\n EarlyEndOfStream = 0,\n\n /// Formatting error while deserializing.\n\n #[cfg_attr(feature = \"std\", error(\"Deserialization error: formatting\"))]\n\n Formatting,\n\n /// Not all input bytes were consumed in [`deserialize`].\n\n #[cfg_attr(feature = \"std\", error(\"Deserialization error: left-over bytes\"))]\n\n LeftOverBytes,\n\n /// Out of memory error.\n\n #[cfg_attr(feature = \"std\", error(\"Serialization error: out of memory\"))]\n\n OutOfMemory,\n\n}\n\n\n", "file_path": "types/src/bytesrepr.rs", "rank": 73, "score": 345267.26969341154 }, { "content": "fn hash_test_tries(tries: &[TestTrie]) -> Result<Vec<HashedTestTrie>, bytesrepr::Error> {\n\n tries\n\n .iter()\n\n .map(|trie| HashedTestTrie::new(trie.to_owned()))\n\n .collect()\n\n}\n\n\n", 
"file_path": "execution_engine/src/storage/trie_store/operations/tests/mod.rs", "rank": 74, "score": 344209.53730763576 }, { "content": "pub fn print_error_and_exit(msg: &str) -> ! {\n\n e_red!(\"error\");\n\n eprintln!(\"{}\", msg);\n\n process::exit(FAILURE_EXIT_CODE)\n\n}\n\n\n", "file_path": "execution_engine_testing/cargo_casper/src/common.rs", "rank": 75, "score": 343704.76987617015 }, { "content": "/// Returns a value built from a single arg which has been split into its constituent parts.\n\npub fn parts_to_cl_value(cl_type: CLType, value: &str) -> Result<CLValue> {\n\n let (cl_type_to_parse, optional_status, trimmed_value) = match cl_type {\n\n CLType::Option(inner_type) => {\n\n if value == \"null\" {\n\n (*inner_type, OptionalStatus::None, \"\")\n\n } else {\n\n (*inner_type, OptionalStatus::Some, value.trim_matches('\\''))\n\n }\n\n }\n\n _ => (\n\n cl_type,\n\n OptionalStatus::NotOptional,\n\n value.trim_matches('\\''),\n\n ),\n\n };\n\n\n\n if value == trimmed_value {\n\n return Err(Error::InvalidCLValue(format!(\n\n \"value in simple arg should be surrounded by single quotes unless it's a null \\\n\n optional value (value passed: {})\",\n", "file_path": "client/lib/cl_type.rs", "rank": 76, "score": 342956.8129018321 }, { "content": "pub trait BlockLike: Eq + Hash {\n\n fn deploys(&self) -> Vec<&DeployHash>;\n\n}\n\n\n\n/// A cryptographic hash identifying a `ProtoBlock`.\n\n#[derive(\n\n Copy,\n\n Clone,\n\n DataSize,\n\n Ord,\n\n PartialOrd,\n\n Eq,\n\n PartialEq,\n\n Hash,\n\n Serialize,\n\n Deserialize,\n\n Debug,\n\n Default,\n\n)]\n\npub struct ProtoBlockHash(Digest);\n", "file_path": "node/src/types/block.rs", "rank": 77, "score": 342485.3039910742 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\nfn extract_urefs(cl_value: &CLValue) -> Result<Vec<URef>, Error> {\n\n match cl_value.cl_type() {\n\n CLType::Bool\n\n | CLType::I32\n\n | CLType::I64\n\n | CLType::U8\n\n | CLType::U32\n\n | CLType::U64\n\n | CLType::U128\n\n | 
CLType::U256\n\n | CLType::U512\n\n | CLType::Unit\n\n | CLType::String\n\n | CLType::PublicKey\n\n | CLType::Any => Ok(vec![]),\n\n CLType::Option(ty) => match **ty {\n\n CLType::URef => {\n\n let opt: Option<URef> = cl_value.to_owned().into_t()?;\n\n Ok(opt.into_iter().collect())\n\n }\n", "file_path": "execution_engine/src/core/runtime/mod.rs", "rank": 78, "score": 339678.874982364 }, { "content": "/// Deserializes a `Duration` as 64-bit milliseconds.\n\npub fn deserialize<'de, D>(deserializer: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let ms = u64::deserialize(deserializer)?;\n\n\n\n Ok(Duration::from_millis(ms))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::{Deserialize, Serialize};\n\n use std::time::Duration;\n\n\n\n #[test]\n\n fn round_trip() {\n\n #[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n struct Example(#[serde(with = \"super\")] Duration);\n\n\n\n let value = Example(Duration::from_millis(12345));\n\n\n\n let json = serde_json::to_string(&value).expect(\"serialization failed\");\n\n assert_eq!(json, \"12345\");\n\n let deserialized = serde_json::from_str(&json).expect(\"deserialization failed\");\n\n\n\n assert_eq!(value, deserialized);\n\n }\n\n}\n", "file_path": "node/src/utils/milliseconds.rs", "rank": 79, "score": 339237.19519889366 }, { "content": "pub fn get_validator_slots<P>(provider: &mut P) -> Result<usize, Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n{\n\n let validator_slots: u32 = read_from(provider, VALIDATOR_SLOTS_KEY)?;\n\n let validator_slots = validator_slots\n\n .try_into()\n\n .map_err(|_| Error::InvalidValidatorSlotsValue)?;\n\n Ok(validator_slots)\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 80, "score": 339228.8577423596 }, { "content": "pub fn run_cargo_new(package_name: &str) {\n\n let mut command = Command::new(\"cargo\");\n\n command\n\n .args(&[\"new\", \"--vcs\", \"none\"])\n\n .arg(package_name)\n\n 
.current_dir(ARGS.root_path());\n\n\n\n let output = match command.output() {\n\n Ok(output) => output,\n\n Err(error) => print_error_and_exit(&format!(\": failed to run '{:?}': {}\", command, error)),\n\n };\n\n\n\n if !output.status.success() {\n\n let stdout = str::from_utf8(&output.stdout).expect(\"should be valid UTF8\");\n\n let stderr = str::from_utf8(&output.stderr).expect(\"should be valid UTF8\");\n\n print_error_and_exit(&format!(\n\n \": failed to run '{:?}':\\n{}\\n{}\\n\",\n\n command, stdout, stderr\n\n ));\n\n }\n\n}\n\n\n", "file_path": "execution_engine_testing/cargo_casper/src/common.rs", "rank": 81, "score": 337416.8103948454 }, { "content": "/// Converts an `X509NameRef` to a human readable string.\n\nfn name_to_string(name: &X509NameRef) -> SslResult<String> {\n\n let mut output = String::new();\n\n\n\n for entry in name.entries() {\n\n output.push_str(entry.object().nid().long_name()?);\n\n output.push('=');\n\n output.push_str(entry.data().as_utf8()?.as_ref());\n\n output.push(' ');\n\n }\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "node/src/tls.rs", "rank": 82, "score": 337145.4761247286 }, { "content": "fn root_hash_arg() -> Arg<'static, 'static> {\n\n Arg::with_name(ROOT_HASH_ARG_NAME)\n\n .value_name(ROOT_HASH_ARG_VALUE_NAME)\n\n .help(ROOT_HASH_ARG_HELP)\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/profiling/simple_transfer.rs", "rank": 83, "score": 337066.8233988717 }, { "content": "#[ignore]\n\n#[test]\n\nfn should_list_named_keys() {\n\n let mut builder = InMemoryWasmTestBuilder::default();\n\n builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);\n\n\n\n let initial_named_keys: NamedKeys = NamedKeys::new();\n\n\n\n let new_named_keys = {\n\n let account_hash = AccountHash::new([1; 32]);\n\n let mut named_keys = NamedKeys::new();\n\n assert!(named_keys\n\n .insert(NEW_NAME_ACCOUNT.to_string(), Key::Account(account_hash))\n\n .is_none());\n\n assert!(named_keys\n\n .insert(NEW_NAME_HASH.to_string(), Key::Hash([2; 
32]))\n\n .is_none());\n\n named_keys\n\n };\n\n\n\n let exec_request = ExecuteRequestBuilder::standard(\n\n *DEFAULT_ACCOUNT_ADDR,\n", "file_path": "execution_engine_testing/tests/src/test/contract_api/list_named_keys.rs", "rank": 84, "score": 335847.7012672857 }, { "content": "/// Parse a stream of tokens of arglang.\n\nfn parse_stream<I>(tokens: &mut Peekable<I>) -> Result<Value, Error>\n\nwhere\n\n I: Iterator<Item = Token>,\n\n{\n\n loop {\n\n match tokens.next() {\n\n Some(Token::String(value)) => return Ok(Value::String(value)),\n\n Some(Token::I64(value)) => return Ok(Value::Integer(value)),\n\n Some(Token::Boolean(value)) => return Ok(Value::Boolean(value)),\n\n Some(Token::OpenBracket) => {\n\n // Special case for empty list.\n\n if tokens.peek() == Some(&Token::CloseBracket) {\n\n tokens.next();\n\n return Ok(Value::Array(Vec::new()));\n\n }\n\n\n\n let mut items = Vec::new();\n\n loop {\n\n items.push(parse_stream(tokens)?);\n\n\n", "file_path": "node/src/app/cli/arglang.rs", "rank": 85, "score": 335261.4779310421 }, { "content": "/// serializes value to json;\n\n/// pretty_print: false = inline\n\n/// pretty_print: true = pretty printed / multiline\n\npub fn jsonify<T>(value: T, pretty_print: bool) -> String\n\nwhere\n\n T: Serialize,\n\n{\n\n let fj = if pretty_print {\n\n serde_json::to_string_pretty\n\n } else {\n\n serde_json::to_string\n\n };\n\n\n\n match fj(&value) {\n\n Ok(json) => json,\n\n Err(_) => r#\"{\"error\": \"encountered error serializing value\"}\"#.to_owned(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde::{Deserialize, Serialize};\n", "file_path": "execution_engine/src/shared/utils.rs", "rank": 86, "score": 335000.5972366305 }, { "content": "#[doc(hidden)]\n\npub fn record_era_info(era_id: EraId, era_info: EraInfo) -> Result<(), ApiError> {\n\n let (era_id_ptr, era_id_size, _bytes1) = contract_api::to_ptr(era_id);\n\n let (era_info_ptr, era_info_size, _bytes2) = contract_api::to_ptr(era_info);\n\n 
let result = unsafe {\n\n ext_ffi::casper_record_era_info(era_id_ptr, era_id_size, era_info_ptr, era_info_size)\n\n };\n\n if result == 0 {\n\n Ok(())\n\n } else {\n\n Err(auction::Error::RecordEraInfo.into())\n\n }\n\n}\n", "file_path": "smart_contracts/contract/src/contract_api/system.rs", "rank": 87, "score": 334786.3958089957 }, { "content": "#[test]\n\nfn is_terminal_block() -> Result<(), AddUnitError<TestContext>> {\n\n let mut state = State::new_test(WEIGHTS, 0);\n\n\n\n let a0 = add_unit!(state, ALICE, 0x00; N, N, N)?;\n\n assert!(!state.is_terminal_block(&a0)); // height 0\n\n let b0 = add_unit!(state, BOB, 0x01; a0, N, N)?;\n\n assert!(!state.is_terminal_block(&b0)); // height 1\n\n let c0 = add_unit!(state, CAROL, 0x02; a0, b0, N)?;\n\n assert!(!state.is_terminal_block(&c0)); // height 2\n\n let a1 = add_unit!(state, ALICE, 0x03; a0, b0, c0)?;\n\n assert!(!state.is_terminal_block(&a1)); // height 3\n\n let a2 = add_unit!(state, ALICE, None; a1, b0, c0)?;\n\n assert!(!state.is_terminal_block(&a2)); // not a block\n\n let a3 = add_unit!(state, ALICE, 0x04; a2, b0, c0)?;\n\n assert!(state.is_terminal_block(&a3)); // height 4, i.e. 
the fifth block and thus the last one\n\n assert_eq!(TEST_ERA_HEIGHT - 1, state.block(&a3).height);\n\n let a4 = add_unit!(state, ALICE, None; a3, b0, c0)?;\n\n assert!(!state.is_terminal_block(&a4)); // not a block\n\n Ok(())\n\n}\n\n\n", "file_path": "node/src/components/consensus/highway_core/state/tests.rs", "rank": 88, "score": 334691.6795580563 }, { "content": "/// Attempts a wrapping addition of `to_add` to `stored_value`, assuming `stored_value` is\n\n/// compatible with type `Y`.\n\nfn wrapping_addition<Y>(stored_value: StoredValue, to_add: Y) -> Result<StoredValue, Error>\n\nwhere\n\n Y: AsPrimitive<i32>\n\n + AsPrimitive<i64>\n\n + AsPrimitive<u8>\n\n + AsPrimitive<u32>\n\n + AsPrimitive<u64>\n\n + AsPrimitive<U128>\n\n + AsPrimitive<U256>\n\n + AsPrimitive<U512>,\n\n{\n\n let cl_value = CLValue::try_from(stored_value)?;\n\n\n\n match cl_value.cl_type() {\n\n CLType::I32 => do_wrapping_addition::<i32, _>(cl_value, to_add),\n\n CLType::I64 => do_wrapping_addition::<i64, _>(cl_value, to_add),\n\n CLType::U8 => do_wrapping_addition::<u8, _>(cl_value, to_add),\n\n CLType::U32 => do_wrapping_addition::<u32, _>(cl_value, to_add),\n\n CLType::U64 => do_wrapping_addition::<u64, _>(cl_value, to_add),\n\n CLType::U128 => do_wrapping_addition::<U128, _>(cl_value, to_add),\n\n CLType::U256 => do_wrapping_addition::<U256, _>(cl_value, to_add),\n\n CLType::U512 => do_wrapping_addition::<U512, _>(cl_value, to_add),\n\n other => {\n\n let expected = format!(\"integral type compatible with {}\", any::type_name::<Y>());\n\n let found = format!(\"{:?}\", other);\n\n Err(TypeMismatch::new(expected, found).into())\n\n }\n\n }\n\n}\n\n\n", "file_path": "execution_engine/src/shared/transform.rs", "rank": 89, "score": 334485.55675710755 }, { "content": "pub fn blake2b_hash_arb() -> impl Strategy<Value = Blake2bHash> {\n\n vec(any::<u8>(), 0..1000).prop_map(|b| Blake2bHash::new(&b))\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie/gens.rs", "rank": 90, "score": 
332961.4240712637 }, { "content": "fn store() -> (ContractHash, ContractVersion) {\n\n let entry_points = {\n\n let mut entry_points = EntryPoints::new();\n\n\n\n let entry_point = EntryPoint::new(\n\n FUNCTION_NAME,\n\n vec![\n\n Parameter::new(ARG_TARGET, AccountHash::cl_type()),\n\n Parameter::new(ARG_AMOUNT, CLType::U512),\n\n ],\n\n CLType::URef,\n\n EntryPointAccess::Public,\n\n EntryPointType::Session,\n\n );\n\n\n\n entry_points.add_entry_point(entry_point);\n\n\n\n entry_points\n\n };\n\n storage::new_contract(entry_points, None, None, None)\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let (contract_hash, contract_version) = store();\n\n let version_uref = storage::new_uref(contract_version);\n\n runtime::put_key(CONTRACT_VERSION_KEY, version_uref.into());\n\n runtime::put_key(CONTRACT_NAME, contract_hash.into());\n\n}\n", "file_path": "smart_contracts/contracts/client/transfer-to-account-stored/src/main.rs", "rank": 91, "score": 331518.8788819476 }, { "content": "/// Deserialized vector of bytes into `LinearChainSync::State`.\n\n/// Panics on deserialization errors.\n\nfn deserialize_state(serialized_state: &[u8]) -> Option<State> {\n\n bincode::deserialize(&serialized_state).unwrap_or_else(|error| {\n\n // Panicking here should not corrupt the state of any component as it's done in the\n\n // constructor.\n\n panic!(\n\n \"could not deserialize state from storage, error {:?}\",\n\n error\n\n )\n\n })\n\n}\n\n\n\n/// Reads the `LinearChainSync's` state from storage, if any.\n\n/// Panics on deserialization errors.\n\npub(crate) fn read_init_state(\n\n storage: &Storage,\n\n chainspec: &Chainspec,\n\n) -> Result<Option<State>, storage::Error> {\n\n let key = create_state_key(&chainspec);\n\n if let Some(bytes) = storage.read_state_store(&key)? 
{\n\n Ok(deserialize_state(&bytes))\n", "file_path": "node/src/components/linear_chain_sync.rs", "rank": 92, "score": 331017.9602116201 }, { "content": "/// Removes the given [`AccountHash`] from the account's associated keys.\n\npub fn remove_associated_key(account_hash: AccountHash) -> Result<(), RemoveKeyFailure> {\n\n let (account_hash_ptr, account_hash_size, _bytes) = to_ptr(account_hash);\n\n let result =\n\n unsafe { ext_ffi::casper_remove_associated_key(account_hash_ptr, account_hash_size) };\n\n if result == 0 {\n\n Ok(())\n\n } else {\n\n Err(RemoveKeyFailure::try_from(result).unwrap_or_revert())\n\n }\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/account.rs", "rank": 93, "score": 328450.1062166761 }, { "content": "#[test]\n\nfn validate_lnc_no_equivocation() -> Result<(), AddUnitError<TestContext>> {\n\n let mut state = State::new_test(WEIGHTS, 0);\n\n\n\n // No equivocations – incoming vote doesn't violate LNC.\n\n // Create votes as follows; a0, b0 are blocks:\n\n //\n\n // Alice: a0 — a1\n\n // /\n\n // Bob: b0\n\n let a0 = add_unit!(state, ALICE, 0xA; N, N, N)?;\n\n let b0 = add_unit!(state, BOB, 0xB; N, N, N)?;\n\n\n\n // a1 does not violate LNC\n\n add_unit!(state, ALICE, None; a0, b0, N)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "node/src/components/consensus/highway_core/state/tests.rs", "rank": 94, "score": 328315.8772938926 }, { "content": "pub fn trie_pointer_block_arb() -> impl Strategy<Value = PointerBlock> {\n\n vec(option::of(trie_pointer_arb()), 256).prop_map(|vec| {\n\n let mut ret: [Option<Pointer>; 256] = [Default::default(); 256];\n\n ret.clone_from_slice(vec.as_slice());\n\n ret.into()\n\n })\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie/gens.rs", "rank": 96, "score": 327118.819156356 }, { "content": "fn timestamp(value: &str) -> Result<Timestamp> {\n\n if value.is_empty() {\n\n return Ok(Timestamp::now());\n\n }\n\n Timestamp::from_str(value).map_err(|error| 
Error::FailedToParseTimestamp(\"timestamp\", error))\n\n}\n\n\n", "file_path": "client/lib/parsing.rs", "rank": 97, "score": 326760.7781091451 }, { "content": "#[ignore]\n\n#[test]\n\nfn should_fail_payment_stored_at_named_key_with_incompatible_major_version() {\n\n let payment_purse_amount = *DEFAULT_PAYMENT;\n\n\n\n // first, store payment contract\n\n let exec_request = ExecuteRequestBuilder::standard(\n\n *DEFAULT_ACCOUNT_ADDR,\n\n STORED_PAYMENT_CONTRACT_NAME,\n\n RuntimeArgs::default(),\n\n )\n\n .build();\n\n\n\n let mut builder = InMemoryWasmTestBuilder::default();\n\n builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);\n\n\n\n builder.exec_commit_finish(exec_request);\n\n\n\n let query_result = builder\n\n .query(None, Key::Account(*DEFAULT_ACCOUNT_ADDR), &[])\n\n .expect(\"should query default account\");\n\n let default_account = query_result\n", "file_path": "execution_engine_testing/tests/src/test/deploy/stored_contracts.rs", "rank": 98, "score": 326664.42152324156 }, { "content": "#[ignore]\n\n#[test]\n\nfn should_fail_session_stored_at_named_key_with_incompatible_major_version() {\n\n let payment_purse_amount = *DEFAULT_PAYMENT;\n\n\n\n // first, store payment contract for v1.0.0\n\n let exec_request_1 = ExecuteRequestBuilder::standard(\n\n *DEFAULT_ACCOUNT_ADDR,\n\n &format!(\"{}_stored.wasm\", DO_NOTHING_NAME),\n\n RuntimeArgs::default(),\n\n )\n\n .build();\n\n\n\n let mut builder = InMemoryWasmTestBuilder::default();\n\n builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);\n\n\n\n builder.exec_commit_finish(exec_request_1);\n\n\n\n let query_result = builder\n\n .query(None, Key::Account(*DEFAULT_ACCOUNT_ADDR), &[])\n\n .expect(\"should query default account\");\n\n let default_account = query_result\n", "file_path": "execution_engine_testing/tests/src/test/deploy/stored_contracts.rs", "rank": 99, "score": 326664.42152324156 } ]
Rust
src/poller.rs
dwrensha/multipoll
91eddb3bf9281c5d300efbef9a7b605a3e1a0122
use std::pin::Pin; use std::task::{Context, Poll}; use futures::{Future, FutureExt, Stream}; use futures::stream::FuturesUnordered; use futures::channel::mpsc; use std::cell::{RefCell}; use std::rc::Rc; enum EnqueuedTask<E> { Task(Pin<Box<dyn Future<Output=Result<(), E>>>>), Terminate(Result<(), E>), } enum TaskInProgress<E> { Task(Pin<Box<dyn Future<Output=()>>>), Terminate(Option<Result<(), E>>), } impl <E> Unpin for TaskInProgress<E> {} enum TaskDone<E> { Continue, Terminate(Result<(), E>), } impl <E> Future for TaskInProgress<E> { type Output = TaskDone<E>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { match *self { TaskInProgress::Terminate(ref mut r) => Poll::Ready(TaskDone::Terminate(r.take().unwrap())), TaskInProgress::Task(ref mut f) => { match f.as_mut().poll(cx) { Poll::Pending => Poll::Pending, Poll::Ready(()) => Poll::Ready(TaskDone::Continue), } } } } } #[must_use = "a Poller does nothing unless polled"] pub struct Poller<E> { enqueued: Option<mpsc::UnboundedReceiver<EnqueuedTask<E>>>, in_progress: FuturesUnordered<TaskInProgress<E>>, reaper: Rc<RefCell<Box<dyn Finisher<E>>>>, } impl<E> Poller<E> where E: 'static { pub fn new(reaper: Box<dyn Finisher<E>>) -> (PollerHandle<E>, Poller<E>) where E: 'static, E: ::std::fmt::Debug, { let (sender, receiver) = mpsc::unbounded(); let set = Poller { enqueued: Some(receiver), in_progress: FuturesUnordered::new(), reaper: Rc::new(RefCell::new(reaper)), }; set.in_progress.push(TaskInProgress::Task(Box::pin(::futures::future::pending()))); let handle = PollerHandle { sender: sender, }; (handle, set) } } #[derive(Clone)] pub struct PollerHandle<E> { sender: mpsc::UnboundedSender<EnqueuedTask<E>> } impl <E> PollerHandle<E> where E: 'static { pub fn add<F>(&mut self, f: F) where F: Future<Output = Result<(), E>> + 'static { let _ = self.sender.unbounded_send(EnqueuedTask::Task(Box::pin(f))); } pub fn terminate(&mut self, result: Result<(), E>) { let _ = 
self.sender.unbounded_send(EnqueuedTask::Terminate(result)); } } pub trait Finisher<E> where E: 'static { fn task_succeeded(&mut self) {} fn task_failed(&mut self, error: E); } impl <E> Future for Poller<E> where E: 'static { type Output = Result<(),E>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { let mut enqueued_stream_complete = false; if let Poller { enqueued: Some(ref mut enqueued), ref mut in_progress, ref reaper, ..} = self.as_mut().get_mut() { loop { match Pin::new(&mut *enqueued).poll_next(cx) { Poll::Pending => break, Poll::Ready(None) => { enqueued_stream_complete = true; break; } Poll::Ready(Some(EnqueuedTask::Terminate(r))) => { in_progress.push(TaskInProgress::Terminate(Some(r))); } Poll::Ready(Some(EnqueuedTask::Task(f))) => { let reaper = Rc::downgrade(&reaper); in_progress.push( TaskInProgress::Task(Box::pin( f.map(move |r| { match reaper.upgrade() { None => (), Some(rc_reaper) => { match r { Ok(()) => rc_reaper.borrow_mut().task_succeeded(), Err(e) => rc_reaper.borrow_mut().task_failed(e), } } } })))); } } } } if enqueued_stream_complete { drop(self.enqueued.take()); } loop { match Stream::poll_next(Pin::new(&mut self.in_progress), cx) { Poll::Pending => return Poll::Pending, Poll::Ready(v) => { match v { None => return Poll::Ready(Ok(())), Some(TaskDone::Continue) => (), Some(TaskDone::Terminate(Ok(()))) => return Poll::Ready(Ok(())), Some(TaskDone::Terminate(Err(e))) => return Poll::Ready(Err(e)), } } } } } }
use std::pin::Pin; use std::task::{Context, Poll}; use futures::{Future, FutureExt, Stream}; use futures::stream::FuturesUnordered; use futures::channel::mpsc; use std::cell::{RefCell}; use std::rc::Rc; enum EnqueuedTask<E> { Task(Pin<Box<dyn Future<Output=Result<(), E>>>>), Terminate(Result<(), E>), } enum TaskInProgress<E> { Task(Pin<Box<dyn Future<Output=()>>>), Terminate(Option<Result<(), E>>), } impl <E> Unpin for TaskInProgress<E> {} enum TaskDone<E> { Continue, Terminate(Result<(), E>), } impl <E> Future for TaskInProgress<E> { type Output = TaskDone<E>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { match *self { TaskInProgress::Terminate(ref mut r) => Poll::Ready(TaskDone::Terminate(r.take().unwrap())), TaskInProgress::Task(ref mut f) => { match f.as_mut().poll(cx) { Poll::Pending => Poll::Pending, Poll::Ready(()) => Poll::Ready(TaskDone::Continue), } } } } } #[must_use = "a Poller does nothing unless polled"] pub struct Poller<E> { enqueued: Option<mpsc::UnboundedReceiver<EnqueuedTask<E>>>, in_progress: FuturesUnordered<TaskInProgress<E>>, reaper: Rc<RefCell<Box<dyn Finisher<E>>>>, } impl<E> Poller<E> where E: 'static { pub fn new(reaper: Box<dyn Finisher<E>>) -> (PollerHandle<E>, Poller<E>) where E: 'static, E: ::std::fmt::Debug, { let (sender, receiver) = mpsc::unbounded(); let set = Poller { enqueued: Some(receiver), in_progress: FuturesUnordered::new(), reaper: Rc::new(RefCell::new(reaper)), }; set.in_progress.push(TaskInProgress::Task(Box::pin(::futures::future::pending()))); let handle = PollerHandle { sender: sender, }; (handle, set) } } #[derive(Clone)] pub struct PollerHandle<E> { sender: mpsc::UnboundedSender<EnqueuedTask<E>> } impl <E> PollerHandle<E> where E: 'static { pub fn add<F>(&mut self, f: F) where F: Future<Output = Result<(), E>> + 'static { let _ = self.sender.unbounded_send(EnqueuedTask::Task(Box::pin(f))); } pub fn terminate(&mut self, result: Result<(), E>) { let _ = 
self.sender.unbounded_send(EnqueuedTask::Terminate(result)); } } pub trait Finisher<E> where E: 'static { fn task_succeeded(&mut self) {} fn task_failed(&mut self, error: E); } impl <E> Future for Pol
ueued_stream_complete = true; break; } Poll::Ready(Some(EnqueuedTask::Terminate(r))) => { in_progress.push(TaskInProgress::Terminate(Some(r))); } Poll::Ready(Some(EnqueuedTask::Task(f))) => { let reaper = Rc::downgrade(&reaper); in_progress.push( TaskInProgress::Task(Box::pin( f.map(move |r| { match reaper.upgrade() { None => (), Some(rc_reaper) => { match r { Ok(()) => rc_reaper.borrow_mut().task_succeeded(), Err(e) => rc_reaper.borrow_mut().task_failed(e), } } } })))); } } } } if enqueued_stream_complete { drop(self.enqueued.take()); } loop { match Stream::poll_next(Pin::new(&mut self.in_progress), cx) { Poll::Pending => return Poll::Pending, Poll::Ready(v) => { match v { None => return Poll::Ready(Ok(())), Some(TaskDone::Continue) => (), Some(TaskDone::Terminate(Ok(()))) => return Poll::Ready(Ok(())), Some(TaskDone::Terminate(Err(e))) => return Poll::Ready(Err(e)), } } } } } }
ler<E> where E: 'static { type Output = Result<(),E>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { let mut enqueued_stream_complete = false; if let Poller { enqueued: Some(ref mut enqueued), ref mut in_progress, ref reaper, ..} = self.as_mut().get_mut() { loop { match Pin::new(&mut *enqueued).poll_next(cx) { Poll::Pending => break, Poll::Ready(None) => { enq
random
[ { "content": "multipoll\n\n=========\n\n\n", "file_path": "README.md", "rank": 4, "score": 7310.110037664616 }, { "content": "extern crate futures;\n\n\n\npub use poller::{Poller, PollerHandle, Finisher};\n\n\n\nmod poller;\n", "file_path": "src/lib.rs", "rank": 11, "score": 6.242998709330985 } ]
Rust
src/contract.rs
SivaS2003/cw-template
2d75e09e2a9dfd65813bcfd71fa54fb888df33f7
#[cfg(not(feature = "library"))] use cosmwasm_std::entry_point; use cosmwasm_std::{to_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdResult, Coin, BankMsg, has_coins}; use cosmwasm_std::Addr; use cw2::set_contract_version; use crate::error::ContractError; use crate::msg::{ExecuteMsg, InstantiateMsg, QueryMsg, AddressResponse, CostResponse, OwnerResponse}; use crate::state::{State, STATE}; const CONTRACT_NAME: &str = "crates.io:{{project-name}}"; const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION"); #[cfg_attr(not(feature = "library"), entry_point)] pub fn instantiate( deps: DepsMut, _env: Env, info: MessageInfo, msg: InstantiateMsg, ) -> Result<Response, ContractError> { let state = State { address : msg.address, rentcost : msg.rentcost, renters : Vec::new(), owner : info.sender.clone(), ownername : msg.ownername, }; set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?; STATE.save(deps.storage, &state)?; Ok(Response::new() .add_attribute("method", "instantiate") .add_attribute("owner", info.sender) .add_attribute("address", msg.address)) } #[cfg_attr(not(feature = "library"), entry_point)] pub fn execute( deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg, ) -> Result<Response, ContractError> { match msg { ExecuteMsg::RenterAdd {} => try_add(deps, env, info), ExecuteMsg::RenterPay {} => try_pay(deps, env, info), ExecuteMsg::RenterBoot {} => try_boot(deps, info), ExecuteMsg::ChangeRent {newprice} => try_ChangeRent(deps, info, newprice), } } pub fn try_add(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { state.renters.push(env.contract.address); Ok(state) })?; Ok(Response::new().add_attribute("method", "try_add")) } pub fn try_pay( deps: DepsMut, env: Env, info: MessageInfo, ) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { if has_coins(&info.funds, &state.rentcost){ 
let send = BankMsg::Send { to_address: state.owner.into_string(), amount: info.funds, }; } Ok(state) })?; Ok(Response::new().add_attribute("method", "try_pay")) } pub fn try_boot(deps: DepsMut, info: MessageInfo) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { if info.sender != state.owner { return Err(ContractError::Unauthorized{}); } state.renters.pop(); Ok(state) })?; Ok(Response::new().add_attribute("method", "try_boot")) } pub fn try_ChangeRent(deps: DepsMut, info: MessageInfo, newprice: Vec<Coin>) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { if info.sender != state.owner { return Err(ContractError::Unauthorized{}); } state.rentcost = newprice; Ok(state) })?; Ok(Response::new()) } #[cfg_attr(not(feature = "library"), entry_point)] pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> { match msg { QueryMsg::GetAddress {} => to_binary(&query_address(deps)?), QueryMsg::GetRentCost {} => to_binary(&query_rent(deps)?), QueryMsg::GetOwner {} => to_binary(&query_owner(deps)?), } } fn query_address(deps: Deps) -> StdResult<AddressResponse> { let state = STATE.load(deps.storage)?; Ok(AddressResponse { address: state.address }) } fn query_rent(deps: Deps) -> StdResult<CostResponse> { let state = STATE.load(deps.storage)?; Ok(CostResponse {rentcost: state.rentcost}) } fn query_owner(deps: Deps) -> StdResult<OwnerResponse> { let state = STATE.load(deps.storage)?; Ok(OwnerResponse {ownername : state.ownername}) } #[cfg(test)] mod tests { use super::*; use cosmwasm_std::testing::{mock_dependencies_with_balance, mock_env, mock_info}; use cosmwasm_std::{coins, from_binary}; #[test] fn proper_initialization() { let mut deps = mock_dependencies_with_balance(&coins(2, "token")); let msg = InstantiateMsg { address: String::from("33 pawn lane"), rentcost: coins(10, "luna"), renters : Vec::new(), ownername: String::from("john doe") }; let info = 
mock_info("landlord", &coins(0, "luna")); let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); assert_eq!(0, res.messages.len()); let res = query(deps.as_ref(), mock_env(), QueryMsg::GetAddress {}).unwrap(); let value: AddressResponse = from_binary(&res).unwrap(); assert_eq!(String::from("33 pawn lane"), value.address); let res = query(deps.as_ref(), mock_env(), QueryMsg::GetRentCost {}).unwrap(); let value: CostResponse = from_binary(&res).unwrap(); assert_eq!(coins(10, "luna"), value.rentcost); let res = query(deps.as_ref(), mock_env(), QueryMsg::GetRentCost {}).unwrap(); let value: OwnerResponse = from_binary(&res).unwrap(); assert_eq!(String::from("john doe"), value.ownername); } #[test] fn renters() { let mut deps = mock_dependencies_with_balance(&coins(2, "token")); let msg = InstantiateMsg { address: String::from("33 pawn lane"), rentcost: coins(10, "luna"), renters : Vec::new(), ownername: String::from("john doe") }; let info = mock_info("landlord", &coins(0, "luna")); let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); let info2 = mock_info("jane doe", &coins(2, "luna")); let msg = ExecuteMsg::RenterAdd {}; let _res = execute(deps.as_mut(), mock_env(), info2, msg).unwrap(); let info3 = mock_info("jaja doe", &coins(3, "luna")); let msg = ExecuteMsg::RenterBoot {}; let res = execute(deps.as_mut(), mock_env(), info3, msg); match res { Err(ContractError::Unauthorized {}) => {} _ => panic!("Must return unauthorized error"), } } }
#[cfg(not(feature = "library"))] use cosmwasm_std::entry_point; use cosmwasm_std::{to_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdResult, Coin, BankMsg, has_coins}; use cosmwasm_std::Addr; use cw2::set_contract_version; use crate::error::ContractError; use crate::msg::{ExecuteMsg, InstantiateMsg, QueryMsg, AddressResponse, CostResponse, OwnerResponse}; use crate::state::{State, STATE}; const CONTRACT_NAME: &str = "crates.io:{{project-name}}"; const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION"); #[cfg_attr(not(feature = "library"), entry_point)] pub fn instantiate( deps: DepsMut, _env: Env, info: MessageInfo, msg: InstantiateMsg, ) -> Result<Response, ContractError> { let state = State { address : msg.address, rentcost : msg.rentcost, renters : Vec::new(), owner : info.sender.clone(), ownername : msg.ownername, }; set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?; STATE.save(deps.storage, &state)?;
} #[cfg_attr(not(feature = "library"), entry_point)] pub fn execute( deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg, ) -> Result<Response, ContractError> { match msg { ExecuteMsg::RenterAdd {} => try_add(deps, env, info), ExecuteMsg::RenterPay {} => try_pay(deps, env, info), ExecuteMsg::RenterBoot {} => try_boot(deps, info), ExecuteMsg::ChangeRent {newprice} => try_ChangeRent(deps, info, newprice), } } pub fn try_add(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { state.renters.push(env.contract.address); Ok(state) })?; Ok(Response::new().add_attribute("method", "try_add")) } pub fn try_pay( deps: DepsMut, env: Env, info: MessageInfo, ) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { if has_coins(&info.funds, &state.rentcost){ let send = BankMsg::Send { to_address: state.owner.into_string(), amount: info.funds, }; } Ok(state) })?; Ok(Response::new().add_attribute("method", "try_pay")) } pub fn try_boot(deps: DepsMut, info: MessageInfo) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { if info.sender != state.owner { return Err(ContractError::Unauthorized{}); } state.renters.pop(); Ok(state) })?; Ok(Response::new().add_attribute("method", "try_boot")) } pub fn try_ChangeRent(deps: DepsMut, info: MessageInfo, newprice: Vec<Coin>) -> Result<Response, ContractError> { STATE.update(deps.storage, |mut state| -> Result<_, ContractError> { if info.sender != state.owner { return Err(ContractError::Unauthorized{}); } state.rentcost = newprice; Ok(state) })?; Ok(Response::new()) } #[cfg_attr(not(feature = "library"), entry_point)] pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> { match msg { QueryMsg::GetAddress {} => to_binary(&query_address(deps)?), QueryMsg::GetRentCost {} => to_binary(&query_rent(deps)?), QueryMsg::GetOwner 
{} => to_binary(&query_owner(deps)?), } } fn query_address(deps: Deps) -> StdResult<AddressResponse> { let state = STATE.load(deps.storage)?; Ok(AddressResponse { address: state.address }) } fn query_rent(deps: Deps) -> StdResult<CostResponse> { let state = STATE.load(deps.storage)?; Ok(CostResponse {rentcost: state.rentcost}) } fn query_owner(deps: Deps) -> StdResult<OwnerResponse> { let state = STATE.load(deps.storage)?; Ok(OwnerResponse {ownername : state.ownername}) } #[cfg(test)] mod tests { use super::*; use cosmwasm_std::testing::{mock_dependencies_with_balance, mock_env, mock_info}; use cosmwasm_std::{coins, from_binary}; #[test] fn proper_initialization() { let mut deps = mock_dependencies_with_balance(&coins(2, "token")); let msg = InstantiateMsg { address: String::from("33 pawn lane"), rentcost: coins(10, "luna"), renters : Vec::new(), ownername: String::from("john doe") }; let info = mock_info("landlord", &coins(0, "luna")); let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); assert_eq!(0, res.messages.len()); let res = query(deps.as_ref(), mock_env(), QueryMsg::GetAddress {}).unwrap(); let value: AddressResponse = from_binary(&res).unwrap(); assert_eq!(String::from("33 pawn lane"), value.address); let res = query(deps.as_ref(), mock_env(), QueryMsg::GetRentCost {}).unwrap(); let value: CostResponse = from_binary(&res).unwrap(); assert_eq!(coins(10, "luna"), value.rentcost); let res = query(deps.as_ref(), mock_env(), QueryMsg::GetRentCost {}).unwrap(); let value: OwnerResponse = from_binary(&res).unwrap(); assert_eq!(String::from("john doe"), value.ownername); } #[test] fn renters() { let mut deps = mock_dependencies_with_balance(&coins(2, "token")); let msg = InstantiateMsg { address: String::from("33 pawn lane"), rentcost: coins(10, "luna"), renters : Vec::new(), ownername: String::from("john doe") }; let info = mock_info("landlord", &coins(0, "luna")); let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); let info2 = 
mock_info("jane doe", &coins(2, "luna")); let msg = ExecuteMsg::RenterAdd {}; let _res = execute(deps.as_mut(), mock_env(), info2, msg).unwrap(); let info3 = mock_info("jaja doe", &coins(3, "luna")); let msg = ExecuteMsg::RenterBoot {}; let res = execute(deps.as_mut(), mock_env(), info3, msg); match res { Err(ContractError::Unauthorized {}) => {} _ => panic!("Must return unauthorized error"), } } }
Ok(Response::new() .add_attribute("method", "instantiate") .add_attribute("owner", info.sender) .add_attribute("address", msg.address))
call_expression
[ { "content": "fn main() {\n\n let mut out_dir = current_dir().unwrap();\n\n out_dir.push(\"schema\");\n\n create_dir_all(&out_dir).unwrap();\n\n remove_schemas(&out_dir).unwrap();\n\n\n\n export_schema(&schema_for!(InstantiateMsg), &out_dir);\n\n export_schema(&schema_for!(ExecuteMsg), &out_dir);\n\n export_schema(&schema_for!(QueryMsg), &out_dir);\n\n export_schema(&schema_for!(State), &out_dir);\n\n export_schema(&schema_for!(OwnerResponse), &out_dir);\n\n}\n", "file_path": "examples/schema.rs", "rank": 10, "score": 23505.233990617664 }, { "content": "use schemars::JsonSchema;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse cosmwasm_std::Addr;\n\nuse cw_storage_plus::Item;\n\nuse cosmwasm_std::Coin;\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\n\n\n// the state of the smart contract, aka where the info for the instance of the smart contract is stored\n\npub struct State {\n\n pub address : String,\n\n pub rentcost : Vec<Coin>,\n\n pub renters : Vec<Addr>,\n\n pub ownername : String,\n\n pub owner: Addr,\n\n\n\n}\n\n\n\npub const STATE: Item<State> = Item::new(\"state\");\n", "file_path": "src/state.rs", "rank": 11, "score": 22315.509979306367 }, { "content": "use schemars::JsonSchema;\n\nuse serde::{Deserialize, Serialize};\n\nuse cosmwasm_std::Coin;\n\nuse cosmwasm_std::Addr;\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct InstantiateMsg {\n\n pub address : String,\n\n pub rentcost : Vec<Coin>,\n\n pub renters : Vec<Addr>,\n\n pub ownername : String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum ExecuteMsg {\n\n RenterAdd {},\n\n RenterPay {},\n\n RenterBoot {},\n\n ChangeRent {newprice: Vec<Coin>},\n", "file_path": "src/msg.rs", "rank": 12, "score": 21864.37483504397 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\n#[serde(rename_all = 
\"snake_case\")]\n\npub enum QueryMsg {\n\n GetAddress {},\n\n GetRentCost {},\n\n GetOwner {},\n\n}\n\n\n\n// We define a custom struct for each query response\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct AddressResponse {\n\n // we should only have to define the variables we need to share here\n\n pub address : String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct CostResponse {\n\n // we should only have to define the variables we need to share here\n\n pub rentcost : Vec<Coin>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct OwnerResponse {\n\n // we should only have to define the variables we need to share here\n\n pub ownername : String,\n\n}\n", "file_path": "src/msg.rs", "rank": 13, "score": 21859.547550119052 }, { "content": "use std::env::current_dir;\n\nuse std::fs::create_dir_all;\n\n\n\nuse cosmwasm_schema::{export_schema, remove_schemas, schema_for};\n\n\n\nuse {{crate_name}}::msg::{CountResponse, ExecuteMsg, InstantiateMsg, QueryMsg};\n\nuse {{crate_name}}::state::State;\n\n\n", "file_path": "examples/schema.rs", "rank": 17, "score": 10.18132975090221 }, { "content": "pub mod contract;\n\nmod error;\n\npub mod msg;\n\npub mod state;\n\n\n\npub use crate::error::ContractError;\n", "file_path": "src/lib.rs", "rank": 18, "score": 8.185906446520297 }, { "content": "use cosmwasm_std::StdError;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ContractError {\n\n #[error(\"{0}\")]\n\n Std(#[from] StdError),\n\n\n\n #[error(\"Unauthorized\")]\n\n Unauthorized {},\n\n\n\n #[error(\"this process is currently in progress.\")]\n\n TimeOutError {},\n\n\n\n #[error(\"this wallet address does not exist in this contract.\")]\n\n RenterNotFoundError {},\n\n\n\n // Add any other custom errors you like here.\n\n // Look at https://docs.rs/thiserror/1.0.21/thiserror/ for details.\n\n}\n", "file_path": 
"src/error.rs", "rank": 19, "score": 6.092504350024579 }, { "content": "### Uploading\n\n\n\nOnce this is set up, make sure you commit the current state you want to publish.\n\nThen try `cargo publish --dry-run`. If that works well, review the files that\n\nwill be published via `cargo package --list`. If you are satisfied, you can now\n\nofficially publish it via `cargo publish`.\n\n\n\nCongratulations, your package is public to the world.\n\n\n\n### Sharing\n\n\n\nOnce you have published your package, people can now find it by\n\n[searching for \"cw-\" on crates.io](https://crates.io/search?q=cw).\n\nBut that isn't exactly the simplest way. To make things easier and help\n\nkeep the ecosystem together, we suggest making a PR to add your package\n\nto the [`cawesome-wasm`](https://github.com/cosmwasm/cawesome-wasm) list.\n\n\n\n### Organizations\n\n\n\nMany times you are writing a contract not as a solo developer, but rather as\n\npart of an organization. You will want to allow colleagues to upload new\n\nversions of the contract to crates.io when you are on holiday.\n\n[These instructions show how]() you can set up your crate to allow multiple maintainers.\n\n\n\nYou can add another owner to the crate by specifying their github user. Note, you will\n\nnow both have complete control of the crate, and they can remove you:\n\n\n\n`cargo owner --add ethanfrey`\n\n\n\nYou can also add an existing github team inside your organization:\n\n\n\n`cargo owner --add github:confio:developers`\n\n\n\nThe team will allow anyone who is currently in the team to publish new versions of the crate.\n\nAnd this is automatically updated when you make changes on github. However, it will not allow\n\nanyone in the team to add or remove other owners.\n", "file_path": "Publishing.md", "rank": 20, "score": 2.5754652585400164 }, { "content": "## Generating JSON Schema\n\n\n\nWhile the Wasm calls (`instantiate`, `execute`, `query`) accept JSON, this is not enough\n\ninformation to use it. 
We need to expose the schema for the expected messages to the\n\nclients. You can generate this schema by calling `cargo schema`, which will output\n\n4 files in `./schema`, corresponding to the 3 message types the contract accepts,\n\nas well as the internal `State`.\n\n\n\nThese files are in standard json-schema format, which should be usable by various\n\nclient side tools, either to auto-generate codecs, or just to validate incoming\n\njson wrt. the defined schema.\n\n\n\n## Preparing the Wasm bytecode for production\n\n\n\nBefore we upload it to a chain, we need to ensure the smallest output size possible,\n\nas this will be included in the body of a transaction. We also want to have a\n\nreproducible build process, so third parties can verify that the uploaded Wasm\n\ncode did indeed come from the claimed rust code.\n\n\n\nTo solve both these issues, we have produced `rust-optimizer`, a docker image to\n\nproduce an extremely small build output in a consistent manner. The suggest way\n\nto run it is this:\n\n\n\n```sh\n\ndocker run --rm -v \"$(pwd)\":/code \\\n\n --mount type=volume,source=\"$(basename \"$(pwd)\")_cache\",target=/code/target \\\n\n --mount type=volume,source=registry_cache,target=/usr/local/cargo/registry \\\n\n cosmwasm/rust-optimizer:0.12.4\n\n```\n\n\n\nOr, If you're on an arm64 machine, you should use a docker image built with arm64.\n\n```sh\n\ndocker run --rm -v \"$(pwd)\":/code \\\n\n --mount type=volume,source=\"$(basename \"$(pwd)\")_cache\",target=/code/target \\\n\n --mount type=volume,source=registry_cache,target=/usr/local/cargo/registry \\\n\n cosmwasm/rust-optimizer-arm64:0.12.4\n", "file_path": "Developing.md", "rank": 21, "score": 2.438034574363325 }, { "content": "## CI Support\n\n\n\nWe have template configurations for both [GitHub Actions](.github/workflows/Basic.yml)\n\nand [Circle CI](.circleci/config.yml) in the generated project, so you can\n\nget up and running with CI right away.\n\n\n\nOne note is that the CI runs 
all `cargo` commands\n\nwith `--locked` to ensure it uses the exact same versions as you have locally. This also means\n\nyou must have an up-to-date `Cargo.lock` file, which is not auto-generated.\n\nThe first time you set up the project (or after adding any dep), you should ensure the\n\n`Cargo.lock` file is updated, so the CI will test properly. This can be done simply by\n\nrunning `cargo check` or `cargo unit-test`.\n\n\n\n## Using your project\n\n\n\nOnce you have your custom repo, you should check out [Developing](./Developing.md) to explain\n\nmore on how to run tests and develop code. Or go through the\n\n[online tutorial](https://docs.cosmwasm.com/) to get a better feel\n\nof how to develop.\n\n\n\n[Publishing](./Publishing.md) contains useful information on how to publish your contract\n\nto the world, once you are ready to deploy it on a running blockchain. And\n\n[Importing](./Importing.md) contains information about pulling in other contracts or crates\n\nthat have been published.\n\n\n\nPlease replace this README file with information about your specific project. 
You can keep\n\nthe `Developing.md` and `Publishing.md` files as useful referenced, but please set some\n\nproper description in the README.\n\n\n\n## Gitpod integration\n\n\n\n[Gitpod](https://www.gitpod.io/) container-based development platform will be enabled on your project by default.\n\n\n\nWorkspace contains:\n\n - **rust**: for builds\n\n - [wasmd](https://github.com/CosmWasm/wasmd): for local node setup and client\n\n - **jq**: shell JSON manipulation tool\n\n\n\nFollow [Gitpod Getting Started](https://www.gitpod.io/docs/getting-started) and launch your workspace.\n\n\n", "file_path": "README.md", "rank": 22, "score": 2.103564401039155 }, { "content": "# Importing\n\n\n\nIn [Publishing](./Publishing.md), we discussed how you can publish your contract to the world.\n\nThis looks at the flip-side, how can you use someone else's contract (which is the same\n\nquestion as how they will use your contract). Let's go through the various stages.\n\n\n\n## Verifying Artifacts\n\n\n\nBefore using remote code, you most certainly want to verify it is honest.\n\n\n\nThe simplest audit of the repo is to simply check that the artifacts in the repo\n\nare correct. This involves recompiling the claimed source with the claimed builder\n\nand validating that the locally compiled code (hash) matches the code hash that was\n\nuploaded. This will verify that the source code is the correct preimage. Which allows\n\none to audit the original (Rust) source code, rather than looking at wasm bytecode.\n\n\n\nWe have a script to do this automatic verification steps that can\n\neasily be run by many individuals. Please check out\n\n[`cosmwasm-verify`](https://github.com/CosmWasm/cosmwasm-verify/blob/master/README.md)\n\nto see a simple shell script that does all these steps and easily allows you to verify\n\nany uploaded contract.\n\n\n\n## Reviewing\n\n\n\nOnce you have done the quick programatic checks, it is good to give at least a quick\n\nlook through the code. 
A glance at `examples/schema.rs` to make sure it is outputing\n\nall relevant structs from `contract.rs`, and also ensure `src/lib.rs` is just the\n\ndefault wrapper (nothing funny going on there). After this point, we can dive into\n\nthe contract code itself. Check the flows for the execute methods, any invariants and\n\npermission checks that should be there, and a reasonable data storage format.\n\n\n\nYou can dig into the contract as far as you want, but it is important to make sure there\n\nare no obvious backdoors at least.\n\n\n", "file_path": "Importing.md", "rank": 23, "score": 2.0551195164556018 }, { "content": "### Updating schema\n\n\n\nTo allow easy use of the contract, we can publish the schema (`schema/*.json`) together\n\nwith the source code.\n\n\n\n```sh\n\ncargo schema\n\n```\n\n\n\nEnsure you check in all the schema files, and make a git commit with the final state.\n\nThis commit will be published and should be tagged. Generally, you will want to\n\ntag with the version (eg. `v0.1.0`), but in the `cosmwasm-examples` repo, we have\n\nmultiple contracts and label it like `escrow-0.1.0`. Don't forget a\n\n`git push && git push --tags`\n\n\n\n### Note on build results\n\n\n\nBuild results like Wasm bytecode or expected hash don't need to be updated since\n\nthey don't belong to the source publication. 
However, they are excluded from packaging\n\nin `Cargo.toml` which allows you to commit them to your git repository if you like.\n\n\n\n```toml\n\nexclude = [\"artifacts\"]\n\n```\n\n\n\nA single source code can be built with multiple different optimizers, so\n\nwe should not make any strict assumptions on the tooling that will be used.\n\n\n\n## Publishing\n\n\n\nNow that your package is properly configured and all artifacts are committed, it\n\nis time to share it with the world.\n\nPlease refer to the [complete instructions for any questions](https://rurust.github.io/cargo-docs-ru/crates-io.html),\n\nbut I will try to give a quick overview of the happy path here.\n\n\n\n### Registry\n\n\n\nYou will need an account on [crates.io](https://crates.io) to publish a rust crate.\n\nIf you don't have one already, just click on \"Log in with GitHub\" in the top-right\n\nto quickly set up a free account. Once inside, click on your username (top-right),\n\nthen \"Account Settings\". On the bottom, there is a section called \"API Access\".\n\nIf you don't have this set up already, create a new token and use `cargo login`\n\nto set it up. This will now authenticate you with the `cargo` cli tool and allow\n\nyou to publish.\n\n\n", "file_path": "Publishing.md", "rank": 24, "score": 1.8879463295010503 }, { "content": "```\n\n\n\nWe must mount the contract code to `/code`. You can use a absolute path instead\n\nof `$(pwd)` if you don't want to `cd` to the directory first. The other two\n\nvolumes are nice for speedup. Mounting `/code/target` in particular is useful\n\nto avoid docker overwriting your local dev files with root permissions.\n\nNote the `/code/target` cache is unique for each contract being compiled to limit\n\ninterference, while the registry cache is global.\n\n\n\nThis is rather slow compared to local compilations, especially the first compile\n\nof a given contract. 
The use of the two volume caches is very useful to speed up\n\nfollowing compiles of the same contract.\n\n\n\nThis produces an `artifacts` directory with a `PROJECT_NAME.wasm`, as well as\n\n`checksums.txt`, containing the Sha256 hash of the wasm file.\n\nThe wasm file is compiled deterministically (anyone else running the same\n\ndocker on the same git commit should get the identical file with the same Sha256 hash).\n\nIt is also stripped and minimized for upload to a blockchain (we will also\n\ngzip it in the uploading process to make it even smaller).\n", "file_path": "Developing.md", "rank": 25, "score": 1.6507021042864012 }, { "content": "# Publishing Contracts\n\n\n\nThis is an overview of how to publish the contract's source code in this repo.\n\nWe use Cargo's default registry [crates.io](https://crates.io/) for publishing contracts written in Rust.\n\n\n\n## Preparation\n\n\n\nEnsure the `Cargo.toml` file in the repo is properly configured. In particular, you want to\n\nchoose a name starting with `cw-`, which will help a lot finding CosmWasm contracts when\n\nsearching on crates.io. For the first publication, you will probably want version `0.1.0`.\n\nIf you have tested this on a public net already and/or had an audit on the code,\n\nyou can start with `1.0.0`, but that should imply some level of stability and confidence.\n\nYou will want entries like the following in `Cargo.toml`:\n\n\n\n```toml\n\nname = \"cw-escrow\"\n\nversion = \"0.1.0\"\n\ndescription = \"Simple CosmWasm contract for an escrow with arbiter and timeout\"\n\nrepository = \"https://github.com/confio/cosmwasm-examples\"\n\n```\n\n\n\nYou will also want to add a valid [SPDX license statement](https://spdx.org/licenses/),\n\nso others know the rules for using this crate. 
You can use any license you wish,\n\neven a commercial license, but we recommend choosing one of the following, unless you have\n\nspecific requirements.\n\n\n\n* Permissive: [`Apache-2.0`](https://spdx.org/licenses/Apache-2.0.html#licenseText) or [`MIT`](https://spdx.org/licenses/MIT.html#licenseText)\n\n* Copyleft: [`GPL-3.0-or-later`](https://spdx.org/licenses/GPL-3.0-or-later.html#licenseText) or [`AGPL-3.0-or-later`](https://spdx.org/licenses/AGPL-3.0-or-later.html#licenseText)\n\n* Commercial license: `Commercial` (not sure if this works, I cannot find examples)\n\n\n\nIt is also helpful to download the LICENSE text (linked to above) and store this\n\nin a LICENSE file in your repo. Now, you have properly configured your crate for use\n\nin a larger ecosystem.\n\n\n", "file_path": "Publishing.md", "rank": 26, "score": 1.3541290031794653 }, { "content": "# The meta folder\n\n\n\nThis folder is ignored via the `.genignore` file. It contains meta files\n\nthat should not make it into the generated project.\n\n\n\nIn particular, it is used for an AppVeyor CI script that runs on `cw-template`\n\nitself (running the cargo-generate script, then testing the generated project).\n\nThe `.circleci` and `.github` directories contain scripts destined for any projects created from\n\nthis template.\n\n\n\n## Files\n\n\n\n- `appveyor.yml`: The AppVeyor CI configuration\n\n- `test_generate.sh`: A script for generating a project from the template and\n\n runnings builds and tests in it. This works almost like the CI script but\n\n targets local UNIX-like dev environments.\n", "file_path": "meta/README.md", "rank": 27, "score": 1.0470501172632276 }, { "content": "## Decentralized Verification\n\n\n\nIt's not very practical to do a deep code review on every dependency you want to use,\n\nwhich is a big reason for the popularity of code audits in the blockchain world. We trust\n\nsome experts review in lieu of doing the work ourselves. 
But wouldn't it be nice to do this\n\nin a decentralized manner and peer-review each other's contracts? Bringing in deeper domain\n\nknowledge and saving fees.\n\n\n\nLuckily, there is an amazing project called [crev](https://github.com/crev-dev/cargo-crev/blob/master/cargo-crev/README.md)\n\nthat provides `A cryptographically verifiable code review system for the cargo (Rust) package manager`.\n\n\n\nI highly recommend that CosmWasm contract developers get set up with this. At minimum, we\n\ncan all add a review on a package that programmatically checked out that the json schemas\n\nand wasm bytecode do match the code, and publish our claim, so we don't all rely on some\n\ncentral server to say it validated this. As we go on, we can add deeper reviews on standard\n\npackages.\n\n\n\nIf you want to use `cargo-crev`, please follow their\n\n[getting started guide](https://github.com/crev-dev/cargo-crev/blob/master/cargo-crev/src/doc/getting_started.md)\n\nand once you have made your own *proof repository* with at least one *trust proof*,\n\nplease make a PR to the [`cawesome-wasm`]() repo with a link to your repo and\n\nsome public name or pseudonym that people know you by. This allows people who trust you\n\nto also reuse your proofs.\n\n\n\nThere is a [standard list of proof repos](https://github.com/crev-dev/cargo-crev/wiki/List-of-Proof-Repositories)\n\nwith some strong rust developers in there. This may cover dependencies like `serde` and `snafu`\n\nbut will not hit any CosmWasm-related modules, so we look to bootstrap a very focused\n\nreview community.\n", "file_path": "Importing.md", "rank": 28, "score": 0.9359873360483744 }, { "content": "# Developing\n\n\n\nIf you have recently created a contract with this template, you probably could use some\n\nhelp on how to build and test the contract, as well as prepare it for production. This\n\nfile attempts to provide a brief overview, assuming you have installed a recent\n\nversion of Rust already (eg. 
1.58.1+).\n\n\n\n## Prerequisites\n\n\n\nBefore starting, make sure you have [rustup](https://rustup.rs/) along with a\n\nrecent `rustc` and `cargo` version installed. Currently, we are testing on 1.58.1+.\n\n\n\nAnd you need to have the `wasm32-unknown-unknown` target installed as well.\n\n\n\nYou can check that via:\n\n\n\n```sh\n\nrustc --version\n\ncargo --version\n\nrustup target list --installed\n\n# if wasm32 is not listed above, run this\n\nrustup target add wasm32-unknown-unknown\n\n```\n\n\n\n## Compiling and running tests\n\n\n\nNow that you created your custom contract, make sure you can compile and run it before\n\nmaking any changes. Go into the repository and do:\n\n\n\n```sh\n\n# this will produce a wasm build in ./target/wasm32-unknown-unknown/release/YOUR_NAME_HERE.wasm\n\ncargo wasm\n\n\n\n# this runs unit tests with helpful backtraces\n\nRUST_BACKTRACE=1 cargo unit-test\n\n\n\n# auto-generate json schema\n\ncargo schema\n\n```\n\n\n\n### Understanding the tests\n\n\n\nThe main code is in `src/contract.rs` and the unit tests there run in pure rust,\n\nwhich makes them very quick to execute and give nice output on failures, especially\n\nif you do `RUST_BACKTRACE=1 cargo unit-test`.\n\n\n\nWe consider testing critical for anything on a blockchain, and recommend to always keep\n\nthe tests up to date.\n\n\n", "file_path": "Developing.md", "rank": 29, "score": 0.6613803089303942 }, { "content": "# CosmWasm Starter Pack\n\n\n\nThis is a template to build smart contracts in Rust to run inside a\n\n[Cosmos SDK](https://github.com/cosmos/cosmos-sdk) module on all chains that enable it.\n\nTo understand the framework better, please read the overview in the\n\n[cosmwasm repo](https://github.com/CosmWasm/cosmwasm/blob/master/README.md),\n\nand dig into the [cosmwasm docs](https://www.cosmwasm.com).\n\nThis assumes you understand the theory and just want to get coding.\n\n\n\n## Creating a new repo from template\n\n\n\nAssuming you have a recent version of 
rust and cargo (v1.58.1+) installed\n\n(via [rustup](https://rustup.rs/)),\n\nthen the following should get you a new repo to start a contract:\n\n\n\nInstall [cargo-generate](https://github.com/ashleygwilliams/cargo-generate) and cargo-run-script.\n\nUnless you did that before, run this line now:\n\n\n\n```sh\n\ncargo install cargo-generate --features vendored-openssl\n\ncargo install cargo-run-script\n\n```\n\n\n\nNow, use it to create your new contract.\n\nGo to the folder in which you want to place it and run:\n\n\n\n\n\n**Latest: 1.0.0-beta6**\n\n\n\n```sh\n\ncargo generate --git https://github.com/CosmWasm/cw-template.git --name PROJECT_NAME\n\n````\n\n\n\n**Older Version**\n\n\n\nPass version as branch flag:\n\n\n\n```sh\n\ncargo generate --git https://github.com/CosmWasm/cw-template.git --branch <version> --name PROJECT_NAME\n\n````\n\n\n\nExample:\n\n\n\n```sh\n\ncargo generate --git https://github.com/CosmWasm/cw-template.git --branch 0.16 --name PROJECT_NAME\n\n```\n\n\n\nYou will now have a new folder called `PROJECT_NAME` (I hope you changed that to something else)\n\ncontaining a simple working contract and build system that you can customize.\n\n\n\n## Create a Repo\n\n\n\nAfter generating, you have a initialized local git repo, but no commits, and no remote.\n\nGo to a server (eg. github) and create a new upstream repo (called `YOUR-GIT-URL` below).\n\nThen run the following:\n\n\n\n```sh\n\n# this is needed to create a valid Cargo.lock file (see below)\n\ncargo check\n\ngit branch -M main\n\ngit add .\n\ngit commit -m 'Initial Commit'\n\ngit remote add origin YOUR-GIT-URL\n\ngit push -u origin main\n\n```\n\n\n", "file_path": "README.md", "rank": 30, "score": 0.4728626351945735 } ]
Rust
src/asm/x86/predict.rs
kevleyski/rav1e
d9492a21b007eea38fa5a1409249e75502dfafa6
use crate::context::MAX_TX_SIZE; use crate::cpu_features::CpuFeatureLevel; use crate::predict::{ rust, IntraEdgeFilterParameters, PredictionMode, PredictionVariant, }; use crate::tiling::PlaneRegionMut; use crate::transform::TxSize; use crate::util::Aligned; use crate::Pixel; use v_frame::pixel::PixelType; macro_rules! decl_angular_ipred_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u8, stride: libc::ptrdiff_t, topleft: *const u8, width: libc::c_int, height: libc::c_int, angle: libc::c_int, ); )* } }; } decl_angular_ipred_fn! { rav1e_ipred_dc_avx2, rav1e_ipred_dc_ssse3, rav1e_ipred_dc_128_avx2, rav1e_ipred_dc_128_ssse3, rav1e_ipred_dc_left_avx2, rav1e_ipred_dc_left_ssse3, rav1e_ipred_dc_top_avx2, rav1e_ipred_dc_top_ssse3, rav1e_ipred_v_avx2, rav1e_ipred_v_ssse3, rav1e_ipred_h_avx2, rav1e_ipred_h_ssse3, rav1e_ipred_z1_avx2, rav1e_ipred_z3_avx2, rav1e_ipred_smooth_avx2, rav1e_ipred_smooth_ssse3, rav1e_ipred_smooth_v_avx2, rav1e_ipred_smooth_v_ssse3, rav1e_ipred_smooth_h_avx2, rav1e_ipred_smooth_h_ssse3, rav1e_ipred_paeth_avx2, rav1e_ipred_paeth_ssse3 } macro_rules! decl_angular_ipred_hbd_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u16, stride: libc::ptrdiff_t, topleft: *const u16, width: libc::c_int, height: libc::c_int, angle: libc::c_int, max_width: libc::c_int, max_height: libc::c_int, bit_depth_max: libc::c_int, ); )* } }; } decl_angular_ipred_hbd_fn! 
{ rav1e_ipred_dc_16bpc_avx2, rav1e_ipred_dc_128_16bpc_avx2, rav1e_ipred_dc_left_16bpc_avx2, rav1e_ipred_dc_top_16bpc_avx2, rav1e_ipred_v_16bpc_avx2, rav1e_ipred_h_16bpc_avx2, rav1e_ipred_z1_16bpc_avx2, rav1e_ipred_z3_16bpc_avx2, rav1e_ipred_smooth_16bpc_avx2, rav1e_ipred_smooth_v_16bpc_avx2, rav1e_ipred_smooth_h_16bpc_avx2, rav1e_ipred_paeth_16bpc_avx2 } extern { fn rav1e_ipred_z2_avx2( dst: *mut u8, stride: libc::ptrdiff_t, topleft: *const u8, width: libc::c_int, height: libc::c_int, angle: libc::c_int, dx: libc::c_int, dy: libc::c_int, ); fn rav1e_ipred_z2_16bpc_avx2( dst: *mut u16, stride: libc::ptrdiff_t, topleft: *const u16, width: libc::c_int, height: libc::c_int, angle: libc::c_int, dx: libc::c_int, dy: libc::c_int, bit_depth_max: libc::c_int, ); } macro_rules! decl_cfl_pred_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u8, stride: libc::ptrdiff_t, topleft: *const u8, width: libc::c_int, height: libc::c_int, ac: *const i16, alpha: libc::c_int, ); )* } }; } decl_cfl_pred_fn! { rav1e_ipred_cfl_avx2, rav1e_ipred_cfl_ssse3, rav1e_ipred_cfl_128_avx2, rav1e_ipred_cfl_128_ssse3, rav1e_ipred_cfl_left_avx2, rav1e_ipred_cfl_left_ssse3, rav1e_ipred_cfl_top_avx2, rav1e_ipred_cfl_top_ssse3 } macro_rules! decl_cfl_pred_hbd_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u16, stride: libc::ptrdiff_t, topleft: *const u16, width: libc::c_int, height: libc::c_int, ac: *const i16, alpha: libc::c_int, bit_depth_max: libc::c_int, ); )* } }; } decl_cfl_pred_hbd_fn! 
{ rav1e_ipred_cfl_16bpc_avx2, rav1e_ipred_cfl_128_16bpc_avx2, rav1e_ipred_cfl_left_16bpc_avx2, rav1e_ipred_cfl_top_16bpc_avx2 } #[inline(always)] pub fn dispatch_predict_intra<T: Pixel>( mode: PredictionMode, variant: PredictionVariant, dst: &mut PlaneRegionMut<'_, T>, tx_size: TxSize, bit_depth: usize, ac: &[i16], angle: isize, ief_params: Option<IntraEdgeFilterParameters>, edge_buf: &Aligned<[T; 4 * MAX_TX_SIZE + 1]>, cpu: CpuFeatureLevel, ) { let call_rust = |dst: &mut PlaneRegionMut<'_, T>| { rust::dispatch_predict_intra( mode, variant, dst, tx_size, bit_depth, ac, angle, ief_params, edge_buf, cpu, ); }; unsafe { let stride = T::to_asm_stride(dst.plane_cfg.stride) as libc::ptrdiff_t; let w = tx_size.width() as libc::c_int; let h = tx_size.height() as libc::c_int; let angle = angle as libc::c_int; match T::type_enum() { PixelType::U8 if cpu >= CpuFeatureLevel::SSSE3 => { let dst_ptr = dst.data_ptr_mut() as *mut _; let edge_ptr = edge_buf.data.as_ptr().offset(2 * MAX_TX_SIZE as isize) as *const _; if cpu >= CpuFeatureLevel::AVX2 { match mode { PredictionMode::DC_PRED => { (match variant { PredictionVariant::NONE => rav1e_ipred_dc_128_avx2, PredictionVariant::LEFT => rav1e_ipred_dc_left_avx2, PredictionVariant::TOP => rav1e_ipred_dc_top_avx2, PredictionVariant::BOTH => rav1e_ipred_dc_avx2, })(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::V_PRED if angle == 90 => { rav1e_ipred_v_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::H_PRED if angle == 180 => { rav1e_ipred_h_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::V_PRED | PredictionMode::H_PRED | PredictionMode::D45_PRED | PredictionMode::D135_PRED | PredictionMode::D113_PRED | PredictionMode::D157_PRED | PredictionMode::D203_PRED | PredictionMode::D67_PRED => { let (enable_ief, ief_smooth_filter) = if let Some(params) = ief_params { ( true as libc::c_int, params.use_smooth_filter() as libc::c_int, ) } else { (false as libc::c_int, false as libc::c_int) }; let angle_arg 
= angle | (enable_ief << 10) | (ief_smooth_filter << 9); let (bw, bh) = ( ((dst.plane_cfg.width + 7) >> 3) << 3, ((dst.plane_cfg.height + 7) >> 3) << 3, ); let (dx, dy) = ( (bw as isize - dst.rect().x as isize) as libc::c_int, (bh as isize - dst.rect().y as isize) as libc::c_int, ); if angle <= 90 { rav1e_ipred_z1_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, ); } else if angle < 180 { rav1e_ipred_z2_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, dx, dy, ); } else { rav1e_ipred_z3_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, ); } } PredictionMode::SMOOTH_PRED => { rav1e_ipred_smooth_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::SMOOTH_V_PRED => { rav1e_ipred_smooth_v_avx2( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::SMOOTH_H_PRED => { rav1e_ipred_smooth_h_avx2( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::PAETH_PRED => { rav1e_ipred_paeth_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::UV_CFL_PRED => { let ac_ptr = ac.as_ptr() as *const _; (match variant { PredictionVariant::NONE => rav1e_ipred_cfl_128_avx2, PredictionVariant::LEFT => rav1e_ipred_cfl_left_avx2, PredictionVariant::TOP => rav1e_ipred_cfl_top_avx2, PredictionVariant::BOTH => rav1e_ipred_cfl_avx2, })(dst_ptr, stride, edge_ptr, w, h, ac_ptr, angle); } _ => call_rust(dst), } } else if cpu >= CpuFeatureLevel::SSSE3 { match mode { PredictionMode::DC_PRED => { (match variant { PredictionVariant::NONE => rav1e_ipred_dc_128_ssse3, PredictionVariant::LEFT => rav1e_ipred_dc_left_ssse3, PredictionVariant::TOP => rav1e_ipred_dc_top_ssse3, PredictionVariant::BOTH => rav1e_ipred_dc_ssse3, })(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::V_PRED if angle == 90 => { rav1e_ipred_v_ssse3(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::H_PRED if angle == 180 => { rav1e_ipred_h_ssse3(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::SMOOTH_PRED => { rav1e_ipred_smooth_ssse3(dst_ptr, stride, edge_ptr, w, h, 
angle); } PredictionMode::SMOOTH_V_PRED => { rav1e_ipred_smooth_v_ssse3( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::SMOOTH_H_PRED => { rav1e_ipred_smooth_h_ssse3( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::PAETH_PRED => { rav1e_ipred_paeth_ssse3(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::UV_CFL_PRED => { let ac_ptr = ac.as_ptr() as *const _; (match variant { PredictionVariant::NONE => rav1e_ipred_cfl_128_ssse3, PredictionVariant::LEFT => rav1e_ipred_cfl_left_ssse3, PredictionVariant::TOP => rav1e_ipred_cfl_top_ssse3, PredictionVariant::BOTH => rav1e_ipred_cfl_ssse3, })(dst_ptr, stride, edge_ptr, w, h, ac_ptr, angle); } _ => call_rust(dst), } } } PixelType::U16 if cpu >= CpuFeatureLevel::AVX2 => { let dst_ptr = dst.data_ptr_mut() as *mut _; let edge_ptr = edge_buf.data.as_ptr().offset(2 * MAX_TX_SIZE as isize) as *const _; let bd_max = (1 << bit_depth) - 1; match mode { PredictionMode::DC_PRED => { (match variant { PredictionVariant::NONE => rav1e_ipred_dc_128_16bpc_avx2, PredictionVariant::LEFT => rav1e_ipred_dc_left_16bpc_avx2, PredictionVariant::TOP => rav1e_ipred_dc_top_16bpc_avx2, PredictionVariant::BOTH => rav1e_ipred_dc_16bpc_avx2, })( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max ); } PredictionMode::V_PRED if angle == 90 => { rav1e_ipred_v_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::H_PRED if angle == 180 => { rav1e_ipred_h_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::V_PRED | PredictionMode::H_PRED | PredictionMode::D45_PRED | PredictionMode::D135_PRED | PredictionMode::D113_PRED | PredictionMode::D157_PRED | PredictionMode::D203_PRED | PredictionMode::D67_PRED => { let (enable_ief, ief_smooth_filter) = if let Some(params) = ief_params { (true as libc::c_int, params.use_smooth_filter() as libc::c_int) } else { (false as libc::c_int, false as libc::c_int) }; let angle_arg = angle | (enable_ief << 10) | 
(ief_smooth_filter << 9); let (bw, bh) = ( ((dst.plane_cfg.width + 7) >> 3) << 3, ((dst.plane_cfg.height + 7) >> 3) << 3, ); let (dx, dy) = ( (bw as isize - dst.rect().x as isize) as libc::c_int, (bh as isize - dst.rect().y as isize) as libc::c_int, ); if angle <= 90 { rav1e_ipred_z1_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, 0, 0, bd_max, ); } else if angle < 180 { rav1e_ipred_z2_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, dx, dy, bd_max, ); } else { rav1e_ipred_z3_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, 0, 0, bd_max, ); } } PredictionMode::SMOOTH_PRED => { rav1e_ipred_smooth_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::SMOOTH_V_PRED => { rav1e_ipred_smooth_v_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::SMOOTH_H_PRED => { rav1e_ipred_smooth_h_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::PAETH_PRED => { rav1e_ipred_paeth_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::UV_CFL_PRED => { let ac_ptr = ac.as_ptr() as *const _; (match variant { PredictionVariant::NONE => rav1e_ipred_cfl_128_16bpc_avx2, PredictionVariant::LEFT => rav1e_ipred_cfl_left_16bpc_avx2, PredictionVariant::TOP => rav1e_ipred_cfl_top_16bpc_avx2, PredictionVariant::BOTH => rav1e_ipred_cfl_16bpc_avx2, })( dst_ptr, stride, edge_ptr, w, h, ac_ptr, angle, bd_max ); } _ => call_rust(dst), } } _ => call_rust(dst), } } }
use crate::context::MAX_TX_SIZE; use crate::cpu_features::CpuFeatureLevel; use crate::predict::{ rust, IntraEdgeFilterParameters, PredictionMode, PredictionVariant, }; use crate::tiling::PlaneRegionMut; use crate::transform::TxSize; use crate::util::Aligned; use crate::Pixel; use v_frame::pixel::PixelType; macro_rules! decl_angular_ipred_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u8, stride: libc::ptrdiff_t, topleft: *const u8, width: libc::c_int, height: libc::c_int, angle: libc::c_int, ); )* } }; } decl_angular_ipred_fn! { rav1e_ipred_dc_avx2, rav1e_ipred_dc_ssse3, rav1e_ipred_dc_128_avx2, rav1e_ipred_dc_128_ssse3, rav1e_ipred_dc_left_avx2, rav1e_ipred_dc_left_ssse3, rav1e_ipred_dc_top_avx2, rav1e_ipred_dc_top_ssse3, rav1e_ipred_v_avx2, rav1e_ipred_v_ssse3, rav1e_ipred_h_avx2, rav1e_ipred_h_ssse3, rav1e_ipred_z1_avx2, rav1e_ipred_z3_avx2, rav1e_ipred_smooth_avx2, rav1e_ipred_smooth_ssse3, rav1e_ipred_smooth_v_avx2, rav1e_ipred_smooth_v_ssse3, rav1e_ipred_smooth_h_avx2, rav1e_ipred_smooth_h_ssse3, rav1e_ipred_paeth_avx2, rav1e_ipred_paeth_ssse3 } macro_rules! decl_angular_ipred_hbd_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u16, stride: libc::ptrdiff_t, topleft: *const u16, width: libc::c_int, height: libc::c_int, angle: libc::c_int, max_width: libc::c_int, max_height: libc::c_int, bit_depth_max: libc::c_int, ); )* } }; } decl_angular_ipred_hbd_fn! 
{ rav1e_ipred_dc_16bpc_avx2, rav1e_ipred_dc_128_16bpc_avx2, rav1e_ipred_dc_left_16bpc_avx2, rav1e_ipred_dc_top_16bpc_avx2, rav1e_ipred_v_16bpc_avx2, rav1e_ipred_h_16bpc_avx2, rav1e_ipred_z1_16bpc_avx2, rav1e_ipred_z3_16bpc_avx2, rav1e_ipred_smooth_16bpc_avx2, rav1e_ipred_smooth_v_16bpc_avx2, rav1e_ipred_smooth_h_16bpc_avx2, rav1e_ipred_paeth_16bpc_avx2 } extern { fn rav1e_ipred_z2_avx2( dst: *mut u8, stride: libc::ptrdiff_t, topleft: *const u8, width: libc::c_int, height: libc::c_int, angle: libc::c_int, dx: libc::c_int, dy: libc::c_int, ); fn rav1e_ipred_z2_16bpc_avx2( dst: *mut u16, stride: libc::ptrdiff_t, topleft: *const u16, width: libc::c_int, height: libc::c_int, angle: libc::c_int, dx: libc::c_int, dy: libc::c_int, bit_depth_max: libc::c_int, ); } macro_rules! decl_cfl_pred_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u8, stride: libc::ptrdiff_t, topleft: *const u8, width: libc::c_int, height: libc::c_int, ac: *const i16, alpha: libc::c_int, ); )* } }; } decl_cfl_pred_fn! { rav1e_ipred_cfl_avx2, rav1e_ipred_cfl_ssse3, rav1e_ipred_cfl_128_avx2, rav1e_ipred_cfl_128_ssse3, rav1e_ipred_cfl_left_avx2, rav1e_ipred_cfl_left_ssse3, rav1e_ipred_cfl_top_avx2, rav1e_ipred_cfl_top_ssse3 } macro_rules! decl_cfl_pred_hbd_fn { ($($f:ident),+) => { extern { $( fn $f( dst: *mut u16, stride: libc::ptrdiff_t, topleft: *const u16, width: libc::c_int, height: libc::c_int, ac: *const i16, alpha: libc::c_int, bit_depth_max: libc::c_int, ); )* } }; } decl_cfl_pred_hbd_fn! 
{ rav1e_ipred_cfl_16bpc_avx2, rav1e_ipred_cfl_128_16bpc_avx2, rav1e_ipred_cfl_left_16bpc_avx2, rav1e_ipred_cfl_top_16bpc_avx2 } #[inline(always)] pub fn dispatch_predict_intra<T: Pixel>( mode: PredictionMode, variant: PredictionVariant, dst: &mut PlaneRegionMut<'_, T>, tx_size: TxSize, bit_depth: usize, ac: &[i16], angle: isize, ief_params: Option<IntraEdgeFilterParameters>, edge_buf: &Aligned<[T; 4 * MAX_TX_SIZE + 1]>, cpu: CpuFeatureLevel, ) { let call_rust = |dst: &mut PlaneRegionMut<'_, T>| { rust::dispatch_predict_intra( mode, variant, dst, tx_size, bit_depth, ac, angle, ief_params, edge_buf, cpu, ); }; unsafe { let stride = T::to_asm_stride(dst.plane_cfg.stride) as libc::ptrdiff_t; let w = tx_size.width() as libc::c_int; let h = tx_size.height() as libc::c_int; let angle = angle as libc::c_int; match T::type_enum() { PixelType::U8 if cpu >= CpuFeatureLevel::SSSE3 => { let dst_ptr = dst.data_ptr_mut() as *mut _; let edge_ptr = edge_buf.data.as_ptr().offset(2 * MAX_TX_SIZE as isize) as *const _; if cpu >= CpuFeatureLevel::AVX2 { match mode { PredictionMode::DC_PRED => { (match variant { PredictionVariant::NONE => rav1e_ipred_dc_128_avx2, PredictionVariant::LEFT => rav1e_ipred_dc_left_avx2, PredictionVariant::TOP => rav1e_ipred_dc_top_avx2, PredictionVariant::BOTH => rav1e_ipred_dc_avx2, })(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::V_PRED if angle == 90 => { rav1e_ipred_v_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::H_PRED if angle == 180 => { rav1e_ipred_h_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::V_PRED | PredictionMode::H_PRED | PredictionMode::D45_PRED | PredictionMode::D135_PRED | PredictionMode::D113_PRED | PredictionMode::D157_PRED | PredictionMode::D203_PRED | PredictionMode::D67_PRED => { let (enable_ief, ief_smooth_filter) = if let Some(params) = ief_params { ( true as libc::c_int, params.use_smooth_filter() as libc::c_int, ) } else { (false as libc::c_int, false as libc::c_int) }; let angle_arg 
= angle | (enable_ief << 10) | (ief_smooth_filter << 9); let (bw, bh) = ( ((dst.plane_cfg.width + 7) >> 3) << 3, ((dst.plane_cfg.height + 7) >> 3) << 3, ); let (dx, dy) = ( (bw as isize - dst.rect().x as isize) as libc::c_int, (bh as isize - dst.rect().y as isize) as libc::c_int, ); if angle <= 90 { rav1e_ipred_z1_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, ); } else if angle < 180 { rav1e_ipred_z2_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, dx, dy, ); } else { rav1e_ipred_z3_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, ); } } PredictionMode::SMOOTH_PRED => { rav1e_ipred_smooth_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::SMOOTH_V_PRED => { rav1e_ipred_smooth_v_avx2( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::SMOOTH_H_PRED => { rav1e_ipred_smooth_h_avx2( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::PAETH_PRED => { rav1e_ipred_paeth_avx2(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::UV_CFL_PRED => { let ac_ptr = ac.as_ptr() as *const _; (match variant { PredictionVariant::NONE => rav1e_ipred_cfl_128_avx2, PredictionVariant::LEFT => rav1e_ipred_cfl_left_avx2, PredictionVariant::TOP => rav1e_ipred_cfl_top_avx2, PredictionVariant::BOTH => rav1e_ipred_cfl_avx2, })(dst_ptr, stride, edge_ptr, w, h, ac_ptr, angle); } _ => call_rust(dst), } } else if cpu >= CpuFeatureLevel::SSSE3 { match mode { PredictionMode::DC_PRED => { (match variant { PredictionVariant::NONE => rav1e_ipred_dc_128_ssse3, PredictionVariant::LEFT => rav1e_ipred_dc_left_ssse3, PredictionVariant::TOP => rav1e_ipred_dc_top_ssse3, PredictionVariant::BOTH => rav1e_ipred_dc_ssse3, })(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::V_PRED if angle == 90 => { rav1e_ipred_v_ssse3(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::H_PRED if angle == 180 => { rav1e_ipred_h_ssse3(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::SMOOTH_PRED => { rav1e_ipred_smooth_ssse3(dst_ptr, stride, edge_ptr, w, h, 
angle); } PredictionMode::SMOOTH_V_PRED => { rav1e_ipred_smooth_v_ssse3( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::SMOOTH_H_PRED => { rav1e_ipred_smooth_h_ssse
3( dst_ptr, stride, edge_ptr, w, h, angle, ); } PredictionMode::PAETH_PRED => { rav1e_ipred_paeth_ssse3(dst_ptr, stride, edge_ptr, w, h, angle); } PredictionMode::UV_CFL_PRED => { let ac_ptr = ac.as_ptr() as *const _; (match variant { PredictionVariant::NONE => rav1e_ipred_cfl_128_ssse3, PredictionVariant::LEFT => rav1e_ipred_cfl_left_ssse3, PredictionVariant::TOP => rav1e_ipred_cfl_top_ssse3, PredictionVariant::BOTH => rav1e_ipred_cfl_ssse3, })(dst_ptr, stride, edge_ptr, w, h, ac_ptr, angle); } _ => call_rust(dst), } } } PixelType::U16 if cpu >= CpuFeatureLevel::AVX2 => { let dst_ptr = dst.data_ptr_mut() as *mut _; let edge_ptr = edge_buf.data.as_ptr().offset(2 * MAX_TX_SIZE as isize) as *const _; let bd_max = (1 << bit_depth) - 1; match mode { PredictionMode::DC_PRED => { (match variant { PredictionVariant::NONE => rav1e_ipred_dc_128_16bpc_avx2, PredictionVariant::LEFT => rav1e_ipred_dc_left_16bpc_avx2, PredictionVariant::TOP => rav1e_ipred_dc_top_16bpc_avx2, PredictionVariant::BOTH => rav1e_ipred_dc_16bpc_avx2, })( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max ); } PredictionMode::V_PRED if angle == 90 => { rav1e_ipred_v_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::H_PRED if angle == 180 => { rav1e_ipred_h_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::V_PRED | PredictionMode::H_PRED | PredictionMode::D45_PRED | PredictionMode::D135_PRED | PredictionMode::D113_PRED | PredictionMode::D157_PRED | PredictionMode::D203_PRED | PredictionMode::D67_PRED => { let (enable_ief, ief_smooth_filter) = if let Some(params) = ief_params { (true as libc::c_int, params.use_smooth_filter() as libc::c_int) } else { (false as libc::c_int, false as libc::c_int) }; let angle_arg = angle | (enable_ief << 10) | (ief_smooth_filter << 9); let (bw, bh) = ( ((dst.plane_cfg.width + 7) >> 3) << 3, ((dst.plane_cfg.height + 7) >> 3) << 3, ); let (dx, dy) = ( (bw as isize - dst.rect().x as isize) as 
libc::c_int, (bh as isize - dst.rect().y as isize) as libc::c_int, ); if angle <= 90 { rav1e_ipred_z1_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, 0, 0, bd_max, ); } else if angle < 180 { rav1e_ipred_z2_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, dx, dy, bd_max, ); } else { rav1e_ipred_z3_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle_arg, 0, 0, bd_max, ); } } PredictionMode::SMOOTH_PRED => { rav1e_ipred_smooth_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::SMOOTH_V_PRED => { rav1e_ipred_smooth_v_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::SMOOTH_H_PRED => { rav1e_ipred_smooth_h_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::PAETH_PRED => { rav1e_ipred_paeth_16bpc_avx2( dst_ptr, stride, edge_ptr, w, h, angle, 0, 0, bd_max, ); } PredictionMode::UV_CFL_PRED => { let ac_ptr = ac.as_ptr() as *const _; (match variant { PredictionVariant::NONE => rav1e_ipred_cfl_128_16bpc_avx2, PredictionVariant::LEFT => rav1e_ipred_cfl_left_16bpc_avx2, PredictionVariant::TOP => rav1e_ipred_cfl_top_16bpc_avx2, PredictionVariant::BOTH => rav1e_ipred_cfl_16bpc_avx2, })( dst_ptr, stride, edge_ptr, w, h, ac_ptr, angle, bd_max ); } _ => call_rust(dst), } } _ => call_rust(dst), } } }
function_block-function_prefixed
[ { "content": "pub fn ac_q(qindex: u8, delta_q: i8, bit_depth: usize) -> i16 {\n\n static AC_Q: [&[i16; 256]; 3] =\n\n [&ac_qlookup_Q3, &ac_qlookup_10_Q3, &ac_qlookup_12_Q3];\n\n let bd = ((bit_depth ^ 8) >> 1).min(2);\n\n AC_Q[bd][((qindex as isize + delta_q as isize).max(0) as usize).min(255)]\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 0, "score": 356816.5105940131 }, { "content": "fn init_plane_u16(width: usize, height: usize) -> Plane<u16> {\n\n let mut ra = ChaChaRng::from_seed([0; 32]);\n\n let data: Vec<u16> = (0..(width * height)).map(|_| ra.gen()).collect();\n\n Plane::from_slice(&data, width)\n\n}\n\n\n", "file_path": "benches/plane.rs", "rank": 1, "score": 336099.69881280366 }, { "content": "fn init_plane_u8(width: usize, height: usize) -> Plane<u8> {\n\n let mut ra = ChaChaRng::from_seed([0; 32]);\n\n let data: Vec<u8> = (0..(width * height)).map(|_| ra.gen()).collect();\n\n let out = Plane::from_slice(&data, width);\n\n if out.cfg.width % 2 == 0 && out.cfg.height % 2 == 0 {\n\n out\n\n } else {\n\n let xpad = out.cfg.width % 2;\n\n let ypad = out.cfg.height % 2;\n\n let mut padded =\n\n Plane::new(out.cfg.width, out.cfg.height, 0, 0, xpad, ypad);\n\n let mut padded_slice = padded.mut_slice(PlaneOffset { x: 0, y: 0 });\n\n for (dst_row, src_row) in padded_slice.rows_iter_mut().zip(out.rows_iter())\n\n {\n\n dst_row[..out.cfg.width].copy_from_slice(&src_row[..out.cfg.width]);\n\n }\n\n padded\n\n }\n\n}\n\n\n", "file_path": "benches/plane.rs", "rank": 2, "score": 336003.9589878691 }, { "content": "fn init_plane_u8(width: usize, height: usize, seed: u8) -> Plane<u8> {\n\n let mut ra = ChaChaRng::from_seed([seed; 32]);\n\n let data: Vec<u8> = (0..(width * height)).map(|_| ra.gen()).collect();\n\n Plane::from_slice(&data, width)\n\n}\n\n\n", "file_path": "benches/rdo.rs", "rank": 3, "score": 333651.53644385515 }, { "content": "pub fn dc_q(qindex: u8, delta_q: i8, bit_depth: usize) -> i16 {\n\n static DC_Q: [&[i16; 256]; 3] =\n\n [&dc_qlookup_Q3, 
&dc_qlookup_10_Q3, &dc_qlookup_12_Q3];\n\n let bd = ((bit_depth ^ 8) >> 1).min(2);\n\n DC_Q[bd][((qindex as isize + delta_q as isize).max(0) as usize).min(255)]\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 4, "score": 318120.397303572 }, { "content": "pub fn encode_block_with_modes<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w_pre_cdef: &mut W, w_post_cdef: &mut W,\n\n bsize: BlockSize, tile_bo: TileBlockOffset,\n\n mode_decision: &PartitionParameters, rdo_type: RDOType, record_stats: bool,\n\n) {\n\n let (mode_luma, mode_chroma) =\n\n (mode_decision.pred_mode_luma, mode_decision.pred_mode_chroma);\n\n let cfl = mode_decision.pred_cfl_params;\n\n let ref_frames = mode_decision.ref_frames;\n\n let mvs = mode_decision.mvs;\n\n let mut skip = mode_decision.skip;\n\n let mut cdef_coded = cw.bc.cdef_coded;\n\n\n\n // Set correct segmentation ID before encoding and before\n\n // rdo_tx_size_type().\n\n cw.bc.blocks.set_segmentation_idx(tile_bo, bsize, mode_decision.sidx);\n\n\n\n let mut mv_stack = ArrayVec::<CandidateMV, 9>::new();\n\n let is_compound = ref_frames[1] != NONE_FRAME;\n", "file_path": "src/encoder.rs", "rank": 5, "score": 317710.9799100305 }, { "content": "fn cdef_frame_bench(b: &mut Bencher, width: usize, height: usize) {\n\n let config = Arc::new(EncoderConfig {\n\n width,\n\n height,\n\n quantizer: 100,\n\n speed_settings: SpeedSettings::from_preset(10),\n\n ..Default::default()\n\n });\n\n let sequence = Arc::new(Sequence::new(&Default::default()));\n\n let fi = FrameInvariants::<u16>::new(config, sequence);\n\n let fb = FrameBlocks::new(fi.sb_width * 16, fi.sb_height * 16);\n\n let mut fs = FrameState::new(&fi);\n\n let in_frame = fs.rec.clone();\n\n let mut ts = fs.as_tile_state_mut();\n\n\n\n b.iter(|| {\n\n cdef_filter_tile(&fi, &in_frame, &fb.as_tile_blocks(), &mut ts.rec)\n\n });\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 6, "score": 316432.55497430847 }, { 
"content": "pub fn intra_mode_to_angle(mode: PredictionMode) -> isize {\n\n match mode {\n\n PredictionMode::V_PRED => 90,\n\n PredictionMode::H_PRED => 180,\n\n PredictionMode::D45_PRED => 45,\n\n PredictionMode::D135_PRED => 135,\n\n PredictionMode::D113_PRED => 113,\n\n PredictionMode::D157_PRED => 157,\n\n PredictionMode::D203_PRED => 203,\n\n PredictionMode::D67_PRED => 67,\n\n _ => 0,\n\n }\n\n}\n\n\n\nimpl PredictionMode {\n\n #[inline]\n\n pub fn is_compound(self) -> bool {\n\n self >= PredictionMode::NEAREST_NEARESTMV\n\n }\n\n #[inline]\n", "file_path": "src/predict.rs", "rank": 7, "score": 313714.6833855054 }, { "content": "pub fn select_ac_qi(quantizer: i64, bit_depth: usize) -> u8 {\n\n let qlookup = match bit_depth {\n\n 8 => &ac_qlookup_Q3,\n\n 10 => &ac_qlookup_10_Q3,\n\n 12 => &ac_qlookup_12_Q3,\n\n _ => unimplemented!(),\n\n };\n\n select_qi(quantizer, qlookup)\n\n}\n\n\n\n#[derive(Debug, Default, Clone, Copy)]\n\npub struct QuantizationContext {\n\n log_tx_scale: usize,\n\n dc_quant: u32,\n\n dc_offset: u32,\n\n dc_mul_add: (u32, u32, u32),\n\n\n\n ac_quant: u32,\n\n ac_offset_eob: u32,\n\n ac_offset0: u32,\n\n ac_offset1: u32,\n\n ac_mul_add: (u32, u32, u32),\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 8, "score": 302200.35273751925 }, { "content": "pub fn bench_pred_fn<F>(c: &mut Criterion, id: &str, f: F)\n\nwhere\n\n F: FnMut(&mut Bencher) + 'static,\n\n{\n\n let b = Benchmark::new(id, f);\n\n c.bench(\n\n id,\n\n if id.ends_with(\"_4x4_u8\") {\n\n b.throughput(Throughput::Bytes(16))\n\n } else if id.ends_with(\"_4x4\") {\n\n b.throughput(Throughput::Bytes(32))\n\n } else {\n\n b\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/predict.rs", "rank": 9, "score": 292381.1723932019 }, { "content": "pub fn write_tx_tree<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w: &mut W, luma_mode: PredictionMode,\n\n angle_delta_y: i8, tile_bo: TileBlockOffset, bsize: BlockSize,\n\n 
tx_size: TxSize, tx_type: TxType, skip: bool, luma_only: bool,\n\n rdo_type: RDOType, need_recon_pixel: bool,\n\n) -> (bool, ScaledDistortion) {\n\n if skip {\n\n return (false, ScaledDistortion::zero());\n\n }\n\n let bw = bsize.width_mi() / tx_size.width_mi();\n\n let bh = bsize.height_mi() / tx_size.height_mi();\n\n let qidx = get_qidx(fi, ts, cw, tile_bo);\n\n\n\n let PlaneConfig { xdec, ydec, .. } = ts.input.planes[1].cfg;\n\n let ac = &[0i16; 0];\n\n let mut partition_has_coeff: bool = false;\n\n let mut tx_dist = ScaledDistortion::zero();\n\n\n\n ts.qc.update(\n", "file_path": "src/encoder.rs", "rank": 10, "score": 276827.29786910804 }, { "content": "// Passed in a superblock offset representing the upper left corner of\n\n// the LRU area we're optimizing. This area covers the largest LRU in\n\n// any of the present planes, but may consist of a number of\n\n// superblocks and full, smaller LRUs in the other planes\n\npub fn rdo_loop_decision<T: Pixel, W: Writer>(\n\n base_sbo: TileSuperBlockOffset, fi: &FrameInvariants<T>,\n\n ts: &mut TileStateMut<'_, T>, cw: &mut ContextWriter, w: &mut W,\n\n deblock_p: bool,\n\n) {\n\n let planes = if fi.sequence.chroma_sampling == ChromaSampling::Cs400 {\n\n 1\n\n } else {\n\n MAX_PLANES\n\n };\n\n assert!(fi.sequence.enable_cdef || fi.sequence.enable_restoration);\n\n // Determine area of optimization: Which plane has the largest LRUs?\n\n // How many LRUs for each?\n\n let mut sb_w = 1; // how many superblocks wide the largest LRU\n\n // is/how many SBs we're processing (same thing)\n\n let mut sb_h = 1; // how many superblocks wide the largest LRU\n\n // is/how many SBs we're processing (same thing)\n\n let mut lru_w = [0; MAX_PLANES]; // how many LRUs we're processing\n\n let mut lru_h = [0; MAX_PLANES]; // how many LRUs we're processing\n\n for pli in 0..planes {\n", "file_path": "src/rdo.rs", "rank": 11, "score": 276827.29786910804 }, { "content": "// For a transform block,\n\n// predict, transform, quantize, write 
coefficients to a bitstream,\n\n// dequantize, inverse-transform.\n\npub fn encode_tx_block<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>,\n\n ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter,\n\n w: &mut W,\n\n p: usize,\n\n // Offset in the luma plane of the partition enclosing this block.\n\n tile_partition_bo: TileBlockOffset,\n\n // tx block position within a partition, unit: tx block number\n\n bx: usize,\n\n by: usize,\n\n // Offset in the luma plane where this tx block is colocated. Note that for\n\n // a chroma block, this offset might be outside of the current partition.\n\n // For example in 4:2:0, four 4x4 luma partitions share one 4x4 chroma block,\n\n // this block is part of the last 4x4 partition, but its `tx_bo` offset\n\n // matches the offset of the first 4x4 partition.\n\n tx_bo: TileBlockOffset,\n\n mode: PredictionMode,\n\n tx_size: TxSize,\n\n tx_type: TxType,\n", "file_path": "src/encoder.rs", "rank": 12, "score": 276827.29786910804 }, { "content": "// RDO-based single level partitioning decision\n\npub fn rdo_partition_decision<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w_pre_cdef: &mut W, w_post_cdef: &mut W,\n\n bsize: BlockSize, tile_bo: TileBlockOffset,\n\n cached_block: &PartitionGroupParameters, partition_types: &[PartitionType],\n\n rdo_type: RDOType, inter_cfg: &InterConfig,\n\n) -> PartitionGroupParameters {\n\n let mut best_partition = cached_block.part_type;\n\n let mut best_rd = cached_block.rd_cost;\n\n let mut best_pred_modes = cached_block.part_modes.clone();\n\n\n\n let cw_checkpoint = cw.checkpoint(&tile_bo, fi.sequence.chroma_sampling);\n\n let w_pre_checkpoint = w_pre_cdef.checkpoint();\n\n let w_post_checkpoint = w_post_cdef.checkpoint();\n\n\n\n for &partition in partition_types {\n\n // Do not re-encode results we already have\n\n if partition == cached_block.part_type {\n\n continue;\n\n }\n", "file_path": "src/rdo.rs", "rank": 13, "score": 
276827.29786910804 }, { "content": "pub fn write_tx_blocks<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w: &mut W, luma_mode: PredictionMode,\n\n chroma_mode: PredictionMode, angle_delta: AngleDelta,\n\n tile_bo: TileBlockOffset, bsize: BlockSize, tx_size: TxSize,\n\n tx_type: TxType, skip: bool, cfl: CFLParams, luma_only: bool,\n\n rdo_type: RDOType, need_recon_pixel: bool,\n\n) -> (bool, ScaledDistortion) {\n\n let bw = bsize.width_mi() / tx_size.width_mi();\n\n let bh = bsize.height_mi() / tx_size.height_mi();\n\n let qidx = get_qidx(fi, ts, cw, tile_bo);\n\n assert_ne!(qidx, 0); // lossless is not yet supported\n\n\n\n let PlaneConfig { xdec, ydec, .. } = ts.input.planes[1].cfg;\n\n let mut ac: Aligned<[i16; 32 * 32]> = Aligned::uninitialized();\n\n let mut partition_has_coeff: bool = false;\n\n let mut tx_dist = ScaledDistortion::zero();\n\n let do_chroma =\n\n has_chroma(tile_bo, bsize, xdec, ydec, fi.sequence.chroma_sampling);\n\n\n", "file_path": "src/encoder.rs", "rank": 14, "score": 276827.29786910804 }, { "content": "pub fn encode_block_pre_cdef<T: Pixel, W: Writer>(\n\n seq: &Sequence, ts: &TileStateMut<'_, T>, cw: &mut ContextWriter, w: &mut W,\n\n bsize: BlockSize, tile_bo: TileBlockOffset, skip: bool,\n\n) -> bool {\n\n cw.bc.blocks.set_skip(tile_bo, bsize, skip);\n\n if ts.segmentation.enabled\n\n && ts.segmentation.update_map\n\n && ts.segmentation.preskip\n\n {\n\n cw.write_segmentation(\n\n w,\n\n tile_bo,\n\n bsize,\n\n false,\n\n ts.segmentation.last_active_segid,\n\n );\n\n }\n\n cw.write_skip(w, tile_bo, skip);\n\n if ts.segmentation.enabled\n\n && ts.segmentation.update_map\n", "file_path": "src/encoder.rs", "rank": 15, "score": 272347.8962845948 }, { "content": "pub fn encode_block_post_cdef<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w: &mut W, luma_mode: PredictionMode,\n\n chroma_mode: PredictionMode, 
angle_delta: AngleDelta,\n\n ref_frames: [RefType; 2], mvs: [MotionVector; 2], bsize: BlockSize,\n\n tile_bo: TileBlockOffset, skip: bool, cfl: CFLParams, tx_size: TxSize,\n\n tx_type: TxType, mode_context: usize, mv_stack: &[CandidateMV],\n\n rdo_type: RDOType, need_recon_pixel: bool, record_stats: bool,\n\n) -> (bool, ScaledDistortion) {\n\n let planes =\n\n if fi.sequence.chroma_sampling == ChromaSampling::Cs400 { 1 } else { 3 };\n\n let is_inter = !luma_mode.is_intra();\n\n if is_inter {\n\n assert!(luma_mode == chroma_mode);\n\n };\n\n let sb_size = if fi.sequence.use_128x128_superblock {\n\n BlockSize::BLOCK_128X128\n\n } else {\n\n BlockSize::BLOCK_64X64\n\n };\n", "file_path": "src/encoder.rs", "rank": 16, "score": 272347.8962845948 }, { "content": "pub fn luma_ac<T: Pixel>(\n\n ac: &mut [i16], ts: &mut TileStateMut<'_, T>, tile_bo: TileBlockOffset,\n\n bsize: BlockSize, tx_size: TxSize, fi: &FrameInvariants<T>,\n\n) {\n\n let PlaneConfig { xdec, ydec, .. } = ts.input.planes[1].cfg;\n\n let plane_bsize = bsize.subsampled_size(xdec, ydec);\n\n let bo = if bsize.is_sub8x8(xdec, ydec) {\n\n let offset = bsize.sub8x8_offset(xdec, ydec);\n\n tile_bo.with_offset(offset.0, offset.1)\n\n } else {\n\n tile_bo\n\n };\n\n let rec = &ts.rec.planes[0];\n\n let luma = &rec.subregion(Area::BlockStartingAt { bo: bo.0 });\n\n let frame_bo = ts.to_frame_block_offset(bo);\n\n\n\n let frame_clipped_bw: usize =\n\n ((fi.w_in_b - frame_bo.0.x) << MI_SIZE_LOG2).min(bsize.width());\n\n let frame_clipped_bh: usize =\n\n ((fi.h_in_b - frame_bo.0.y) << MI_SIZE_LOG2).min(bsize.height());\n", "file_path": "src/encoder.rs", "rank": 17, "score": 271816.96823271643 }, { "content": "/// Sum of Squared Error for a wxh block\n\n/// Currently limited to w and h of valid blocks\n\npub fn sse_wxh<T: Pixel, F: Fn(Area, BlockSize) -> DistortionScale>(\n\n src1: &PlaneRegion<'_, T>, src2: &PlaneRegion<'_, T>, w: usize, h: usize,\n\n compute_bias: F, bit_depth: usize, cpu: CpuFeatureLevel,\n\n) -> 
Distortion {\n\n // See get_weighted_sse in src/dist.rs.\n\n // Provide a scale to get_weighted_sse for each square region of this size.\n\n const CHUNK_SIZE: usize = IMPORTANCE_BLOCK_SIZE >> 1;\n\n\n\n // To bias the distortion correctly, compute it in blocks up to the size\n\n // importance block size in a non-subsampled plane.\n\n let imp_block_w = CHUNK_SIZE << src1.plane_cfg.xdec;\n\n let imp_block_h = CHUNK_SIZE << src1.plane_cfg.ydec;\n\n\n\n let imp_bsize = BlockSize::from_width_and_height(imp_block_w, imp_block_h);\n\n\n\n let n_imp_blocks_w = (w + CHUNK_SIZE - 1) / CHUNK_SIZE;\n\n let n_imp_blocks_h = (h + CHUNK_SIZE - 1) / CHUNK_SIZE;\n\n\n\n // TODO: Copying biases into a buffer is slow. It would be best if biases were\n\n // passed directly. To do this, we would need different versions of the\n", "file_path": "src/rdo.rs", "rank": 18, "score": 266962.25576061645 }, { "content": "pub fn rdo_cfl_alpha<T: Pixel>(\n\n ts: &mut TileStateMut<'_, T>, tile_bo: TileBlockOffset, bsize: BlockSize,\n\n luma_tx_size: TxSize, fi: &FrameInvariants<T>,\n\n) -> Option<CFLParams> {\n\n let PlaneConfig { xdec, ydec, .. 
} = ts.input.planes[1].cfg;\n\n let uv_tx_size = bsize.largest_chroma_tx_size(xdec, ydec);\n\n debug_assert!(bsize.subsampled_size(xdec, ydec) == uv_tx_size.block_size());\n\n\n\n let frame_bo = ts.to_frame_block_offset(tile_bo);\n\n let (visible_tx_w, visible_tx_h) = clip_visible_bsize(\n\n (fi.width + xdec) >> xdec,\n\n (fi.height + ydec) >> ydec,\n\n uv_tx_size.block_size(),\n\n (frame_bo.0.x << MI_SIZE_LOG2) >> xdec,\n\n (frame_bo.0.y << MI_SIZE_LOG2) >> ydec,\n\n );\n\n\n\n if visible_tx_w == 0 || visible_tx_h == 0 {\n\n return None;\n\n };\n", "file_path": "src/rdo.rs", "rank": 19, "score": 266480.38269996026 }, { "content": "// RDO-based mode decision\n\npub fn rdo_mode_decision<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, bsize: BlockSize, tile_bo: TileBlockOffset,\n\n inter_cfg: &InterConfig,\n\n) -> PartitionParameters {\n\n let PlaneConfig { xdec, ydec, .. } = ts.input.planes[1].cfg;\n\n let cw_checkpoint = cw.checkpoint(&tile_bo, fi.sequence.chroma_sampling);\n\n\n\n let rdo_type = if fi.use_tx_domain_rate {\n\n RDOType::TxDistEstRate\n\n } else if fi.use_tx_domain_distortion {\n\n RDOType::TxDistRealRate\n\n } else {\n\n RDOType::PixelDistRealRate\n\n };\n\n\n\n let mut best = if fi.frame_type.has_inter() {\n\n assert!(fi.frame_type != FrameType::KEY);\n\n\n\n inter_frame_rdo_mode_decision(\n", "file_path": "src/rdo.rs", "rank": 20, "score": 266475.803196874 }, { "content": "pub fn select_dc_qi(quantizer: i64, bit_depth: usize) -> u8 {\n\n let qlookup = match bit_depth {\n\n 8 => &dc_qlookup_Q3,\n\n 10 => &dc_qlookup_10_Q3,\n\n 12 => &dc_qlookup_12_Q3,\n\n _ => unimplemented!(),\n\n };\n\n select_qi(quantizer, qlookup)\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 21, "score": 263504.2394470781 }, { "content": "#[allow(unused)]\n\npub fn cdef_dist_wxh<T: Pixel, F: Fn(Area, BlockSize) -> DistortionScale>(\n\n src1: &PlaneRegion<'_, T>, src2: &PlaneRegion<'_, T>, w: usize, h: usize,\n\n 
bit_depth: usize, compute_bias: F,\n\n) -> Distortion {\n\n assert!(w & 0x7 == 0);\n\n assert!(h & 0x7 == 0);\n\n debug_assert!(src1.plane_cfg.xdec == 0);\n\n debug_assert!(src1.plane_cfg.ydec == 0);\n\n debug_assert!(src2.plane_cfg.xdec == 0);\n\n debug_assert!(src2.plane_cfg.ydec == 0);\n\n\n\n let mut sum = Distortion::zero();\n\n for j in 0isize..h as isize / 8 {\n\n for i in 0isize..w as isize / 8 {\n\n let area = Area::StartingAt { x: i * 8, y: j * 8 };\n\n let value = cdef_dist_wxh_8x8(\n\n &src1.subregion(area),\n\n &src2.subregion(area),\n\n bit_depth,\n\n );\n\n\n\n // cdef is always called on non-subsampled planes, so BLOCK_8X8 is\n\n // correct here.\n\n sum += value * compute_bias(area, BlockSize::BLOCK_8X8);\n\n }\n\n }\n\n sum\n\n}\n\n\n", "file_path": "src/rdo.rs", "rank": 22, "score": 263382.56661994057 }, { "content": "#[inline(always)]\n\npub fn update_cdf(cdf: &mut [u16], val: u32) {\n\n if cdf.len() == 4 {\n\n return unsafe {\n\n update_cdf_4_sse2(cdf, val);\n\n };\n\n }\n\n\n\n rust::update_cdf(cdf, val);\n\n}\n\n\n\n#[target_feature(enable = \"sse2\")]\n\n#[inline]\n\nunsafe fn update_cdf_4_sse2(cdf: &mut [u16], val: u32) {\n\n let nsymbs = 4;\n\n let rate = 5 + (cdf[nsymbs - 1] >> 4) as usize;\n\n let count = cdf[nsymbs - 1] + (cdf[nsymbs - 1] < 32) as u16;\n\n\n\n // A bit of explanation of what is happening down here. 
First of all, let's look at the simple\n\n // implementation:\n\n //\n", "file_path": "src/asm/x86/ec.rs", "rank": 23, "score": 262964.93743680726 }, { "content": "pub fn get_log_tx_scale(tx_size: TxSize) -> usize {\n\n let num_pixels = tx_size.area();\n\n\n\n Into::<usize>::into(num_pixels > 256)\n\n + Into::<usize>::into(num_pixels > 1024)\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 24, "score": 237567.00641887277 }, { "content": "pub fn motion_estimation<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &TileStateMut<'_, T>, bsize: BlockSize,\n\n tile_bo: TileBlockOffset, ref_frame: RefType, pmv: [MotionVector; 2],\n\n) -> (MotionVector, u32) {\n\n match fi.rec_buffer.frames[fi.ref_frames[ref_frame.to_index()] as usize] {\n\n Some(ref rec) => {\n\n let blk_w = bsize.width();\n\n let blk_h = bsize.height();\n\n let frame_bo = ts.to_frame_block_offset(tile_bo);\n\n let (mvx_min, mvx_max, mvy_min, mvy_max) =\n\n get_mv_range(fi.w_in_b, fi.h_in_b, frame_bo, blk_w, blk_h);\n\n\n\n // 0.5 is a fudge factor\n\n let lambda = (fi.me_lambda * 256.0 * 0.5) as u32;\n\n\n\n // Full-pixel motion estimation\n\n\n\n let po = frame_bo.to_luma_plane_offset();\n\n let area = Area::BlockStartingAt { bo: tile_bo.0 };\n\n let org_region: &PlaneRegion<T> =\n", "file_path": "src/me.rs", "rank": 25, "score": 231370.61263928784 }, { "content": "#[inline(always)]\n\npub fn get_mv_class(z: u32, offset: &mut u32) -> usize {\n\n let c = if z >= CLASS0_SIZE as u32 * 4096 {\n\n MV_CLASS_10\n\n } else {\n\n log_in_base_2(z >> 3) as usize\n\n };\n\n\n\n *offset = z - mv_class_base(c);\n\n c\n\n}\n\n\n\nimpl<'a> ContextWriter<'a> {\n\n pub fn encode_mv_component<W: Writer>(\n\n &mut self, w: &mut W, comp: i32, axis: usize, precision: MvSubpelPrecision,\n\n ) {\n\n assert!(comp != 0);\n\n assert!(MV_LOW <= comp && comp <= MV_UPP);\n\n let mvcomp = &mut self.fc.nmv_context.comps[axis];\n\n let mut offset: u32 = 0;\n\n let sign: u32 = if comp < 0 { 1 } else { 0 };\n", "file_path": 
"src/context/mod.rs", "rank": 26, "score": 229996.46847388704 }, { "content": "#[hawktracer(deblock_filter_optimize)]\n\npub fn deblock_filter_optimize<T: Pixel, U: Pixel>(\n\n fi: &FrameInvariants<T>, rec: &Tile<U>, input: &Tile<U>,\n\n blocks: &TileBlocks, crop_w: usize, crop_h: usize,\n\n) -> [u8; 4] {\n\n if fi.config.speed_settings.fast_deblock {\n\n let q = ac_q(fi.base_q_idx, 0, fi.sequence.bit_depth) as i32;\n\n let level = clamp(\n\n match fi.sequence.bit_depth {\n\n 8 => {\n\n if fi.frame_type == FrameType::KEY {\n\n (q * 17563 - 421_574 + (1 << 18 >> 1)) >> 18\n\n } else {\n\n (q * 6017 + 650_707 + (1 << 18 >> 1)) >> 18\n\n }\n\n }\n\n 10 => {\n\n if fi.frame_type == FrameType::KEY {\n\n ((q * 20723 + 4_060_632 + (1 << 20 >> 1)) >> 20) - 4\n\n } else {\n\n (q * 20723 + 4_060_632 + (1 << 20 >> 1)) >> 20\n", "file_path": "src/deblock.rs", "rank": 27, "score": 229220.26241383044 }, { "content": "pub fn sgrproj_stripe_filter<T: Pixel, U: Pixel>(\n\n set: u8, xqd: [i8; 2], fi: &FrameInvariants<T>,\n\n integral_image_buffer: &IntegralImageBuffer, integral_image_stride: usize,\n\n cdeffed: &PlaneSlice<U>, out: &mut PlaneRegionMut<U>,\n\n) {\n\n let &Rect { width: stripe_w, height: stripe_h, .. 
} = out.rect();\n\n let bdm8 = fi.sequence.bit_depth - 8;\n\n let mut a_r2: [[u32; IMAGE_WIDTH_MAX + 2]; 2] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 2];\n\n let mut b_r2: [[u32; IMAGE_WIDTH_MAX + 2]; 2] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 2];\n\n let mut f_r2_0: [u32; IMAGE_WIDTH_MAX] = [0; IMAGE_WIDTH_MAX];\n\n let mut f_r2_1: [u32; IMAGE_WIDTH_MAX] = [0; IMAGE_WIDTH_MAX];\n\n let mut a_r1: [[u32; IMAGE_WIDTH_MAX + 2]; 3] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 3];\n\n let mut b_r1: [[u32; IMAGE_WIDTH_MAX + 2]; 3] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 3];\n\n let mut f_r1: [u32; IMAGE_WIDTH_MAX] = [0; IMAGE_WIDTH_MAX];\n\n\n\n let s_r2: u32 = SGRPROJ_PARAMS_S[set as usize][0];\n", "file_path": "src/lrf.rs", "rank": 28, "score": 229220.26241383044 }, { "content": "/// Compute a scaling factor to multiply the distortion of a block by,\n\n/// this factor is determined using temporal RDO.\n\npub fn distortion_scale<T: Pixel>(\n\n fi: &FrameInvariants<T>, frame_bo: PlaneBlockOffset, bsize: BlockSize,\n\n) -> DistortionScale {\n\n if !fi.config.temporal_rdo() {\n\n return DistortionScale::default();\n\n }\n\n // EncoderConfig::temporal_rdo() should always return false in situations\n\n // where distortion is computed on > 8x8 blocks, so we should never hit this\n\n // assert.\n\n assert!(bsize <= BlockSize::BLOCK_8X8);\n\n\n\n let x = frame_bo.0.x >> IMPORTANCE_BLOCK_TO_BLOCK_SHIFT;\n\n let y = frame_bo.0.y >> IMPORTANCE_BLOCK_TO_BLOCK_SHIFT;\n\n\n\n fi.distortion_scales[y * fi.w_in_imp_b + x]\n\n}\n\n\n", "file_path": "src/rdo.rs", "rank": 29, "score": 227126.36708442675 }, { "content": "#[hawktracer(estimate_tile_motion)]\n\npub fn estimate_tile_motion<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n inter_cfg: &InterConfig,\n\n) {\n\n let init_size = MIB_SIZE_LOG2;\n\n for mv_size_log2 in (2..=init_size).rev() {\n\n let init = mv_size_log2 == init_size;\n\n\n\n // Choose subsampling. 
Pass one is quarter res and pass two is at half res.\n\n let ssdec = match init_size - mv_size_log2 {\n\n 0 => 2,\n\n 1 => 1,\n\n _ => 0,\n\n };\n\n\n\n for sby in 0..ts.sb_height {\n\n for sbx in 0..ts.sb_width {\n\n let mut tested_frames_flags = 0;\n\n for &ref_frame in inter_cfg.allowed_ref_frames() {\n\n let frame_flag = 1 << fi.ref_frames[ref_frame.to_index()];\n", "file_path": "src/me.rs", "rank": 30, "score": 227120.98120330484 }, { "content": "pub fn motion_compensate<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, luma_mode: PredictionMode, ref_frames: [RefType; 2],\n\n mvs: [MotionVector; 2], bsize: BlockSize, tile_bo: TileBlockOffset,\n\n luma_only: bool,\n\n) {\n\n debug_assert!(!luma_mode.is_intra());\n\n\n\n let PlaneConfig { xdec: u_xdec, ydec: u_ydec, .. } = ts.input.planes[1].cfg;\n\n\n\n // Inter mode prediction can take place once for a whole partition,\n\n // instead of each tx-block.\n\n let num_planes = 1\n\n + if !luma_only\n\n && has_chroma(\n\n tile_bo,\n\n bsize,\n\n u_xdec,\n\n u_ydec,\n\n fi.sequence.chroma_sampling,\n", "file_path": "src/encoder.rs", "rank": 31, "score": 227120.98120330484 }, { "content": "pub fn intra_bench<T: Pixel>(\n\n b: &mut Bencher, mode: PredictionMode, variant: PredictionVariant,\n\n) {\n\n let mut rng = ChaChaRng::from_seed([0; 32]);\n\n let mut edge_buf = Aligned::uninitialized();\n\n let (mut block, ac) = generate_block::<T>(&mut rng, &mut edge_buf);\n\n let cpu = CpuFeatureLevel::default();\n\n let bitdepth = match T::type_enum() {\n\n PixelType::U8 => 8,\n\n PixelType::U16 => 10,\n\n };\n\n let angle = match mode {\n\n PredictionMode::V_PRED => 90,\n\n PredictionMode::H_PRED => 180,\n\n _ => 0,\n\n };\n\n b.iter(|| {\n\n dispatch_predict_intra::<T>(\n\n mode,\n\n variant,\n", "file_path": "benches/predict.rs", "rank": 32, "score": 227120.98120330484 }, { "content": "pub fn generate_block<T: Pixel>(\n\n rng: &mut ChaChaRng, edge_buf: &mut Aligned<[T; 
257]>,\n\n) -> (Plane<T>, Vec<i16>) {\n\n let block = Plane::from_slice(\n\n &vec![T::cast_from(0); BLOCK_SIZE.width() * BLOCK_SIZE.height()],\n\n BLOCK_SIZE.width(),\n\n );\n\n let ac: Vec<i16> = (0..(32 * 32)).map(|_| rng.gen()).collect();\n\n for v in edge_buf.data.iter_mut() {\n\n *v = T::cast_from(rng.gen::<u8>());\n\n }\n\n\n\n (block, ac)\n\n}\n\n\n", "file_path": "benches/predict.rs", "rank": 33, "score": 227120.98120330484 }, { "content": "#[hawktracer(deblock_plane)]\n\npub fn deblock_plane<T: Pixel>(\n\n deblock: &DeblockState, p: &mut PlaneRegionMut<T>, pli: usize,\n\n blocks: &TileBlocks, crop_w: usize, crop_h: usize, bd: usize,\n\n) {\n\n let xdec = p.plane_cfg.xdec;\n\n let ydec = p.plane_cfg.ydec;\n\n assert!(xdec <= 1 && ydec <= 1);\n\n\n\n match pli {\n\n 0 => {\n\n if deblock.levels[0] == 0 && deblock.levels[1] == 0 {\n\n return;\n\n }\n\n }\n\n 1 => {\n\n if deblock.levels[2] == 0 {\n\n return;\n\n }\n\n }\n\n 2 => {\n", "file_path": "src/deblock.rs", "rank": 34, "score": 227120.98120330484 }, { "content": "pub fn select_segment<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &TileStateMut<'_, T>, tile_bo: TileBlockOffset,\n\n bsize: BlockSize, skip: bool,\n\n) -> std::ops::RangeInclusive<u8> {\n\n use crate::api::SegmentationLevel;\n\n use crate::rdo::spatiotemporal_scale;\n\n use arrayvec::ArrayVec;\n\n\n\n // If skip is true or segmentation is turned off, sidx is not coded.\n\n if skip || !fi.enable_segmentation {\n\n return 0..=0;\n\n }\n\n\n\n let segment_2_is_lossless = fi.base_q_idx as i16\n\n + ts.segmentation.data[2][SegLvl::SEG_LVL_ALT_Q as usize]\n\n < 1;\n\n\n\n if fi.config.speed_settings.segmentation == SegmentationLevel::Full {\n\n return if segment_2_is_lossless { 0..=1 } else { 0..=2 };\n\n }\n", "file_path": "src/segmentation.rs", "rank": 35, "score": 227120.98120330484 }, { "content": "pub fn segmentation_optimize<T: Pixel>(\n\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>,\n\n) {\n\n assert!(fi.enable_segmentation);\n\n 
fs.segmentation.enabled = true;\n\n\n\n if fs.segmentation.enabled {\n\n fs.segmentation.update_map = true;\n\n\n\n // We don't change the values between frames.\n\n fs.segmentation.update_data = fi.primary_ref_frame == PRIMARY_REF_NONE;\n\n\n\n if !fs.segmentation.update_data {\n\n return;\n\n }\n\n\n\n // A series of AWCY runs with deltas 13, 15, 17, 18, 19, 20, 21, 22, 23\n\n // showed this to be the optimal one.\n\n const TEMPORAL_RDO_QI_DELTA: i16 = 21;\n\n\n", "file_path": "src/segmentation.rs", "rank": 36, "score": 227120.98120330484 }, { "content": "pub fn spatiotemporal_scale<T: Pixel>(\n\n fi: &FrameInvariants<T>, frame_bo: PlaneBlockOffset, bsize: BlockSize,\n\n) -> DistortionScale {\n\n if !fi.config.temporal_rdo() && fi.config.tune != Tune::Psychovisual {\n\n return DistortionScale::default();\n\n }\n\n\n\n let x0 = frame_bo.0.x >> IMPORTANCE_BLOCK_TO_BLOCK_SHIFT;\n\n let y0 = frame_bo.0.y >> IMPORTANCE_BLOCK_TO_BLOCK_SHIFT;\n\n let x1 = (x0 + bsize.width_imp_b()).min(fi.w_in_imp_b);\n\n let y1 = (y0 + bsize.height_imp_b()).min(fi.h_in_imp_b);\n\n let den = (((x1 - x0) * (y1 - y0)) as u64) << DistortionScale::SHIFT;\n\n\n\n let mut sum = 0;\n\n for y in y0..y1 {\n\n sum += fi.distortion_scales[y * fi.w_in_imp_b..][x0..x1]\n\n .iter()\n\n .zip(fi.activity_scales[y * fi.w_in_imp_b..][x0..x1].iter())\n\n .take(MAX_SB_IN_IMP_B)\n\n .map(|(d, a)| d.0 as u64 * a.0 as u64)\n\n .sum::<u64>();\n\n }\n\n DistortionScale(((sum + (den >> 1)) / den) as u32)\n\n}\n\n\n", "file_path": "src/rdo.rs", "rank": 37, "score": 227120.98120330484 }, { "content": "// Input params follow the same rules as sgrproj_stripe_filter.\n\n// Inputs are relative to the colocated slice views.\n\npub fn sgrproj_solve<T: Pixel>(\n\n set: u8, fi: &FrameInvariants<T>,\n\n integral_image_buffer: &IntegralImageBuffer, input: &PlaneRegion<'_, T>,\n\n cdeffed: &PlaneSlice<T>, cdef_w: usize, cdef_h: usize,\n\n) -> (i8, i8) {\n\n let bdm8 = fi.sequence.bit_depth - 8;\n\n\n\n let mut a_r2: [[u32; 
IMAGE_WIDTH_MAX + 2]; 2] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 2];\n\n let mut b_r2: [[u32; IMAGE_WIDTH_MAX + 2]; 2] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 2];\n\n let mut f_r2_0: [u32; IMAGE_WIDTH_MAX] = [0; IMAGE_WIDTH_MAX];\n\n let mut f_r2_1: [u32; IMAGE_WIDTH_MAX] = [0; IMAGE_WIDTH_MAX];\n\n let mut a_r1: [[u32; IMAGE_WIDTH_MAX + 2]; 3] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 3];\n\n let mut b_r1: [[u32; IMAGE_WIDTH_MAX + 2]; 3] =\n\n [[0; IMAGE_WIDTH_MAX + 2]; 3];\n\n let mut f_r1: [u32; IMAGE_WIDTH_MAX] = [0; IMAGE_WIDTH_MAX];\n\n\n\n let s_r2: u32 = SGRPROJ_PARAMS_S[set as usize][0];\n", "file_path": "src/lrf.rs", "rank": 38, "score": 227120.98120330484 }, { "content": "pub fn encode_frame<T: Pixel>(\n\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>, inter_cfg: &InterConfig,\n\n) -> Vec<u8> {\n\n debug_assert!(!fi.show_existing_frame);\n\n debug_assert!(!fi.invalid);\n\n let obu_extension = 0;\n\n\n\n let mut packet = Vec::new();\n\n\n\n if fi.enable_segmentation {\n\n fs.segmentation = get_initial_segmentation(fi);\n\n segmentation_optimize(fi, fs);\n\n }\n\n let tile_group = encode_tile_group(fi, fs, inter_cfg);\n\n\n\n if fi.frame_type == FrameType::KEY {\n\n write_key_frame_obus(&mut packet, fi, obu_extension).unwrap();\n\n }\n\n\n\n let mut buf1 = Vec::new();\n", "file_path": "src/encoder.rs", "rank": 39, "score": 227120.98120330484 }, { "content": "#[inline(always)]\n\nfn get_scaled_luma_q0(alpha_q3: i16, ac_pred_q3: i16) -> i32 {\n\n let scaled_luma_q6 = (alpha_q3 as i32) * (ac_pred_q3 as i32);\n\n let abs_scaled_luma_q0 = (scaled_luma_q6.abs() + 32) >> 6;\n\n if scaled_luma_q6 < 0 {\n\n -abs_scaled_luma_q0\n\n } else {\n\n abs_scaled_luma_q0\n\n }\n\n}\n\n\n\npub(crate) mod rust {\n\n use super::*;\n\n use crate::context::MAX_TX_SIZE;\n\n use crate::cpu_features::CpuFeatureLevel;\n\n use crate::tiling::PlaneRegionMut;\n\n use crate::transform::TxSize;\n\n use crate::util::{round_shift, Aligned};\n\n use crate::Pixel;\n\n use std::mem::size_of;\n\n\n", "file_path": 
"src/predict.rs", "rank": 40, "score": 226946.6226327172 }, { "content": "fn gen_const<W: Write>(f: &mut W, comment: &str, name: &str, value: &str) -> Result<(), Box<dyn Error>> {\n\n writeln!(\n\n f,\n\n \"{}\\n{}{}{}\\\"{}\\\";\",\n\n comment, CONST_PREFIX, name, CONST_TYPE, value\n\n )?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::gen_const;\n\n use crate::constants::ConstantsFlags;\n\n use crate::output::generate_build_info;\n\n use regex::Regex;\n\n use std::io::Cursor;\n\n\n\n lazy_static! {\n\n static ref CONST_RE: Regex =\n\n Regex::new(r#\"^/// .*[\\r\\n]+pub const [A-Z_]+: \\&str = \".*\";[\\r\\n]+$\"#)\n", "file_path": "crates/vergen/src/output/codegen.rs", "rank": 41, "score": 223511.63246958988 }, { "content": "pub fn compute_rd_cost<T: Pixel>(\n\n fi: &FrameInvariants<T>, rate: u32, distortion: ScaledDistortion,\n\n) -> f64 {\n\n let rate_in_bits = (rate as f64) / ((1 << OD_BITRES) as f64);\n\n distortion.0 as f64 + fi.lambda * rate_in_bits\n\n}\n\n\n", "file_path": "src/rdo.rs", "rank": 42, "score": 223126.98638040153 }, { "content": "#[hawktracer(cdef_filter_tile)]\n\npub fn cdef_filter_tile<T: Pixel>(\n\n fi: &FrameInvariants<T>, input: &Frame<T>, tb: &TileBlocks,\n\n output: &mut TileMut<'_, T>,\n\n) {\n\n // Each filter block is 64x64, except right and/or bottom for non-multiple-of-64 sizes.\n\n // FIXME: 128x128 SB support will break this, we need FilterBlockOffset etc.\n\n\n\n // No need to guard against having fewer actual coded blocks than\n\n // the output.rect() area. Inner code already guards this case.\n\n let fb_width = (output.planes[0].rect().width + 63) / 64;\n\n let fb_height = (output.planes[0].rect().height + 63) / 64;\n\n\n\n // should parallelize this\n\n for fby in 0..fb_height {\n\n for fbx in 0..fb_width {\n\n // tile_sbo is treated as an offset into the Tiles' plane\n\n // regions, not as an absolute offset in the visible frame. 
The\n\n // Tile's own offset is added to this in order to address into\n\n // the input Frame.\n\n let tile_sbo = TileSuperBlockOffset(SuperBlockOffset { x: fbx, y: fby });\n\n let cdef_index = tb.get_cdef(tile_sbo);\n\n let cdef_dirs = cdef_analyze_superblock(fi, input, tb, tile_sbo);\n\n\n\n cdef_filter_superblock(\n\n fi, input, output, tb, tile_sbo, cdef_index, &cdef_dirs,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "src/cdef.rs", "rank": 43, "score": 223126.98638040153 }, { "content": "#[hawktracer(deblock_filter_frame)]\n\npub fn deblock_filter_frame<T: Pixel>(\n\n deblock: &DeblockState, tile: &mut TileMut<T>, blocks: &TileBlocks,\n\n crop_w: usize, crop_h: usize, bd: usize, planes: usize,\n\n) {\n\n (&mut tile.planes[..planes]).par_iter_mut().enumerate().for_each(\n\n |(pli, mut plane)| {\n\n deblock_plane(deblock, &mut plane, pli, blocks, crop_w, crop_h, bd);\n\n },\n\n );\n\n}\n\n\n", "file_path": "src/deblock.rs", "rank": 44, "score": 223126.98638040153 }, { "content": "pub fn cdef_filter_superblock<T: Pixel>(\n\n fi: &FrameInvariants<T>, input: &Frame<T>, output: &mut TileMut<'_, T>,\n\n blocks: &TileBlocks<'_>, tile_sbo: TileSuperBlockOffset, cdef_index: u8,\n\n cdef_dirs: &CdefDirections,\n\n) {\n\n let bit_depth = fi.sequence.bit_depth;\n\n let coeff_shift = fi.sequence.bit_depth as i32 - 8;\n\n let cdef_damping = fi.cdef_damping as i32;\n\n let cdef_y_strength = fi.cdef_y_strengths[cdef_index as usize];\n\n let cdef_uv_strength = fi.cdef_uv_strengths[cdef_index as usize];\n\n let cdef_pri_y_strength = (cdef_y_strength / CDEF_SEC_STRENGTHS) as i32;\n\n let mut cdef_sec_y_strength = (cdef_y_strength % CDEF_SEC_STRENGTHS) as i32;\n\n let cdef_pri_uv_strength = (cdef_uv_strength / CDEF_SEC_STRENGTHS) as i32;\n\n let planes = if fi.sequence.chroma_sampling == Cs400 { 1 } else { 3 };\n\n let mut cdef_sec_uv_strength =\n\n (cdef_uv_strength % CDEF_SEC_STRENGTHS) as i32;\n\n if cdef_sec_y_strength == 3 {\n\n cdef_sec_y_strength += 1;\n\n }\n\n if 
cdef_sec_uv_strength == 3 {\n", "file_path": "src/cdef.rs", "rank": 45, "score": 223126.98638040153 }, { "content": "pub fn update_rec_buffer<T: Pixel>(\n\n output_frameno: u64, fi: &mut FrameInvariants<T>, fs: &FrameState<T>,\n\n) {\n\n let rfs = Arc::new(ReferenceFrame {\n\n order_hint: fi.order_hint,\n\n width: fi.width as u32,\n\n height: fi.height as u32,\n\n render_width: fi.render_width,\n\n render_height: fi.render_height,\n\n frame: fs.rec.clone(),\n\n input_hres: fs.input_hres.clone(),\n\n input_qres: fs.input_qres.clone(),\n\n cdfs: fs.cdfs,\n\n frame_me_stats: fs.frame_me_stats.clone(),\n\n output_frameno,\n\n segmentation: fs.segmentation,\n\n });\n\n for i in 0..(REF_FRAMES as usize) {\n\n if (fi.refresh_frame_flags & (1 << i)) != 0 {\n\n fi.rec_buffer.frames[i] = Some(Arc::clone(&rfs));\n", "file_path": "src/encoder.rs", "rank": 46, "score": 223126.98638040153 }, { "content": "pub fn save_block_motion<T: Pixel>(\n\n ts: &mut TileStateMut<'_, T>, bsize: BlockSize, tile_bo: TileBlockOffset,\n\n ref_frame: usize, mv: MotionVector,\n\n) {\n\n let tile_me_stats = &mut ts.me_stats[ref_frame];\n\n let tile_bo_x_end = (tile_bo.0.x + bsize.width_mi()).min(ts.mi_width);\n\n let tile_bo_y_end = (tile_bo.0.y + bsize.height_mi()).min(ts.mi_height);\n\n for mi_y in tile_bo.0.y..tile_bo_y_end {\n\n for mi_x in tile_bo.0.x..tile_bo_x_end {\n\n tile_me_stats[mi_y][mi_x].mv = mv;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/encoder.rs", "rank": 47, "score": 223126.98638040153 }, { "content": "pub fn cdef_analyze_superblock<T: Pixel>(\n\n fi: &FrameInvariants<T>, in_frame: &Frame<T>, blocks: &TileBlocks<'_>,\n\n sbo: TileSuperBlockOffset,\n\n) -> CdefDirections {\n\n let coeff_shift = fi.sequence.bit_depth as usize - 8;\n\n let mut dir: CdefDirections =\n\n CdefDirections { dir: [[0; 8]; 8], var: [[0; 8]; 8] };\n\n // Each direction block is 8x8 in y, and direction computation only looks at y\n\n for by in 0..8 {\n\n for bx in 0..8 {\n\n let block_offset = 
sbo.block_offset(bx << 1, by << 1);\n\n if block_offset.0.x < blocks.cols() && block_offset.0.y < blocks.rows() {\n\n let skip = blocks[block_offset].skip\n\n & blocks[sbo.block_offset(2 * bx + 1, 2 * by)].skip\n\n & blocks[sbo.block_offset(2 * bx, 2 * by + 1)].skip\n\n & blocks[sbo.block_offset(2 * bx + 1, 2 * by + 1)].skip;\n\n\n\n if !skip {\n\n let mut var: u32 = 0;\n\n let in_plane = &in_frame.planes[0];\n", "file_path": "src/cdef.rs", "rank": 48, "score": 223126.98638040153 }, { "content": "pub fn setup_integral_image<T: Pixel>(\n\n integral_image_buffer: &mut IntegralImageBuffer,\n\n integral_image_stride: usize, crop_w: usize, crop_h: usize, stripe_w: usize,\n\n stripe_h: usize, cdeffed: &PlaneSlice<T>, deblocked: &PlaneSlice<T>,\n\n) {\n\n let integral_image = &mut integral_image_buffer.integral_image;\n\n let sq_integral_image = &mut integral_image_buffer.sq_integral_image;\n\n\n\n // Number of elements outside the stripe\n\n let left_w = 4; // max radius of 2 + 2 padding\n\n let right_w = 3; // max radius of 2 + 1 padding\n\n\n\n assert_eq!(cdeffed.x, deblocked.x);\n\n\n\n // Find how many unique elements to use to the left and right\n\n let left_uniques = if cdeffed.x == 0 { 0 } else { left_w };\n\n let right_uniques = right_w.min(crop_w - stripe_w);\n\n\n\n // Find the total number of unique elements used\n\n let row_uniques = left_uniques + stripe_w + right_uniques;\n", "file_path": "src/lrf.rs", "rank": 49, "score": 223126.98638040153 }, { "content": "pub fn get_intra_edges<T: Pixel>(\n\n dst: &PlaneRegion<'_, T>,\n\n partition_bo: TileBlockOffset, // partition bo, BlockOffset\n\n bx: usize,\n\n by: usize,\n\n partition_size: BlockSize, // partition size, BlockSize\n\n po: PlaneOffset,\n\n tx_size: TxSize,\n\n bit_depth: usize,\n\n opt_mode: Option<PredictionMode>,\n\n enable_intra_edge_filter: bool,\n\n intra_param: IntraParam,\n\n) -> Aligned<[T; 4 * MAX_TX_SIZE + 1]> {\n\n let plane_cfg = &dst.plane_cfg;\n\n\n\n let mut edge_buf: Aligned<[T; 4 * 
MAX_TX_SIZE + 1]> =\n\n Aligned::uninitialized();\n\n //Aligned::new([T::cast_from(0); 4 * MAX_TX_SIZE + 1]);\n\n let base = 128u16 << (bit_depth - 8);\n\n\n", "file_path": "src/partition.rs", "rank": 50, "score": 223126.98638040153 }, { "content": "pub fn downsample_8bit(c: &mut Criterion) {\n\n let input = init_plane_u8(1920, 1080);\n\n c.bench_function(\"downsample_8bit\", move |b| {\n\n b.iter(|| {\n\n let _ = input.downsampled(input.cfg.width, input.cfg.height);\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/plane.rs", "rank": 51, "score": 222433.42016169924 }, { "content": "pub fn av1_iidentity8(c: &mut Criterion) {\n\n let (input, mut output) = init_buffers(8);\n\n\n\n c.bench_function(\"av1_iidentity8_8\", move |b| {\n\n b.iter(|| {\n\n transform::inverse::av1_iidentity8(&input[..], &mut output[..], 16)\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 52, "score": 222433.42016169924 }, { "content": "pub fn av1_iadst4(c: &mut Criterion) {\n\n let (input, mut output) = init_buffers(4);\n\n\n\n c.bench_function(\"av1_iadst4_8\", move |b| {\n\n b.iter(|| transform::inverse::av1_iadst4(&input[..], &mut output[..], 16))\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 53, "score": 222433.42016169924 }, { "content": "pub fn av1_iidentity4(c: &mut Criterion) {\n\n let (input, mut output) = init_buffers(4);\n\n\n\n c.bench_function(\"av1_iidentity4_8\", move |b| {\n\n b.iter(|| {\n\n transform::inverse::av1_iidentity4(&input[..], &mut output[..], 16)\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 54, "score": 222433.42016169924 }, { "content": "pub fn pred_bench(c: &mut Criterion) {\n\n bench_pred_fn(c, \"intra_dc_4x4\", |b: &mut Bencher| {\n\n intra_bench::<u16>(b, PredictionMode::DC_PRED, PredictionVariant::BOTH)\n\n });\n\n bench_pred_fn(c, \"intra_dc_128_4x4\", |b: &mut Bencher| {\n\n intra_bench::<u16>(b, PredictionMode::DC_PRED, PredictionVariant::NONE)\n\n });\n\n bench_pred_fn(c, 
\"intra_dc_left_4x4\", |b: &mut Bencher| {\n\n intra_bench::<u16>(b, PredictionMode::DC_PRED, PredictionVariant::LEFT)\n\n });\n\n bench_pred_fn(c, \"intra_dc_top_4x4\", |b: &mut Bencher| {\n\n intra_bench::<u16>(b, PredictionMode::DC_PRED, PredictionVariant::TOP)\n\n });\n\n bench_pred_fn(c, \"intra_v_4x4\", |b: &mut Bencher| {\n\n intra_bench::<u16>(b, PredictionMode::V_PRED, PredictionVariant::BOTH)\n\n });\n\n bench_pred_fn(c, \"intra_h_4x4\", |b: &mut Bencher| {\n\n intra_bench::<u16>(b, PredictionMode::H_PRED, PredictionVariant::BOTH)\n\n });\n\n bench_pred_fn(c, \"intra_smooth_4x4\", |b: &mut Bencher| {\n", "file_path": "benches/predict.rs", "rank": 55, "score": 222433.42016169924 }, { "content": "pub fn downsample_odd(c: &mut Criterion) {\n\n let input = init_plane_u8(1919, 1079);\n\n c.bench_function(\"downsample_odd\", move |b| {\n\n b.iter(|| {\n\n let _ = input.downsampled(input.cfg.width, input.cfg.height);\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/plane.rs", "rank": 56, "score": 222433.42016169924 }, { "content": "pub fn av1_idct8(c: &mut Criterion) {\n\n let (input, mut output) = init_buffers(8);\n\n\n\n c.bench_function(\"av1_idct8_8\", move |b| {\n\n b.iter(|| transform::inverse::av1_idct8(&input[..], &mut output[..], 16))\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 57, "score": 222433.42016169924 }, { "content": "pub fn get_satd(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\"get_satd\", bench_get_satd, DIST_BENCH_SET);\n\n}\n\n\n", "file_path": "benches/dist.rs", "rank": 58, "score": 222433.42016169927 }, { "content": "pub fn get_sad(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\"get_sad\", bench_get_sad, DIST_BENCH_SET);\n\n}\n\n\n", "file_path": "benches/dist.rs", "rank": 59, "score": 222433.42016169927 }, { "content": "pub fn av1_iadst8(c: &mut Criterion) {\n\n let (input, mut output) = init_buffers(8);\n\n\n\n c.bench_function(\"av1_iadst8_8\", move |b| {\n\n b.iter(|| 
transform::inverse::av1_iadst8(&input[..], &mut output[..], 16))\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 60, "score": 222433.42016169924 }, { "content": "pub fn av1_idct4(c: &mut Criterion) {\n\n let (input, mut output) = init_buffers(4);\n\n\n\n c.bench_function(\"av1_idct4_8\", move |b| {\n\n b.iter(|| transform::inverse::av1_idct4(&input[..], &mut output[..], 16))\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 61, "score": 222433.42016169924 }, { "content": "pub fn downsample_10bit(c: &mut Criterion) {\n\n let input = init_plane_u16(1920, 1080);\n\n c.bench_function(\"downsample_10bit\", move |b| {\n\n b.iter(|| {\n\n let _ = input.downsampled(input.cfg.width, input.cfg.height);\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(plane, downsample_8bit, downsample_odd, downsample_10bit);\n", "file_path": "benches/plane.rs", "rank": 62, "score": 222433.42016169924 }, { "content": "pub fn av1_iflipadst16(input: &[i32], output: &mut [i32], range: usize) {\n\n av1_iadst16(input, output, range);\n\n output[..16].reverse();\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 63, "score": 222396.08615067083 }, { "content": "pub fn av1_iflipadst8(input: &[i32], output: &mut [i32], range: usize) {\n\n av1_iadst8(input, output, range);\n\n output[..8].reverse();\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 64, "score": 222396.08615067083 }, { "content": "pub fn av1_iidentity8(input: &[i32], output: &mut [i32], _range: usize) {\n\n output[..8]\n\n .iter_mut()\n\n .zip(input[..8].iter())\n\n .for_each(|(outp, inp)| *outp = 2 * *inp);\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 65, "score": 222396.08615067083 }, { "content": "#[inline(always)]\n\npub fn av1_iadst8(input: &[i32], output: &mut [i32], range: usize) {\n\n assert!(input.len() >= 8);\n\n assert!(output.len() >= 8);\n\n\n\n // stage 1\n\n let stg1 = [\n\n input[7], input[0], input[5], input[2], input[3], input[4], input[1],\n\n 
input[6],\n\n ];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[4], stg1[0], COSPI_INV[60], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[60], stg1[0], -COSPI_INV[4], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[20], stg1[2], COSPI_INV[44], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[44], stg1[2], -COSPI_INV[20], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[36], stg1[4], COSPI_INV[28], stg1[5], INV_COS_BIT),\n\n half_btf(COSPI_INV[28], stg1[4], -COSPI_INV[36], stg1[5], INV_COS_BIT),\n\n half_btf(COSPI_INV[52], stg1[6], COSPI_INV[12], stg1[7], INV_COS_BIT),\n\n half_btf(COSPI_INV[12], stg1[6], -COSPI_INV[52], stg1[7], INV_COS_BIT),\n", "file_path": "src/transform/inverse.rs", "rank": 66, "score": 222396.08615067083 }, { "content": "pub fn av1_iflipadst4(input: &[i32], output: &mut [i32], range: usize) {\n\n av1_iadst4(input, output, range);\n\n output[..4].reverse();\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 67, "score": 222396.08615067083 }, { "content": "pub fn av1_iidentity4(input: &[i32], output: &mut [i32], _range: usize) {\n\n output[..4]\n\n .iter_mut()\n\n .zip(input[..4].iter())\n\n .for_each(|(outp, inp)| *outp = round_shift(SQRT2 * *inp, 12));\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 68, "score": 222396.08615067083 }, { "content": "#[inline(always)]\n\npub fn av1_iadst4(input: &[i32], output: &mut [i32], _range: usize) {\n\n assert!(input.len() >= 4);\n\n assert!(output.len() >= 4);\n\n\n\n let bit = 12;\n\n\n\n let x0 = input[0];\n\n let x1 = input[1];\n\n let x2 = input[2];\n\n let x3 = input[3];\n\n\n\n // stage 1\n\n let s0 = SINPI_INV[1] * x0;\n\n let s1 = SINPI_INV[2] * x0;\n\n let s2 = SINPI_INV[3] * x1;\n\n let s3 = SINPI_INV[4] * x2;\n\n let s4 = SINPI_INV[1] * x2;\n\n let s5 = SINPI_INV[2] * x3;\n\n let s6 = SINPI_INV[4] * x3;\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 69, "score": 222396.08615067083 }, { "content": "pub fn av1_idct8(input: &[i32], output: &mut [i32], range: 
usize) {\n\n assert!(input.len() >= 8);\n\n assert!(output.len() >= 8);\n\n\n\n // call idct4\n\n let temp_in = [input[0], input[2], input[4], input[6]];\n\n let mut temp_out: [i32; 4] = [0; 4];\n\n av1_idct4(&temp_in, &mut temp_out, range);\n\n\n\n // stage 0\n\n\n\n // stage 1\n\n let stg1 = [input[1], input[5], input[3], input[7]];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[56], stg1[0], -COSPI_INV[8], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[24], stg1[1], -COSPI_INV[40], stg1[2], INV_COS_BIT),\n\n half_btf(COSPI_INV[40], stg1[1], COSPI_INV[24], stg1[2], INV_COS_BIT),\n\n half_btf(COSPI_INV[8], stg1[0], COSPI_INV[56], stg1[3], INV_COS_BIT),\n", "file_path": "src/transform/inverse.rs", "rank": 70, "score": 222396.08615067083 }, { "content": "pub fn av1_idct4(input: &[i32], output: &mut [i32], range: usize) {\n\n assert!(input.len() >= 4);\n\n assert!(output.len() >= 4);\n\n\n\n // stage 1\n\n let stg1 = [input[0], input[2], input[1], input[3]];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[32], stg1[0], COSPI_INV[32], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[32], stg1[0], -COSPI_INV[32], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[48], stg1[2], -COSPI_INV[16], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[16], stg1[2], COSPI_INV[48], stg1[3], INV_COS_BIT),\n\n ];\n\n\n\n // stage 3\n\n output[0] = clamp_value(stg2[0] + stg2[3], range);\n\n output[1] = clamp_value(stg2[1] + stg2[2], range);\n\n output[2] = clamp_value(stg2[1] - stg2[2], range);\n\n output[3] = clamp_value(stg2[0] - stg2[3], range);\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 71, "score": 222396.08615067083 }, { "content": "fn write_b_bench(b: &mut Bencher, tx_size: TxSize, qindex: usize) {\n\n let config = Arc::new(EncoderConfig {\n\n width: 1024,\n\n height: 1024,\n\n quantizer: qindex,\n\n speed_settings: SpeedSettings::from_preset(10),\n\n ..Default::default()\n\n });\n\n let sequence = Arc::new(Sequence::new(&Default::default()));\n\n 
let fi = FrameInvariants::<u16>::new(config, sequence);\n\n let mut w = WriterEncoder::new();\n\n let mut fc = CDFContext::new(fi.base_q_idx);\n\n let mut fb = FrameBlocks::new(fi.sb_width * 16, fi.sb_height * 16);\n\n let mut tb = fb.as_tile_blocks_mut();\n\n let bc = BlockContext::new(&mut tb);\n\n let mut fs = FrameState::new(&fi);\n\n let mut ts = fs.as_tile_state_mut();\n\n // For now, restoration unit size is locked to superblock size.\n\n let mut cw = ContextWriter::new(&mut fc, bc);\n\n\n", "file_path": "benches/bench.rs", "rank": 72, "score": 220238.16415076648 }, { "content": "#[inline(always)]\n\n#[allow(clippy::let_and_return)]\n\npub fn get_sad<T: Pixel>(\n\n src: &PlaneRegion<'_, T>, dst: &PlaneRegion<'_, T>, bsize: BlockSize,\n\n bit_depth: usize, cpu: CpuFeatureLevel,\n\n) -> u32 {\n\n let call_rust = || -> u32 { rust::get_sad(dst, src, bsize, bit_depth, cpu) };\n\n\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_dist = call_rust();\n\n\n\n let dist = match T::type_enum() {\n\n PixelType::U8 => match SAD_FNS[cpu.as_index()][to_index(bsize)] {\n\n Some(func) => unsafe {\n\n (func)(\n\n src.data_ptr() as *const _,\n\n T::to_asm_stride(src.plane_cfg.stride),\n\n dst.data_ptr() as *const _,\n\n T::to_asm_stride(dst.plane_cfg.stride),\n\n )\n\n },\n\n None => call_rust(),\n", "file_path": "src/asm/aarch64/dist.rs", "rank": 73, "score": 219372.00907057052 }, { "content": "/// RDO-based transform type decision\n\n/// If cw_checkpoint is None, a checkpoint for cw's (ContextWriter) current\n\n/// state is created and stored for later use.\n\npub fn rdo_tx_type_decision<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, cw_checkpoint: &mut Option<ContextWriterCheckpoint>,\n\n mode: PredictionMode, ref_frames: [RefType; 2], mvs: [MotionVector; 2],\n\n bsize: BlockSize, tile_bo: TileBlockOffset, tx_size: TxSize, tx_set: TxSet,\n\n tx_types: &[TxType],\n\n) -> (TxType, f64) {\n\n let mut best_type = 
TxType::DCT_DCT;\n\n let mut best_rd = std::f64::MAX;\n\n\n\n let PlaneConfig { xdec, ydec, .. } = ts.input.planes[1].cfg;\n\n let is_chroma_block =\n\n has_chroma(tile_bo, bsize, xdec, ydec, fi.sequence.chroma_sampling);\n\n\n\n let is_inter = !mode.is_intra();\n\n\n\n if cw_checkpoint.is_none() {\n\n // Only run the first call\n\n // Prevents creating multiple checkpoints for own version of cw\n\n *cw_checkpoint =\n", "file_path": "src/rdo.rs", "rank": 74, "score": 219371.31129032868 }, { "content": "// Write a packet containing only the placeholder that tells the decoder\n\n// to present the already decoded frame present at `frame_to_show_map_idx`\n\n//\n\n// See `av1-spec` Section 6.8.2 and 7.18.\n\npub fn encode_show_existing_frame<T: Pixel>(\n\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>, inter_cfg: &InterConfig,\n\n) -> Vec<u8> {\n\n debug_assert!(fi.show_existing_frame);\n\n let obu_extension = 0;\n\n\n\n let mut packet = Vec::new();\n\n\n\n if fi.frame_type == FrameType::KEY {\n\n write_key_frame_obus(&mut packet, fi, obu_extension).unwrap();\n\n }\n\n\n\n let mut buf1 = Vec::new();\n\n let mut buf2 = Vec::new();\n\n {\n\n let mut bw2 = BitWriter::endian(&mut buf2, BigEndian);\n\n bw2.write_frame_header_obu(fi, fs, inter_cfg).unwrap();\n\n }\n\n\n\n {\n", "file_path": "src/encoder.rs", "rank": 75, "score": 219366.2349684429 }, { "content": "#[inline(always)]\n\npub fn prep_8tap<T: Pixel>(\n\n tmp: &mut [i16], src: PlaneSlice<'_, T>, width: usize, height: usize,\n\n col_frac: i32, row_frac: i32, mode_x: FilterMode, mode_y: FilterMode,\n\n bit_depth: usize, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |tmp: &mut [i16]| {\n\n rust::prep_8tap(\n\n tmp, src, width, height, col_frac, row_frac, mode_x, mode_y, bit_depth,\n\n cpu,\n\n );\n\n };\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_tmp = {\n\n let mut copy = vec![0; width * height];\n\n copy[..].copy_from_slice(&tmp[..width * height]);\n\n call_rust(&mut copy);\n\n copy\n\n };\n\n match 
T::type_enum() {\n\n PixelType::U8 => {\n", "file_path": "src/asm/x86/mc.rs", "rank": 76, "score": 219366.2349684429 }, { "content": "pub fn mc_avg<T: Pixel>(\n\n dst: &mut PlaneRegionMut<'_, T>, tmp1: &[i16], tmp2: &[i16], width: usize,\n\n height: usize, bit_depth: usize, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |dst: &mut PlaneRegionMut<'_, T>| {\n\n rust::mc_avg(dst, tmp1, tmp2, width, height, bit_depth, cpu);\n\n };\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_dst = {\n\n let mut copy = dst.scratch_copy();\n\n call_rust(&mut copy.as_region_mut());\n\n copy\n\n };\n\n match T::type_enum() {\n\n PixelType::U8 => match AVG_FNS[cpu.as_index()] {\n\n Some(func) => unsafe {\n\n (func)(\n\n dst.data_ptr_mut() as *mut _,\n\n T::to_asm_stride(dst.plane_cfg.stride),\n\n tmp1.as_ptr(),\n", "file_path": "src/asm/aarch64/mc.rs", "rank": 77, "score": 219366.2349684429 }, { "content": "#[inline(always)]\n\npub fn put_8tap<T: Pixel>(\n\n dst: &mut PlaneRegionMut<'_, T>, src: PlaneSlice<'_, T>, width: usize,\n\n height: usize, col_frac: i32, row_frac: i32, mode_x: FilterMode,\n\n mode_y: FilterMode, bit_depth: usize, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |dst: &mut PlaneRegionMut<'_, T>| {\n\n rust::put_8tap(\n\n dst, src, width, height, col_frac, row_frac, mode_x, mode_y, bit_depth,\n\n cpu,\n\n );\n\n };\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_dst = {\n\n let mut copy = dst.scratch_copy();\n\n call_rust(&mut copy.as_region_mut());\n\n copy\n\n };\n\n match T::type_enum() {\n\n PixelType::U8 => {\n\n match PUT_FNS[cpu.as_index()][get_2d_mode_idx(mode_x, mode_y)] {\n", "file_path": "src/asm/x86/mc.rs", "rank": 78, "score": 219366.2349684429 }, { "content": "#[inline(always)]\n\npub fn prep_8tap<T: Pixel>(\n\n tmp: &mut [i16], src: PlaneSlice<'_, T>, width: usize, height: usize,\n\n col_frac: i32, row_frac: i32, mode_x: FilterMode, mode_y: FilterMode,\n\n bit_depth: usize, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |tmp: &mut [i16]| {\n\n 
rust::prep_8tap(\n\n tmp, src, width, height, col_frac, row_frac, mode_x, mode_y, bit_depth,\n\n cpu,\n\n );\n\n };\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_tmp = {\n\n let mut copy = vec![0; width * height];\n\n copy[..].copy_from_slice(&tmp[..width * height]);\n\n call_rust(&mut copy);\n\n copy\n\n };\n\n match T::type_enum() {\n\n PixelType::U8 => {\n", "file_path": "src/asm/aarch64/mc.rs", "rank": 79, "score": 219366.2349684429 }, { "content": "#[inline(always)]\n\npub fn put_8tap<T: Pixel>(\n\n dst: &mut PlaneRegionMut<'_, T>, src: PlaneSlice<'_, T>, width: usize,\n\n height: usize, col_frac: i32, row_frac: i32, mode_x: FilterMode,\n\n mode_y: FilterMode, bit_depth: usize, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |dst: &mut PlaneRegionMut<'_, T>| {\n\n rust::put_8tap(\n\n dst, src, width, height, col_frac, row_frac, mode_x, mode_y, bit_depth,\n\n cpu,\n\n );\n\n };\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_dst = {\n\n let mut copy = dst.scratch_copy();\n\n call_rust(&mut copy.as_region_mut());\n\n copy\n\n };\n\n match T::type_enum() {\n\n PixelType::U8 => {\n\n match PUT_FNS[cpu.as_index()][get_2d_mode_idx(mode_x, mode_y)] {\n", "file_path": "src/asm/aarch64/mc.rs", "rank": 80, "score": 219366.2349684429 }, { "content": "pub fn cdef_analyze_superblock_range<T: Pixel>(\n\n fi: &FrameInvariants<T>, in_frame: &Frame<T>, blocks: &TileBlocks<'_>,\n\n sb_w: usize, sb_h: usize,\n\n) -> Vec<CdefDirections> {\n\n let mut ret = Vec::<CdefDirections>::with_capacity(sb_h * sb_w);\n\n for sby in 0..sb_h {\n\n for sbx in 0..sb_w {\n\n let sbo = TileSuperBlockOffset(SuperBlockOffset { x: sbx, y: sby });\n\n ret.push(cdef_analyze_superblock(fi, in_frame, blocks, sbo));\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/cdef.rs", "rank": 81, "score": 219366.2349684429 }, { "content": "pub fn rdo_tx_size_type<T: Pixel>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, bsize: BlockSize, tile_bo: TileBlockOffset,\n\n 
luma_mode: PredictionMode, ref_frames: [RefType; 2], mvs: [MotionVector; 2],\n\n skip: bool,\n\n) -> (TxSize, TxType) {\n\n let is_inter = !luma_mode.is_intra();\n\n let mut tx_size = max_txsize_rect_lookup[bsize as usize];\n\n\n\n if fi.enable_inter_txfm_split && is_inter && !skip {\n\n tx_size = sub_tx_size_map[tx_size as usize]; // Always choose one level split size\n\n }\n\n\n\n let mut best_tx_type = TxType::DCT_DCT;\n\n let mut best_tx_size = tx_size;\n\n let mut best_rd = std::f64::MAX;\n\n\n\n let do_rdo_tx_size =\n\n fi.tx_mode_select && fi.config.speed_settings.rdo_tx_decision && !is_inter;\n\n let rdo_tx_depth = if do_rdo_tx_size { 2 } else { 0 };\n", "file_path": "src/rdo.rs", "rank": 82, "score": 219366.2349684429 }, { "content": "pub fn mc_avg<T: Pixel>(\n\n dst: &mut PlaneRegionMut<'_, T>, tmp1: &[i16], tmp2: &[i16], width: usize,\n\n height: usize, bit_depth: usize, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |dst: &mut PlaneRegionMut<'_, T>| {\n\n rust::mc_avg(dst, tmp1, tmp2, width, height, bit_depth, cpu);\n\n };\n\n #[cfg(feature = \"check_asm\")]\n\n let ref_dst = {\n\n let mut copy = dst.scratch_copy();\n\n call_rust(&mut copy.as_region_mut());\n\n copy\n\n };\n\n match T::type_enum() {\n\n PixelType::U8 => match AVG_FNS[cpu.as_index()] {\n\n Some(func) => unsafe {\n\n (func)(\n\n dst.data_ptr_mut() as *mut _,\n\n T::to_asm_stride(dst.plane_cfg.stride),\n\n tmp1.as_ptr(),\n", "file_path": "src/asm/x86/mc.rs", "rank": 83, "score": 219366.2349684429 }, { "content": "// If n != 0, returns the floor of log base 2 of n. 
If n == 0, returns 0.\n\npub fn log_in_base_2(n: u32) -> u8 {\n\n 31 - cmp::min(31, n.leading_zeros() as u8)\n\n}\n", "file_path": "src/context/mod.rs", "rank": 84, "score": 219151.24345931935 }, { "content": "fn fill_frame_const<T: Pixel>(frame: &mut Frame<T>, value: T) {\n\n for plane in frame.planes.iter_mut() {\n\n let stride = plane.cfg.stride;\n\n for row in plane.data.chunks_mut(stride) {\n\n for pixel in row {\n\n *pixel = value;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"channel-api\")]\n\nmod channel {\n\n use super::*;\n\n\n\n #[interpolate_test(low_latency_no_scene_change, true, true)]\n\n #[interpolate_test(reorder_no_scene_change, false, true)]\n\n #[interpolate_test(low_latency_scene_change_detection, true, false)]\n\n #[interpolate_test(reorder_scene_change_detection, false, false)]\n\n fn flush(low_lantency: bool, no_scene_detection: bool) {\n", "file_path": "src/api/test.rs", "rank": 85, "score": 218748.52718835144 }, { "content": "pub fn sse_wxh_4x4(c: &mut Criterion) {\n\n let cpu = CpuFeatureLevel::default();\n\n let src1 = init_plane_u8(8, 8, 1);\n\n let src2 = init_plane_u8(8, 8, 2);\n\n\n\n c.bench_function(\"sse_wxh_4x4\", move |b| {\n\n b.iter(|| {\n\n rdo::sse_wxh(\n\n &src1.region(Area::Rect { x: 0, y: 0, width: 4, height: 4 }),\n\n &src2.region(Area::Rect { x: 0, y: 0, width: 4, height: 4 }),\n\n 4,\n\n 4,\n\n |_, _| DistortionScale::default(),\n\n 8,\n\n cpu,\n\n )\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(rdo, cdef_dist_wxh_8x8, sse_wxh_8x8, sse_wxh_4x4,);\n", "file_path": "benches/rdo.rs", "rank": 86, "score": 218435.43355366046 }, { "content": "pub fn sse_wxh_8x8(c: &mut Criterion) {\n\n let cpu = CpuFeatureLevel::default();\n\n let src1 = init_plane_u8(8, 8, 1);\n\n let src2 = init_plane_u8(8, 8, 2);\n\n\n\n c.bench_function(\"sse_wxh_8x8\", move |b| {\n\n b.iter(|| {\n\n rdo::sse_wxh(\n\n &src1.region(Area::Rect { x: 0, y: 0, width: 8, height: 8 }),\n\n &src2.region(Area::Rect { x: 0, y: 0, width: 8, height: 8 }),\n\n 
8,\n\n 8,\n\n |_, _| DistortionScale::default(),\n\n 8,\n\n cpu,\n\n )\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/rdo.rs", "rank": 87, "score": 218435.43355366046 }, { "content": "pub fn bench_forward_transforms(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"forward_transform\");\n\n\n\n let mut rng = rand::thread_rng();\n\n let cpu = CpuFeatureLevel::default();\n\n\n\n let tx_sizes = {\n\n use TxSize::*;\n\n [\n\n TX_4X4, TX_8X8, TX_16X16, TX_32X32, TX_64X64, TX_4X8, TX_8X4, TX_8X16,\n\n TX_16X8, TX_16X32, TX_32X16, TX_32X64, TX_64X32, TX_4X16, TX_16X4,\n\n TX_8X32, TX_32X8, TX_16X64, TX_64X16,\n\n ]\n\n };\n\n\n\n for &tx_size in &tx_sizes {\n\n let area = tx_size.area();\n\n\n\n let input: Vec<i16> =\n\n (0..area).map(|_| rng.gen_range(-255..256)).collect();\n", "file_path": "benches/transform.rs", "rank": 88, "score": 218435.43355366046 }, { "content": "pub fn get_weighted_sse(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\n\n \"get_weighted_sse\",\n\n bench_get_weighted_sse,\n\n DIST_BENCH_SET,\n\n );\n\n}\n", "file_path": "benches/dist.rs", "rank": 89, "score": 218435.43355366046 }, { "content": "#[inline]\n\npub fn sgrproj_box_f_r0<T: Pixel>(\n\n f: &mut [u32], y: usize, w: usize, cdeffed: &PlaneSlice<T>,\n\n cpu: CpuFeatureLevel,\n\n) {\n\n if cpu >= CpuFeatureLevel::AVX2 {\n\n return unsafe {\n\n sgrproj_box_f_r0_avx2(f, y, w, cdeffed);\n\n };\n\n }\n\n\n\n rust::sgrproj_box_f_r0(f, y, w, cdeffed, cpu);\n\n}\n\n\n", "file_path": "src/asm/x86/lrf.rs", "rank": 91, "score": 215818.87502245654 }, { "content": "#[inline(always)]\n\npub fn dispatch_predict_intra<T: Pixel>(\n\n mode: PredictionMode, variant: PredictionVariant,\n\n dst: &mut PlaneRegionMut<'_, T>, tx_size: TxSize, bit_depth: usize,\n\n ac: &[i16], angle: isize, ief_params: Option<IntraEdgeFilterParameters>,\n\n edge_buf: &Aligned<[T; 4 * MAX_TX_SIZE + 1]>, cpu: CpuFeatureLevel,\n\n) {\n\n let call_rust = |dst: &mut PlaneRegionMut<'_, T>| {\n\n 
rust::dispatch_predict_intra(\n\n mode, variant, dst, tx_size, bit_depth, ac, angle, ief_params, edge_buf,\n\n cpu,\n\n );\n\n };\n\n\n\n if cpu < CpuFeatureLevel::NEON {\n\n return call_rust(dst);\n\n }\n\n\n\n unsafe {\n\n let dst_ptr = dst.data_ptr_mut() as *mut _;\n\n let dst_u16 = dst.data_ptr_mut() as *mut u16;\n", "file_path": "src/asm/aarch64/predict.rs", "rank": 92, "score": 215818.87502245654 }, { "content": "#[inline]\n\npub fn sgrproj_box_f_r1<T: Pixel>(\n\n af: &[&[u32]; 3], bf: &[&[u32]; 3], f: &mut [u32], y: usize, w: usize,\n\n cdeffed: &PlaneSlice<T>, cpu: CpuFeatureLevel,\n\n) {\n\n if cpu >= CpuFeatureLevel::AVX2 {\n\n return unsafe {\n\n sgrproj_box_f_r1_avx2(af, bf, f, y, w, cdeffed);\n\n };\n\n }\n\n\n\n rust::sgrproj_box_f_r1(af, bf, f, y, w, cdeffed, cpu);\n\n}\n\n\n", "file_path": "src/asm/x86/lrf.rs", "rank": 93, "score": 215818.87502245654 }, { "content": "#[inline]\n\npub fn sgrproj_box_f_r2<T: Pixel>(\n\n af: &[&[u32]; 2], bf: &[&[u32]; 2], f0: &mut [u32], f1: &mut [u32],\n\n y: usize, w: usize, cdeffed: &PlaneSlice<T>, cpu: CpuFeatureLevel,\n\n) {\n\n if cpu >= CpuFeatureLevel::AVX2 {\n\n return unsafe {\n\n sgrproj_box_f_r2_avx2(af, bf, f0, f1, y, w, cdeffed);\n\n };\n\n }\n\n\n\n rust::sgrproj_box_f_r2(af, bf, f0, f1, y, w, cdeffed, cpu);\n\n}\n\n\n\nstatic X_BY_XPLUS1: [u32; 256] = [\n\n // Special case: Map 0 -> 1 (corresponding to a value of 1/256)\n\n // instead of 0. 
See comments in selfguided_restoration_internal() for why\n\n 1, 128, 171, 192, 205, 213, 219, 224, 228, 230, 233, 235, 236, 238, 239, 240,\n\n 241, 242, 243, 243, 244, 244, 245, 245, 246, 246, 247, 247, 247, 247, 248,\n\n 248, 248, 248, 249, 249, 249, 249, 249, 250, 250, 250, 250, 250, 250, 250,\n\n 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 252, 252, 252, 252, 252,\n", "file_path": "src/asm/x86/lrf.rs", "rank": 94, "score": 215818.87502245654 }, { "content": "fn encode_partition_bottomup<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w_pre_cdef: &mut W, w_post_cdef: &mut W,\n\n bsize: BlockSize, tile_bo: TileBlockOffset, ref_rd_cost: f64,\n\n inter_cfg: &InterConfig,\n\n) -> PartitionGroupParameters {\n\n let rdo_type = RDOType::PixelDistRealRate;\n\n let mut rd_cost = std::f64::MAX;\n\n let mut best_rd = std::f64::MAX;\n\n let mut rdo_output = PartitionGroupParameters {\n\n rd_cost,\n\n part_type: PartitionType::PARTITION_INVALID,\n\n part_modes: ArrayVec::new(),\n\n };\n\n\n\n if tile_bo.0.x >= ts.mi_width || tile_bo.0.y >= ts.mi_height {\n\n return rdo_output;\n\n }\n\n\n\n let is_square = bsize.is_sqr();\n", "file_path": "src/encoder.rs", "rank": 95, "score": 215496.38854836643 }, { "content": "#[inline(always)]\n\nfn rdo_partition_simple<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w_pre_cdef: &mut W, w_post_cdef: &mut W,\n\n bsize: BlockSize, tile_bo: TileBlockOffset, inter_cfg: &InterConfig,\n\n partition: PartitionType, rdo_type: RDOType, best_rd: f64,\n\n child_modes: &mut ArrayVec<PartitionParameters, 4>,\n\n) -> Option<f64> {\n\n debug_assert!(tile_bo.0.x < ts.mi_width && tile_bo.0.y < ts.mi_height);\n\n let subsize = bsize.subsize(partition);\n\n\n\n debug_assert!(subsize != BlockSize::BLOCK_INVALID);\n\n\n\n let cost = if bsize >= BlockSize::BLOCK_8X8 {\n\n let w: &mut W = if cw.bc.cdef_coded { w_post_cdef } else { 
w_pre_cdef };\n\n let tell = w.tell_frac();\n\n cw.write_partition(w, tile_bo, partition, bsize);\n\n compute_rd_cost(fi, w.tell_frac() - tell, ScaledDistortion::zero())\n\n } else {\n\n 0.0\n\n };\n", "file_path": "src/rdo.rs", "rank": 96, "score": 215496.38854836643 }, { "content": "fn encode_partition_topdown<T: Pixel, W: Writer>(\n\n fi: &FrameInvariants<T>, ts: &mut TileStateMut<'_, T>,\n\n cw: &mut ContextWriter, w_pre_cdef: &mut W, w_post_cdef: &mut W,\n\n bsize: BlockSize, tile_bo: TileBlockOffset,\n\n block_output: &Option<PartitionGroupParameters>, inter_cfg: &InterConfig,\n\n) {\n\n if tile_bo.0.x >= ts.mi_width || tile_bo.0.y >= ts.mi_height {\n\n return;\n\n }\n\n let is_square = bsize.is_sqr();\n\n let rdo_type = RDOType::PixelDistRealRate;\n\n let hbs = bsize.width_mi() >> 1;\n\n let has_cols = tile_bo.0.x + hbs < ts.mi_width;\n\n let has_rows = tile_bo.0.y + hbs < ts.mi_height;\n\n\n\n // TODO: Update for 128x128 superblocks\n\n assert!(fi.partition_range.max <= BlockSize::BLOCK_64X64);\n\n\n\n let must_split =\n\n is_square && (bsize > fi.partition_range.max || !has_cols || !has_rows);\n", "file_path": "src/encoder.rs", "rank": 97, "score": 215496.38854836643 }, { "content": "pub fn av1_round_shift_array(arr: &mut [i32], size: usize, bit: i8) {\n\n if bit == 0 {\n\n return;\n\n }\n\n if bit > 0 {\n\n let bit = bit as usize;\n\n for i in arr.iter_mut().take(size) {\n\n *i = round_shift(*i, bit);\n\n }\n\n } else {\n\n for i in arr.iter_mut().take(size) {\n\n *i <<= -bit;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/transform/mod.rs", "rank": 98, "score": 214957.32462176128 }, { "content": "pub fn cdef_dist_wxh_8x8(c: &mut Criterion) {\n\n let src1 = init_plane_u8(8, 8, 1);\n\n let src2 = init_plane_u8(8, 8, 2);\n\n\n\n c.bench_function(\"cdef_dist_wxh_8x8\", move |b| {\n\n b.iter(|| {\n\n rdo::cdef_dist_wxh(\n\n &src1.region(Area::Rect { x: 0, y: 0, width: 8, height: 8 }),\n\n &src2.region(Area::Rect { x: 0, y: 0, width: 8, height: 8 }),\n\n 8,\n\n 
8,\n\n 8,\n\n |_, _| DistortionScale::default(),\n\n )\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/rdo.rs", "rank": 99, "score": 214670.9234709348 } ]
Rust
src/embeds/osu/pinned.rs
JoshiDhima/Bathbot
7a8dd8a768caede285df4f8ea5aead27bdbbf660
use std::fmt::Write; use eyre::Report; use hashbrown::HashMap; use rosu_v2::prelude::{Beatmapset, GameMode, Score, User}; use crate::{ commands::osu::TopOrder, core::Context, embeds::{osu, Author, Footer}, pp::PpCalculator, util::{ constants::OSU_BASE, datetime::how_long_ago_dynamic, numbers::with_comma_int, osu::ScoreOrder, ScoreExt, }, }; use super::OrderAppendix; pub struct PinnedEmbed { author: Author, description: String, footer: Footer, thumbnail: String, } impl PinnedEmbed { pub async fn new<'i, S>( user: &User, scores: S, ctx: &Context, sort_by: ScoreOrder, pages: (usize, usize), ) -> Self where S: Iterator<Item = &'i Score>, { let mut description = String::with_capacity(512); let farm = HashMap::new(); for score in scores { let map = score.map.as_ref().unwrap(); let mapset = score.mapset.as_ref().unwrap(); let (pp, max_pp, stars) = match PpCalculator::new(ctx, map.map_id).await { Ok(mut calc) => { calc.score(score); let stars = calc.stars(); let max_pp = calc.max_pp(); let pp = match score.pp { Some(pp) => pp, None => calc.pp() as f32, }; (Some(pp), Some(max_pp as f32), stars as f32) } Err(err) => { warn!("{:?}", Report::new(err)); (None, None, 0.0) } }; let stars = osu::get_stars(stars); let pp = osu::get_pp(pp, max_pp); let mapset_opt = if let ScoreOrder::RankedDate = sort_by { let mapset_fut = ctx.psql().get_beatmapset::<Beatmapset>(mapset.mapset_id); match mapset_fut.await { Ok(mapset) => Some(mapset), Err(err) => { let report = Report::new(err).wrap_err("failed to get mapset"); warn!("{report:?}"); None } } } else { None }; let _ = writeln!( description, "**- [{title} [{version}]]({OSU_BASE}b/{id}) {mods}** [{stars}]\n\ {grade} {pp} ~ {acc}% ~ {score}{appendix}\n[ {combo} ] ~ {hits} ~ {ago}", title = mapset.title, version = map.version, id = map.map_id, mods = osu::get_mods(score.mods), grade = score.grade_emote(score.mode), acc = score.acc(score.mode), score = with_comma_int(score.score), appendix = OrderAppendix::new(TopOrder::Other(sort_by), map, 
mapset_opt, score, &farm), combo = osu::get_combo(score, map), hits = score.hits_string(score.mode), ago = how_long_ago_dynamic(&score.created_at) ); } description.pop(); let footer_text = format!( "Page {}/{} | Mode: {}", pages.0, pages.1, mode_str(user.mode) ); Self { author: author!(user), description, footer: Footer::new(footer_text), thumbnail: user.avatar_url.to_owned(), } } } fn mode_str(mode: GameMode) -> &'static str { match mode { GameMode::STD => "osu!", GameMode::TKO => "Taiko", GameMode::CTB => "Catch", GameMode::MNA => "Mania", } } impl_builder!(PinnedEmbed { author, description, footer, thumbnail, });
use std::fmt::Write; use eyre::Report; use hashbrown::HashMap; use rosu_v2::prelude::{Beatmapset, GameMode, Score, User}; use crate::{ commands::osu::TopOrder, core::Context, embeds::{osu, Author, Footer}, pp::PpCalculator, util::{ constants::OSU_BASE, datetime::how_long_ago_dynamic, numbers::with_comma_int, osu::ScoreOrder, ScoreExt, }, }; use super::OrderAppendix; pub struct PinnedEmbed { author: Author, description: String, footer: Footer, thumbnail: String, } impl PinnedEmbed { pub async fn new<'i, S>( user: &User, scores: S, ctx: &Context, sort_by: ScoreOrder, pages: (usize, usize), ) -> Self where S: Iterator<Item = &'i Score>, { let mut description = String::with_capacity(512); let farm = HashMap::new(); for score in scores { let map = score.map.as_ref().unwrap(); let mapset = score.mapset.as_ref().unwrap(); let (pp, max_pp, stars) = match PpCalculator::new(ctx, map.map_id).await { Ok(mut calc) => { calc.score(score); let stars = calc.stars(); let max_pp = calc.max_pp(); let pp = match score.pp { Some(pp) => pp, None => calc.pp() as f32, }; (Some(pp), Some(max_pp as f32), stars as f32) } Err(err) => { warn!("{:?}", Report::new(err)); (None, None, 0.0) } }; let
} fn mode_str(mode: GameMode) -> &'static str { match mode { GameMode::STD => "osu!", GameMode::TKO => "Taiko", GameMode::CTB => "Catch", GameMode::MNA => "Mania", } } impl_builder!(PinnedEmbed { author, description, footer, thumbnail, });
stars = osu::get_stars(stars); let pp = osu::get_pp(pp, max_pp); let mapset_opt = if let ScoreOrder::RankedDate = sort_by { let mapset_fut = ctx.psql().get_beatmapset::<Beatmapset>(mapset.mapset_id); match mapset_fut.await { Ok(mapset) => Some(mapset), Err(err) => { let report = Report::new(err).wrap_err("failed to get mapset"); warn!("{report:?}"); None } } } else { None }; let _ = writeln!( description, "**- [{title} [{version}]]({OSU_BASE}b/{id}) {mods}** [{stars}]\n\ {grade} {pp} ~ {acc}% ~ {score}{appendix}\n[ {combo} ] ~ {hits} ~ {ago}", title = mapset.title, version = map.version, id = map.map_id, mods = osu::get_mods(score.mods), grade = score.grade_emote(score.mode), acc = score.acc(score.mode), score = with_comma_int(score.score), appendix = OrderAppendix::new(TopOrder::Other(sort_by), map, mapset_opt, score, &farm), combo = osu::get_combo(score, map), hits = score.hits_string(score.mode), ago = how_long_ago_dynamic(&score.created_at) ); } description.pop(); let footer_text = format!( "Page {}/{} | Mode: {}", pages.0, pages.1, mode_str(user.mode) ); Self { author: author!(user), description, footer: Footer::new(footer_text), thumbnail: user.avatar_url.to_owned(), } }
function_block-function_prefixed
[ { "content": "fn new_pp(pp: f32, user: &User, scores: &[Score], actual_offset: f32) -> (usize, f32) {\n\n let actual: f32 = scores\n\n .iter()\n\n .filter_map(|s| s.weight)\n\n .fold(0.0, |sum, weight| sum + weight.pp);\n\n\n\n let total = user.statistics.as_ref().map_or(0.0, |stats| stats.pp);\n\n let bonus_pp = total - (actual + actual_offset);\n\n let mut new_pp = 0.0;\n\n let mut used = false;\n\n let mut new_pos = scores.len();\n\n let mut factor = 1.0;\n\n\n\n let pp_iter = scores.iter().take(99).filter_map(|s| s.pp).enumerate();\n\n\n\n for (i, pp_value) in pp_iter {\n\n if !used && pp_value < pp {\n\n used = true;\n\n new_pp += pp * factor;\n\n factor *= 0.95;\n", "file_path": "src/embeds/osu/fix_score.rs", "rank": 0, "score": 467558.4575909388 }, { "content": "fn write_compact_score(args: &mut WriteArgs<'_>, i: usize, score: &Score, stars: f32, pp: f32) {\n\n let config = CONFIG.get().unwrap();\n\n\n\n let _ = write!(\n\n args.description,\n\n \"{grade} **+{mods}** [{stars:.2}★] {pp_format}{pp:.2}pp{pp_format} \\\n\n ({acc}%) {combo}x • {miss}{miss_emote} {timestamp}\",\n\n grade = config.grade(score.grade),\n\n mods = score.mods,\n\n pp_format = if args.pp_idx == Some(i) { \"**\" } else { \"~~\" },\n\n acc = round(score.accuracy),\n\n combo = score.max_combo,\n\n miss = score.statistics.count_miss,\n\n miss_emote = Emote::Miss.text(),\n\n timestamp = how_long_ago_dynamic(&score.created_at),\n\n );\n\n\n\n let mut pinned = args.pinned.iter();\n\n\n\n if pinned.any(|s| s.score_id == score.score_id && s.mods == score.mods) {\n", "file_path": "src/embeds/osu/scores.rs", "rank": 1, "score": 462648.4507540302 }, { "content": "/// First element: Weighted missing pp to reach goal from start\n\n///\n\n/// Second element: Index of hypothetical pp in pps\n\npub fn pp_missing(start: f32, goal: f32, pps: impl IntoPpIter) -> (f32, usize) {\n\n let mut top = start;\n\n let mut bot = 0.0;\n\n\n\n // top + x * 0.95^i + bot = goal\n\n // <=> x = (goal - top - bot) / 
0.95^i\n\n fn calculate_remaining(idx: usize, goal: f32, top: f32, bot: f32) -> (f32, usize) {\n\n let factor = 0.95_f32.powi(idx as i32);\n\n let required = (goal - top - bot) / factor;\n\n\n\n (required, idx)\n\n }\n\n\n\n for (i, last_pp) in pps.into_pps().enumerate().rev() {\n\n let factor = 0.95_f32.powi(i as i32);\n\n let term = factor * last_pp;\n\n let bot_term = term * 0.95;\n\n\n\n if top + bot + bot_term >= goal {\n\n return calculate_remaining(i + 1, goal, top, bot);\n\n }\n\n\n\n bot += bot_term;\n\n top -= term;\n\n }\n\n\n\n calculate_remaining(0, goal, top, bot)\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 2, "score": 444758.3533309861 }, { "content": "// Credits to flowabot\n\n/// Extend the list of pps by taking the average difference\n\n/// between 2 values towards the end and create more values\n\n/// based on that difference\n\npub fn approx_more_pp(pps: &mut Vec<f32>, more: usize) {\n\n if pps.len() != 100 {\n\n return;\n\n }\n\n\n\n let diff = (pps[89] - pps[99]) / 10.0;\n\n\n\n let extension = iter::successors(pps.last().copied(), |pp| {\n\n let pp = pp - diff;\n\n\n\n (pp > 0.0).then(|| pp)\n\n });\n\n\n\n pps.extend(extension.take(more));\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 3, "score": 437425.88373856875 }, { "content": "/// The stars argument must already be adjusted for mods\n\npub fn get_map_info(map: &Beatmap, mods: GameMods, stars: f32) -> String {\n\n let clock_rate = mods.bits().clock_rate();\n\n\n\n let mut sec_total = map.seconds_total;\n\n let mut sec_drain = map.seconds_drain;\n\n let mut bpm = map.bpm;\n\n let mut cs = map.cs;\n\n let mut ar = map.ar;\n\n let mut od = map.od;\n\n let mut hp = map.hp;\n\n\n\n if mods.contains(GameMods::HardRock) {\n\n hp = (hp * 1.4).min(10.0);\n\n od = (od * 1.4).min(10.0);\n\n ar = (ar * 1.4).min(10.0);\n\n cs = (cs * 1.3).min(10.0);\n\n } else if mods.contains(GameMods::Easy) {\n\n hp *= 0.5;\n\n od *= 0.5;\n\n ar *= 0.5;\n", "file_path": "src/embeds/osu/mod.rs", 
"rank": 4, "score": 402528.1676288205 }, { "content": "pub fn get_combo(score: &dyn ScoreExt, map: &dyn BeatmapExt) -> String {\n\n let mut combo = String::from(\"**\");\n\n let _ = write!(combo, \"{}x**/\", score.max_combo());\n\n\n\n match map.max_combo() {\n\n Some(amount) => write!(combo, \"{amount}x\").unwrap(),\n\n None => combo.push('-'),\n\n }\n\n\n\n combo\n\n}\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 5, "score": 388709.17207935895 }, { "content": "pub fn get_stars(stars: f32) -> String {\n\n format!(\"{:.2}★\", stars)\n\n}\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 6, "score": 379769.1832829786 }, { "content": "fn parse_country_code(ctx: &Context, mut country: String) -> Result<CountryCode, String> {\n\n match country.as_str() {\n\n \"global\" | \"world\" => Ok(\"global\".into()),\n\n _ => {\n\n let country = if country.len() == 2 && country.is_ascii() {\n\n country.make_ascii_uppercase();\n\n\n\n country.into()\n\n } else if let Some(code) = CountryCode::from_name(&country) {\n\n code\n\n } else {\n\n let content = format!(\n\n \"Failed to parse `{country}` as country or country code.\\n\\\n\n Be sure to specify a valid country or two ASCII letter country code.\"\n\n );\n\n\n\n return Err(content);\n\n };\n\n\n\n if !country.snipe_supported(ctx) {\n", "file_path": "src/commands/osu/snipe/mod.rs", "rank": 7, "score": 369477.4544796485 }, { "content": "fn get_attrs(map: &Option<Map>, score: &Score) -> (Option<f32>, Option<f32>, f32) {\n\n match map {\n\n Some(ref map) => {\n\n let mods = score.mods.bits();\n\n let performance = map.pp().mods(mods).calculate();\n\n\n\n let max_pp = performance.pp() as f32;\n\n let stars = performance.stars() as f32;\n\n\n\n let pp = match score.pp {\n\n Some(pp) => pp,\n\n None => {\n\n let performance = map\n\n .pp()\n\n .attributes(performance)\n\n .mods(mods)\n\n .n300(score.statistics.count_300 as usize)\n\n .n100(score.statistics.count_100 as usize)\n\n .n50(score.statistics.count_50 as 
usize)\n\n .n_katu(score.statistics.count_katu as usize)\n", "file_path": "src/embeds/osu/scores.rs", "rank": 8, "score": 362104.27777369664 }, { "content": "pub fn grade_completion_mods(score: &dyn ScoreExt, map: &Beatmap) -> Cow<'static, str> {\n\n let mode = map.mode();\n\n let grade = CONFIG.get().unwrap().grade(score.grade(mode));\n\n let mods = score.mods();\n\n\n\n match (\n\n mods.is_empty(),\n\n score.grade(mode) == Grade::F && mode != GameMode::CTB,\n\n ) {\n\n (true, true) => format!(\"{grade} ({}%)\", completion(score, map)).into(),\n\n (false, true) => format!(\"{grade} ({}%) +{mods}\", completion(score, map)).into(),\n\n (true, false) => grade.into(),\n\n (false, false) => format!(\"{grade} +{mods}\").into(),\n\n }\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 9, "score": 354455.02880480746 }, { "content": "pub fn get_pp(actual: Option<f32>, max: Option<f32>) -> String {\n\n let mut result = String::with_capacity(17);\n\n result.push_str(\"**\");\n\n\n\n if let Some(pp) = actual {\n\n let _ = write!(result, \"{:.2}\", pp);\n\n } else {\n\n result.push('-');\n\n }\n\n\n\n result.push_str(\"**/\");\n\n\n\n if let Some(max) = max {\n\n let pp = actual.map(|pp| pp.max(max)).unwrap_or(max);\n\n let _ = write!(result, \"{:.2}\", pp);\n\n } else {\n\n result.push('-');\n\n }\n\n\n\n result.push_str(\"PP\");\n\n\n\n result\n\n}\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 10, "score": 350926.3006556016 }, { "content": "fn role_string(roles: &[u64], content: &mut String) {\n\n let mut iter = roles.iter();\n\n\n\n if let Some(first) = iter.next() {\n\n content.reserve(roles.len() * 20);\n\n let _ = write!(content, \"<@&{first}>\");\n\n\n\n for role in iter {\n\n let _ = write!(content, \", <@&{role}>\");\n\n }\n\n } else {\n\n content.push_str(\"None\");\n\n }\n\n}\n\n\n\npub(super) enum AuthorityCommandKind {\n\n Add(u64),\n\n List,\n\n Remove(u64),\n\n Replace(Vec<Id<RoleMarker>>),\n\n}\n\n\n", "file_path": 
"src/commands/utility/authorities.rs", "rank": 11, "score": 337657.37873634807 }, { "content": "fn team_scores(map: &CommonMap, match_1: &str, match_2: &str) -> String {\n\n let mut scores = Vec::new();\n\n\n\n for team in [Team::Blue, Team::Red] {\n\n if map.match_1_scores[team as usize] > 0 {\n\n scores.push(TeamScore::new(\n\n team,\n\n match_1,\n\n map.match_1_scores[team as usize],\n\n ));\n\n }\n\n\n\n if map.match_2_scores[team as usize] > 0 {\n\n scores.push(TeamScore::new(\n\n team,\n\n match_2,\n\n map.match_2_scores[team as usize],\n\n ));\n\n }\n\n }\n", "file_path": "src/embeds/osu/match_compare.rs", "rank": 12, "score": 332307.03289008065 }, { "content": "/// \"How many replace/delete/insert operations are necessary to morph one word into the other?\"\n\n///\n\n/// Returns (distance, max word length) tuple\n\npub fn levenshtein_distance<'w>(mut word_a: &'w str, mut word_b: &'w str) -> (usize, usize) {\n\n let m = word_a.chars().count();\n\n let mut n = word_b.chars().count();\n\n\n\n if m > n {\n\n std::mem::swap(&mut word_a, &mut word_b);\n\n n = m;\n\n }\n\n\n\n // u16 is sufficient considering the max length\n\n // of discord messages is smaller than u16::MAX\n\n let mut costs: Vec<_> = (0..=n as u16).collect();\n\n\n\n // SAFETY for get! and set!:\n\n // chars(word_a) <= chars(word_b) = n < n + 1 = costs.len()\n\n\n\n for (a, i) in word_a.chars().zip(1..) {\n\n let mut last_val = i;\n\n\n\n for (b, j) in word_b.chars().zip(1..) {\n", "file_path": "src/util/mod.rs", "rank": 13, "score": 324238.07648554194 }, { "content": "/// Round with two decimal positions\n\npub fn round(n: f32) -> f32 {\n\n (100.0 * n).round() / 100.0\n\n}\n\n\n", "file_path": "src/util/numbers.rs", "rank": 14, "score": 320062.818472159 }, { "content": "fn footer_text(user: &User) -> String {\n\n format!(\n\n \"Joined osu! 
{} ({})\",\n\n date_to_string(&user.join_date),\n\n how_long_ago_text(&user.join_date),\n\n )\n\n}\n\n\n", "file_path": "src/embeds/osu/profile.rs", "rank": 15, "score": 319222.14294230024 }, { "content": "fn completion(score: &dyn ScoreExt, map: &Beatmap) -> u32 {\n\n let passed = score.hits(map.mode() as u8);\n\n let total = map.count_objects();\n\n\n\n 100 * passed / total\n\n}\n\n\n\npub async fn prepare_beatmap_file(ctx: &Context, map_id: u32) -> Result<PathBuf, MapFileError> {\n\n let mut map_path = CONFIG.get().unwrap().paths.maps.clone();\n\n map_path.push(format!(\"{map_id}.osu\"));\n\n\n\n if !map_path.exists() {\n\n let bytes = ctx.clients.custom.get_map_file(map_id).await?;\n\n let mut file = File::create(&map_path).await?;\n\n file.write_all(&bytes).await?;\n\n info!(\"Downloaded {map_id}.osu successfully\");\n\n }\n\n\n\n Ok(map_path)\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 16, "score": 317639.9420534646 }, { "content": "/// Consider the length of the longest common substring, then repeat recursively\n\n/// for the remaining left and right parts of the words\n\npub fn gestalt_pattern_matching(word_a: &str, word_b: &str) -> f32 {\n\n let chars_a = word_a.chars().count();\n\n let chars_b = word_b.chars().count();\n\n\n\n // u16 is sufficient considering the max length\n\n // of discord messages is smaller than u16::MAX\n\n let mut buf = vec![0; chars_a.max(chars_b) + 1];\n\n\n\n // SAFETY: buf.len is set to be 1 + max(chars(word_a), chars(word_b))\n\n let matching_chars = unsafe { _gestalt_pattern_matching(word_a, word_b, &mut buf) };\n\n\n\n (2 * matching_chars) as f32 / (chars_a + chars_b) as f32\n\n}\n\n\n\n/// Caller must guarantee that buf.len is 1 + max(chars(word_a), chars(word_b))\n\nunsafe fn _gestalt_pattern_matching(word_a: &str, word_b: &str, buf: &mut [u16]) -> usize {\n\n let SubstringResult {\n\n start_a,\n\n start_b,\n\n len,\n", "file_path": "src/util/mod.rs", "rank": 17, "score": 313944.679695762 }, { "content": "pub fn 
attachment(filename: impl AsRef<str>) -> String {\n\n #[cfg(debug_assert)]\n\n match filename.rfind('.') {\n\n Some(idx) => {\n\n if filename.get(idx + 1..).map(str::is_empty).is_none() {\n\n panic!(\"expected non-empty extension for attachment\");\n\n }\n\n }\n\n None => panic!(\"expected extension for attachment\"),\n\n }\n\n\n\n format!(\"attachment://{}\", filename.as_ref())\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct EmbedBuilder(Embed);\n\n\n\nimpl Default for EmbedBuilder {\n\n fn default() -> Self {\n\n Self(Embed {\n", "file_path": "src/embeds/mod.rs", "rank": 18, "score": 307604.4411643895 }, { "content": "pub fn last_multiple(per_page: usize, total: usize) -> usize {\n\n if per_page <= total && total % per_page == 0 {\n\n total - per_page\n\n } else {\n\n total - total % per_page\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_round() {\n\n let v1 = 3.1615;\n\n let v2 = 3.16;\n\n\n\n if round(v1) - v2 > std::f32::EPSILON {\n\n panic!(\"[test_round] round({})={} != {}\", v1, round(v1), v2);\n\n }\n", "file_path": "src/util/numbers.rs", "rank": 19, "score": 302645.095274115 }, { "content": "pub fn with_comma_float(n: f32) -> FormatF32 {\n\n FormatF32(n)\n\n}\n\n\n\npub struct FormatF32(f32);\n\n\n\nimpl fmt::Display for FormatF32 {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let n = if self.0 < 0.0 {\n\n f.write_str(\"-\")?;\n\n\n\n -self.0\n\n } else {\n\n self.0\n\n };\n\n\n\n let mut int = n.trunc() as i64;\n\n let mut rev = 0;\n\n let mut triples = 0;\n\n\n", "file_path": "src/util/numbers.rs", "rank": 20, "score": 301677.32629026606 }, { "content": "fn map_name(map: &BeatmapCompact) -> String {\n\n let mut name = String::new();\n\n\n\n if let Some(ref mapset) = map.mapset {\n\n let _ = write!(name, \"{}\", mapset.title);\n\n } else {\n\n name.push_str(\"<unknown title>\")\n\n }\n\n\n\n let _ = write!(name, \" [{}]\", map.version);\n\n\n\n name\n\n}\n\n\n", "file_path": 
"src/commands/osu/match_compare.rs", "rank": 21, "score": 301163.54079030594 }, { "content": "pub fn get_keys(mods: GameMods, map: &Beatmap) -> String {\n\n if let Some(key_mod) = mods.has_key_mod() {\n\n format!(\"[{key_mod}]\")\n\n } else {\n\n format!(\"[{}K]\", map.cs as u32)\n\n }\n\n}\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 22, "score": 296283.26116893836 }, { "content": "pub fn date_to_string(date: &DateTime<Utc>) -> String {\n\n date.format(DATE_FORMAT).to_string()\n\n}\n\n\n", "file_path": "src/util/datetime.rs", "rank": 23, "score": 289402.3508515429 }, { "content": "/// Return the description and image for a either in-progress or finished games\n\nfn game_content(lobby: &OsuMatch, game: &MatchGame) -> (String, Option<String>, Option<Footer>) {\n\n let mut description = String::with_capacity(128);\n\n\n\n match game.end_time {\n\n Some(_) => {\n\n let image = match game.map {\n\n Some(ref map) => {\n\n let mapset = map.mapset.as_ref().unwrap();\n\n\n\n let _ = write!(\n\n description,\n\n \"**[{artist} - {title} [{version}]]({OSU_BASE}b/{map_id})\",\n\n artist = mapset.artist,\n\n title = mapset.title,\n\n version = map.version,\n\n map_id = map.map_id,\n\n );\n\n\n\n if !game.mods.is_empty() {\n\n let _ = write!(description, \" +{}\", game.mods);\n", "file_path": "src/embeds/osu/match_live.rs", "rank": 24, "score": 285669.6642593021 }, { "content": "pub fn flag_url(country_code: &str) -> String {\n\n // format!(\"{}/images/flags/{}.png\", OSU_BASE, country_code) // from osu itself but outdated\n\n format!(\"https://osuflags.omkserver.nl/{country_code}-256.png\") // kelderman\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 25, "score": 284950.9075064121 }, { "content": "#[allow(dead_code)]\n\npub fn flag_url_svg(country_code: &str) -> String {\n\n assert_eq!(\n\n country_code.len(),\n\n 2,\n\n \"country code `{country_code}` is invalid\",\n\n );\n\n\n\n const OFFSET: u32 = 0x1F1A5;\n\n let bytes = country_code.as_bytes();\n\n\n\n let 
url = format!(\n\n \"{OSU_BASE}assets/images/flags/{:x}-{:x}.svg\",\n\n bytes[0].to_ascii_uppercase() as u32 + OFFSET,\n\n bytes[1].to_ascii_uppercase() as u32 + OFFSET\n\n );\n\n\n\n url\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 26, "score": 281079.9788283142 }, { "content": "fn current_prefixes(content: &mut String, prefixes: &[Prefix]) {\n\n content.push_str(\"Prefixes for this server: \");\n\n let len = prefixes.iter().map(|p| p.len() + 4).sum();\n\n content.reserve_exact(len);\n\n let mut prefixes = prefixes.iter();\n\n\n\n if let Some(first) = prefixes.next() {\n\n let _ = write!(content, \"`{first}`\");\n\n\n\n for prefix in prefixes {\n\n let _ = write!(content, \", `{prefix}`\");\n\n }\n\n }\n\n}\n", "file_path": "src/commands/utility/prefix.rs", "rank": 27, "score": 278257.2919106764 }, { "content": "#[allow(dead_code)]\n\npub fn string_to_date(date: String) -> BotResult<DateTime<Utc>> {\n\n Ok(Utc.datetime_from_str(&date, DATE_FORMAT)?)\n\n}\n\n\n", "file_path": "src/util/datetime.rs", "rank": 28, "score": 278227.43149851623 }, { "content": "fn main_fields(user: &User, stats: &UserStatistics, bonus_pp: f32) -> Vec<EmbedField> {\n\n let level = stats.level.float();\n\n\n\n vec![\n\n field!(\n\n \"Ranked score\",\n\n with_comma_int(stats.ranked_score).to_string(),\n\n true\n\n ),\n\n field!(\"Accuracy\", format!(\"{:.2}%\", stats.accuracy), true),\n\n field!(\n\n \"Max combo\",\n\n with_comma_int(stats.max_combo).to_string(),\n\n true\n\n ),\n\n field!(\n\n \"Total score\",\n\n with_comma_int(stats.total_score).to_string(),\n\n true\n\n ),\n", "file_path": "src/embeds/osu/profile.rs", "rank": 29, "score": 278132.94107909 }, { "content": "pub fn get_osu_mapset_id(msg: &str) -> Option<MapIdType> {\n\n if let Ok(id) = msg.parse::<u32>() {\n\n return Some(MapIdType::Set(id));\n\n }\n\n\n\n if !msg.contains(OSU_BASE) {\n\n return None;\n\n }\n\n\n\n OSU_URL_MAPSET_OLD_MATCHER\n\n .captures(msg)\n\n .or_else(|| 
OSU_URL_MAP_NEW_MATCHER.captures(msg))\n\n .and_then(|c| c.get(1))\n\n .and_then(|c| c.as_str().parse::<u32>().ok())\n\n .map(MapIdType::Set)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 30, "score": 276971.5244619357 }, { "content": "pub fn levenshtein_similarity(word_a: &str, word_b: &str) -> f32 {\n\n let (dist, len) = levenshtein_distance(word_a, word_b);\n\n\n\n (len - dist) as f32 / len as f32\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 31, "score": 272361.40005761024 }, { "content": "fn acc_to_score(mod_mult: f32, acc: f32) -> u64 {\n\n (mod_mult * (acc * 10_000.0 - (100.0 - acc) * 50_000.0)).round() as u64\n\n}\n\n\n\nimpl_builder!(MapEmbed {\n\n author,\n\n description,\n\n fields,\n\n footer,\n\n image,\n\n thumbnail,\n\n timestamp,\n\n title,\n\n url,\n\n});\n", "file_path": "src/embeds/osu/map.rs", "rank": 32, "score": 265806.4362164992 }, { "content": "pub fn div_euclid(group: usize, total: usize) -> usize {\n\n if total % group == 0 && total > 0 {\n\n total / group\n\n } else {\n\n total.div_euclid(group) + 1\n\n }\n\n}\n\n\n", "file_path": "src/util/numbers.rs", "rank": 33, "score": 260513.93357282816 }, { "content": "pub fn calculate_od(od: f32, clock_rate: f32) -> f32 {\n\n let ms = difficulty_range(od, OD_MIN, OD_MID, OD_MAX) / clock_rate;\n\n\n\n (OD_MIN - ms) / (OD_MIN - OD_MID) * 5.0\n\n}\n\n\n\nconst OD_MIN: f32 = 80.0;\n\nconst OD_MID: f32 = 50.0;\n\nconst OD_MAX: f32 = 20.0;\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 34, "score": 256675.69204599035 }, { "content": "pub fn calculate_ar(ar: f32, clock_rate: f32) -> f32 {\n\n let ms = difficulty_range(ar, AR_MIN, AR_MID, AR_MAX) / clock_rate;\n\n\n\n if ms > AR_MID {\n\n (AR_MIN - ms) / (AR_MIN - AR_MID) * 5.0\n\n } else {\n\n (AR_MID - ms) / (AR_MID - AR_MAX) * 5.0 + 5.0\n\n }\n\n}\n\n\n\nconst AR_MIN: f32 = 1800.0;\n\nconst AR_MID: f32 = 1200.0;\n\nconst AR_MAX: f32 = 450.0;\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 35, "score": 256675.69204599035 
}, { "content": "fn needs_unchoking(score: &Score, map: &Beatmap) -> bool {\n\n match map.mode {\n\n GameMode::STD => {\n\n score.statistics.count_miss > 0\n\n || score.max_combo < map.max_combo.map_or(0, |c| c.saturating_sub(5))\n\n }\n\n GameMode::TKO => score.statistics.count_miss > 0,\n\n GameMode::CTB => score.max_combo != map.max_combo.unwrap_or(0),\n\n GameMode::MNA => panic!(\"can not unchoke mania scores\"),\n\n }\n\n}\n\n\n", "file_path": "src/commands/osu/fix.rs", "rank": 36, "score": 255495.38255424754 }, { "content": "pub fn tourney_badge(description: &str) -> bool {\n\n !IGNORE_BADGE_MATCHER.is_match_at(description, 0)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 37, "score": 255136.09757554735 }, { "content": "fn needs_unchoking(score: &Score, map: &Beatmap) -> bool {\n\n match map.mode {\n\n GameMode::STD => {\n\n score.statistics.count_miss > 0\n\n || score.max_combo < map.max_combo.map_or(0, |c| c.saturating_sub(5))\n\n }\n\n GameMode::TKO => score.statistics.count_miss > 0,\n\n GameMode::CTB => score.max_combo != map.max_combo.unwrap_or(0),\n\n GameMode::MNA => panic!(\"can not unchoke mania scores\"),\n\n }\n\n}\n\n\n\npub(super) struct FixArgs {\n\n mode: GameMode,\n\n name: Option<Username>,\n\n index: Option<usize>,\n\n}\n\n\n\nimpl FixArgs {\n\n pub(super) async fn slash(\n", "file_path": "src/commands/osu/recent/fix.rs", "rank": 38, "score": 252873.13031136148 }, { "content": "struct MatchEntry {\n\n tracked: TrackedMatch,\n\n // Not a set since the list is expected to be very short and thus cheap to iterate over.\n\n /// Channels that are tracking the match\n\n channels: SmallVec<[Channel; 2]>,\n\n}\n\n\n", "file_path": "src/core/context/impls/match_live.rs", "rank": 39, "score": 251378.37862149894 }, { "content": "struct TrackedMatch {\n\n /// Most recent update of the match\n\n osu_match: OsuMatch,\n\n /// All embeds of the match\n\n embeds: Vec<MatchLiveEmbed>,\n\n}\n\n\n\nimpl TrackedMatch {\n\n fn new(osu_match: OsuMatch, 
embeds: MatchLiveEmbeds) -> Self {\n\n Self {\n\n osu_match,\n\n embeds: embeds.into_vec(),\n\n }\n\n }\n\n}\n\n\n\n/// Sends a message to the channel for each embed\n\n/// and returns the last of these messages\n\nasync fn send_match_messages(\n\n ctx: &Context,\n", "file_path": "src/core/context/impls/match_live.rs", "rank": 40, "score": 251378.37862149894 }, { "content": "pub fn map_id_from_history(msgs: &[Message]) -> Option<MapIdType> {\n\n msgs.iter().find_map(map_id_from_msg)\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 41, "score": 249865.26520654 }, { "content": "pub fn map_id_from_msg(msg: &Message) -> Option<MapIdType> {\n\n if msg.content.chars().all(|c| c.is_numeric()) {\n\n return check_embeds_for_map_id(&msg.embeds);\n\n }\n\n\n\n matcher::get_osu_map_id(&msg.content)\n\n .or_else(|| matcher::get_osu_mapset_id(&msg.content))\n\n .or_else(|| check_embeds_for_map_id(&msg.embeds))\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 42, "score": 249865.26520654 }, { "content": "pub fn get_mention_user(msg: &str) -> Option<Id<UserMarker>> {\n\n msg.parse::<u64>()\n\n .is_err()\n\n .then(|| get_mention(MentionType::User, msg))\n\n .flatten()\n\n .and_then(Id::new_checked)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 43, "score": 247323.69023530718 }, { "content": "#[allow(dead_code)]\n\npub fn get_osu_user_id(msg: &str) -> Option<OsuUserId> {\n\n OSU_URL_USER_MATCHER.captures(msg).and_then(|c| {\n\n c.get(1)\n\n .and_then(|m| m.as_str().parse().ok())\n\n .map(OsuUserId::Id)\n\n .or_else(|| c.get(2).map(|m| OsuUserId::Name(m.as_str().into())))\n\n })\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 44, "score": 246753.62578606937 }, { "content": "pub fn get_osu_map_id(msg: &str) -> Option<MapIdType> {\n\n if let Ok(id) = msg.parse::<u32>() {\n\n return Some(MapIdType::Map(id));\n\n }\n\n\n\n if !msg.contains(OSU_BASE) {\n\n return None;\n\n }\n\n\n\n let matcher = if let Some(c) = OSU_URL_MAP_OLD_MATCHER.captures(msg) {\n\n 
c.get(1)\n\n } else {\n\n OSU_URL_MAP_NEW_MATCHER.captures(msg).and_then(|c| c.get(2))\n\n };\n\n\n\n matcher.and_then(|c| c.as_str().parse::<u32>().ok().map(MapIdType::Map))\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 45, "score": 246570.1452204113 }, { "content": "struct PPFormatter(f32, f32);\n\n\n\nimpl fmt::Display for PPFormatter {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"**{:.2}**/{:.2}PP\", self.0, self.1)\n\n }\n\n}\n\n\n", "file_path": "src/embeds/osu/leaderboard.rs", "rank": 46, "score": 245033.39522400277 }, { "content": "#[derive(Default)]\n\nstruct MatchLiveChannelsInner {\n\n /// Mapping match ids to channels that track them\n\n match_channels: HashMap<u32, MatchEntry>,\n\n\n\n /// Mapping channels to the amount of tracked matches in that channel\n\n channel_count: HashMap<Id<ChannelMarker>, u8>,\n\n}\n\n\n", "file_path": "src/core/context/impls/match_live.rs", "rank": 47, "score": 243525.36894918428 }, { "content": "struct Channel {\n\n id: Id<ChannelMarker>,\n\n /// Last msg in the channel\n\n msg_id: Id<MessageMarker>,\n\n}\n\n\n\nimpl MatchLiveChannels {\n\n pub fn new() -> Self {\n\n Self {\n\n inner: Mutex::new(MatchLiveChannelsInner::default()),\n\n }\n\n }\n\n}\n\n\n\nimpl MatchEntry {\n\n fn new(tracked: TrackedMatch, channel: Channel) -> Self {\n\n Self {\n\n tracked,\n\n channels: smallvec![channel],\n\n }\n", "file_path": "src/core/context/impls/match_live.rs", "rank": 48, "score": 243510.04296902945 }, { "content": "fn insert_mapset_<'a>(\n\n conn: &'a mut PgConnection,\n\n mapset: &'a Beatmapset,\n\n) -> BoxFuture<'a, InsertMapResult<()>> {\n\n let fut = async move {\n\n let query = sqlx::query!(\n\n \"INSERT INTO mapsets (\\\n\n mapset_id,\\\n\n user_id,\\\n\n artist,\\\n\n title,\\\n\n creator,\\\n\n status,\\\n\n ranked_date,\\\n\n bpm\\\n\n )\\\n\n VALUES\\\n\n ($1,$2,$3,$4,$5,$6,$7,$8)\\\n\n ON CONFLICT (mapset_id) DO NOTHING\",\n\n mapset.mapset_id as i32,\n", "file_path": 
"src/database/impls/maps.rs", "rank": 49, "score": 241607.24093424794 }, { "content": "pub fn get_osu_match_id(msg: &str) -> Option<u32> {\n\n if let Ok(id) = msg.parse::<u32>() {\n\n return Some(id);\n\n }\n\n\n\n OSU_URL_MATCH_MATCHER\n\n .captures(msg)\n\n .and_then(|c| c.get(1))\n\n .and_then(|c| c.as_str().parse::<u32>().ok())\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 50, "score": 240268.283069996 }, { "content": "fn log_invoke(ctx: &Context, msg: &Message) {\n\n let location = MessageLocationLog { ctx, msg };\n\n info!(\"[{location}] {}: {}\", msg.author.name, msg.content);\n\n}\n\n\n", "file_path": "src/core/commands/handle_message.rs", "rank": 51, "score": 237434.2846350642 }, { "content": "fn backtrack_subcommand(title: &mut String) -> PartResult {\n\n let index = title.chars().filter(char::is_ascii_whitespace).count();\n\n let mut names = title.split(' ').take(index);\n\n let base = names.next().ok_or(InvalidHelpState::MissingTitle)?;\n\n\n\n let command = SLASH_COMMANDS\n\n .command(base)\n\n .ok_or(InvalidHelpState::UnknownCommand)?;\n\n\n\n let authority = command.authority;\n\n let mut iter = CommandIter::from(command);\n\n\n\n for name in names {\n\n if iter.next(name) {\n\n return Err(InvalidHelpState::UnknownCommand);\n\n }\n\n }\n\n\n\n if let Some(pos) = title.rfind(' ') {\n\n title.truncate(pos);\n", "file_path": "src/commands/help/interaction.rs", "rank": 52, "score": 237095.96905369198 }, { "content": "fn personal_idx(score: &Score, scores: &[Score]) -> Option<usize> {\n\n scores\n\n .iter()\n\n .position(|s| s.created_at == score.created_at)\n\n .map(|i| i + 1)\n\n}\n\n\n", "file_path": "src/embeds/osu/scores.rs", "rank": 53, "score": 235590.135416541 }, { "content": "pub fn get_mods(mods: GameMods) -> String {\n\n if mods.is_empty() {\n\n String::new()\n\n } else {\n\n format!(\"+{mods}\")\n\n }\n\n}\n\n\n", "file_path": "src/embeds/osu/mod.rs", "rank": 54, "score": 235213.01183067856 }, { "content": "pub fn process_match(\n\n 
games: &[MatchGame],\n\n finished: bool,\n\n users: &StdHashMap<u32, UserCompact>,\n\n) -> MatchResult {\n\n let mut teams = HashMap::new();\n\n let mut point_costs = HashMap::new();\n\n let mut mods = HashMap::new();\n\n let team_vs = games[0].team_type == TeamType::TeamVS;\n\n let mut match_scores = MatchScores(0, 0);\n\n\n\n // Calculate point scores for each score in each game\n\n for game in games.iter() {\n\n let score_sum: f32 = game.scores.iter().map(|s| s.score as f32).sum();\n\n\n\n let avg = score_sum / game.scores.iter().filter(|s| s.score > 0).count() as f32;\n\n let mut team_scores = HashMap::with_capacity(team_vs as usize + 1);\n\n\n\n for score in game.scores.iter().filter(|s| s.score > 0) {\n\n mods.entry(score.user_id)\n", "file_path": "src/commands/osu/match_costs.rs", "rank": 55, "score": 233637.61096352933 }, { "content": "pub fn get_osu_score_id(msg: &str) -> Option<(GameMode, u64)> {\n\n OSU_SCORE_URL_MATCHER\n\n .captures(msg)\n\n .and_then(|c| c.get(1).zip(c.get(2)))\n\n .and_then(|(mode, id)| {\n\n let mode = match mode.as_str() {\n\n OSU => GameMode::STD,\n\n TAIKO => GameMode::TKO,\n\n FRUITS => GameMode::CTB,\n\n MANIA => GameMode::MNA,\n\n _ => return None,\n\n };\n\n\n\n let id = id.as_str().parse().ok()?;\n\n\n\n Some((mode, id))\n\n })\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 56, "score": 230496.49753679358 }, { "content": "pub fn define_pp() -> MyCommand {\n\n let pp = MyCommandOption::builder(\"pp\", \"Specify a target pp amount\")\n\n .min_num(0.0)\n\n .number(Vec::new(), true);\n\n\n\n let mode = option_mode();\n\n let name = option_name();\n\n let discord = option_discord();\n\n\n\n let each_description =\n\n \"Fill a top100 with scores of this many pp until the target total pp are reached\";\n\n\n\n let each = MyCommandOption::builder(\"each\", each_description)\n\n .min_num(0.0)\n\n .number(Vec::new(), false);\n\n\n\n let description = \"How many pp is a user missing to reach the given amount?\";\n\n\n\n 
MyCommand::new(\"pp\", description).options(vec![pp, mode, name, each, discord])\n\n}\n", "file_path": "src/commands/osu/pp.rs", "rank": 57, "score": 229687.88272795 }, { "content": "pub fn define_map() -> MyCommand {\n\n let map = option_map();\n\n let mods = option_mods(false);\n\n\n\n let ar = MyCommandOption::builder(\"ar\", \"Specify an AR value to override the actual one\")\n\n .min_num(0.0)\n\n .max_num(10.0)\n\n .number(Vec::new(), false);\n\n\n\n let od = MyCommandOption::builder(\"od\", \"Specify an OD value to override the actual one\")\n\n .min_num(0.0)\n\n .max_num(10.0)\n\n .number(Vec::new(), false);\n\n\n\n let cs = MyCommandOption::builder(\"cs\", \"Specify a CS value to override the actual one\")\n\n .min_num(0.0)\n\n .max_num(10.0)\n\n .number(Vec::new(), false);\n\n\n\n let hp = MyCommandOption::builder(\"hp\", \"Specify an HP value to override the actual one\")\n", "file_path": "src/commands/osu/map.rs", "rank": 58, "score": 229459.10142568365 }, { "content": "fn router(ctx: Arc<Context>) -> Router<Body, ServerError> {\n\n let connector = HttpsConnectorBuilder::new()\n\n .with_webpki_roots()\n\n .https_or_http()\n\n .enable_http1()\n\n .build();\n\n\n\n let client = HyperClient::builder().build(connector);\n\n let config = CONFIG.get().unwrap();\n\n\n\n let osu_client_id = config.tokens.osu_client_id;\n\n let osu_client_secret = config.tokens.osu_client_secret.to_owned();\n\n\n\n let twitch_client_id = config.tokens.twitch_client_id.to_owned();\n\n let twitch_client_secret = config.tokens.twitch_token.to_owned();\n\n\n\n let url = &config.server.external_url;\n\n let osu_redirect = format!(\"{url}/auth/osu\");\n\n let twitch_redirect = format!(\"{url}/auth/twitch\");\n\n\n", "file_path": "src/server/mod.rs", "rank": 59, "score": 228420.3662757796 }, { "content": "fn should_not_be_stored(map: &Beatmap) -> bool {\n\n invalid_status!(map) || map.convert || (map.mode != GameMode::MNA && map.max_combo.is_none())\n\n}\n\n\n\nimpl Database {\n\n pub 
async fn get_beatmap(&self, map_id: u32, with_mapset: bool) -> BotResult<Beatmap> {\n\n let mut conn = self.pool.acquire().await?;\n\n\n\n let query = sqlx::query_as!(\n\n DBBeatmap,\n\n \"SELECT * FROM maps WHERE map_id=$1\",\n\n map_id as i32\n\n );\n\n\n\n let row = query.fetch_one(&mut conn).await?;\n\n let mut map = Beatmap::from(row);\n\n\n\n if with_mapset {\n\n let query = sqlx::query_as!(\n\n DBBeatmapset,\n", "file_path": "src/database/impls/maps.rs", "rank": 60, "score": 227547.03108196557 }, { "content": "pub trait ScoreExt: Send + Sync {\n\n // Required to implement\n\n fn count_miss(&self) -> u32;\n\n fn count_50(&self) -> u32;\n\n fn count_100(&self) -> u32;\n\n fn count_300(&self) -> u32;\n\n fn count_geki(&self) -> u32;\n\n fn count_katu(&self) -> u32;\n\n fn max_combo(&self) -> u32;\n\n fn mods(&self) -> GameMods;\n\n fn score(&self) -> u32;\n\n fn pp(&self) -> Option<f32>;\n\n fn acc(&self, mode: GameMode) -> f32;\n\n\n\n // Optional to implement\n\n fn grade(&self, mode: GameMode) -> Grade {\n\n match mode {\n\n GameMode::STD => self.osu_grade(),\n\n GameMode::MNA => self.mania_grade(Some(self.acc(GameMode::MNA))),\n\n GameMode::CTB => self.ctb_grade(Some(self.acc(GameMode::CTB))),\n", "file_path": "src/util/exts/score.rs", "rank": 61, "score": 226881.545702916 }, { "content": "pub fn parse_invoke(stream: &mut Stream<'_>) -> Invoke {\n\n let mut name = stream\n\n .take_until_char(|c| c.is_whitespace() || c.is_numeric())\n\n .cow_to_ascii_lowercase();\n\n\n\n let num_str = stream.take_while_char(char::is_numeric);\n\n\n\n let num = if num_str.is_empty() {\n\n None\n\n } else if name.is_empty() {\n\n name = Cow::Borrowed(num_str);\n\n\n\n None\n\n } else {\n\n let n = num_str.chars().fold(0_usize, |n, c| {\n\n n.wrapping_mul(10).wrapping_add((c as u8 & 0xF) as usize)\n\n });\n\n\n\n Some(n)\n\n };\n", "file_path": "src/core/commands/parse.rs", "rank": 62, "score": 226606.87489257785 }, { "content": "fn get_combo<'a>(score: &'a ScraperScore, map: 
&'a Beatmap) -> ComboFormatter<'a> {\n\n ComboFormatter(score, map)\n\n}\n\n\n", "file_path": "src/embeds/osu/leaderboard.rs", "rank": 63, "score": 224760.47013568965 }, { "content": "pub fn if_fc_struct(\n\n score: &Score,\n\n map: &Map,\n\n attributes: DifficultyAttributes,\n\n mods: u32,\n\n) -> (Option<IfFC>, DifficultyAttributes) {\n\n match attributes {\n\n DifficultyAttributes::Osu(attributes)\n\n if score.statistics.count_miss > 0\n\n || score.max_combo\n\n // Allowing one missed sliderend per 500 combo\n\n < (attributes.max_combo - (attributes.max_combo / 500).max(5)) as u32 =>\n\n {\n\n let total_objects = (map.n_circles + map.n_sliders + map.n_spinners) as usize;\n\n let passed_objects = (score.statistics.count_300\n\n + score.statistics.count_100\n\n + score.statistics.count_50\n\n + score.statistics.count_miss) as usize;\n\n\n\n let mut count300 =\n", "file_path": "src/embeds/osu/recent.rs", "rank": 64, "score": 224754.53659109242 }, { "content": "fn continue_subcommand(title: &mut String, name: &str) -> PartResult {\n\n let mut names = title.split(' ');\n\n let base = names.next().ok_or(InvalidHelpState::MissingTitle)?;\n\n\n\n let command = SLASH_COMMANDS\n\n .command(base)\n\n .ok_or(InvalidHelpState::UnknownCommand)?;\n\n\n\n let authority = command.authority;\n\n let mut iter = CommandIter::from(command);\n\n\n\n for name in names {\n\n if iter.next(name) {\n\n return Err(InvalidHelpState::UnknownCommand);\n\n }\n\n }\n\n\n\n if iter.next(name) {\n\n return Err(InvalidHelpState::UnknownCommand);\n\n }\n\n\n\n let command = Parts::from(iter);\n\n let _ = write!(title, \" {}\", command.name);\n\n\n\n Ok((command, authority))\n\n}\n\n\n", "file_path": "src/commands/help/interaction.rs", "rank": 65, "score": 222633.0294547227 }, { "content": "pub fn define_prune() -> MyCommand {\n\n let amount_help = \"Choose the amount of messages to delete. 
Should be between 1 and 99.\";\n\n\n\n let amount = MyCommandOption::builder(\"amount\", \"Choose the amount of messages to delete\")\n\n .help(amount_help)\n\n .min_int(1)\n\n .integer(Vec::new(), true);\n\n\n\n let help = \"Delete the last few messages in a channel.\\n\\\n\n Messages older than two weeks __cannot__ be deleted with this command.\";\n\n\n\n MyCommand::new(\"prune\", \"Delete the last few messages in a channel\")\n\n .help(help)\n\n .options(vec![amount])\n\n}\n", "file_path": "src/commands/utility/prune.rs", "rank": 66, "score": 216835.25631613366 }, { "content": "pub fn define_config() -> MyCommand {\n\n let osu_description = \"Specify whether you want to link to an osu! profile\";\n\n\n\n let osu_help = \"Most osu! commands require a specified username to work.\\n\\\n\n Since using a command is most commonly intended for your own profile, you can link \\\n\n your discord with an osu! profile so that when no username is specified in commands, \\\n\n it will choose the linked username.\\n\\\n\n If the value is set to `Link`, it will prompt you to authorize your account.\\n\\\n\n If `Unlink` is selected, you will be unlinked from the osu! profile.\";\n\n\n\n let osu = MyCommandOption::builder(OSU, osu_description)\n\n .help(osu_help)\n\n .string(link_options(), false);\n\n\n\n let twitch_description = \"Specify whether you want to link to a twitch profile\";\n\n\n\n let twitch_help = \"With this option you can link to a twitch channel.\\n\\\n\n When you have both your osu! and twitch linked, are currently streaming, and anyone uses \\\n\n the `recent score` command on your osu! 
username, it will try to retrieve the last VOD from your \\\n\n twitch channel and link to a timestamp for the score.\\n\\\n", "file_path": "src/commands/utility/config.rs", "rank": 67, "score": 216835.25631613366 }, { "content": "pub fn define_roll() -> MyCommand {\n\n let limit = MyCommandOption::builder(\"limit\", \"Specify an upper limit, defaults to 100\")\n\n .min_int(0)\n\n .integer(Vec::new(), false);\n\n\n\n MyCommand::new(\"roll\", \"Roll a random number\").options(vec![limit])\n\n}\n", "file_path": "src/commands/utility/roll.rs", "rank": 68, "score": 216835.25631613366 }, { "content": "pub fn define_invite() -> MyCommand {\n\n MyCommand::new(\"invite\", \"Invite me to your server\")\n\n}\n", "file_path": "src/commands/utility/invite.rs", "rank": 69, "score": 216835.25631613366 }, { "content": "pub fn define_ping() -> MyCommand {\n\n let help = \"Most basic command, generally used to check if the bot is online.\\n\\\n\n The displayed latency is the time it takes for the bot \\\n\n to receive a response from discord after sending a message.\";\n\n\n\n MyCommand::new(\"ping\", \"Check if I'm online\").help(help)\n\n}\n", "file_path": "src/commands/utility/ping.rs", "rank": 70, "score": 216835.25631613366 }, { "content": "pub fn define_matchlive() -> MyCommand {\n\n let track = MyCommandOption::builder(\"track\", \"Start tracking a match\")\n\n .subcommand(vec![option_match_url()]);\n\n\n\n let untrack =\n\n MyCommandOption::builder(\"untrack\", \"Untrack a match\").subcommand(vec![option_match_url()]);\n\n\n\n let help = \"Similar to what an mp link does, this command will \\\n\n keep a channel up to date about events in a multiplayer match.\";\n\n\n\n MyCommand::new(\"matchlive\", \"Live track a multiplayer match\")\n\n .help(help)\n\n .options(vec![track, untrack])\n\n .authority()\n\n}\n", "file_path": "src/commands/osu/match_live.rs", "rank": 71, "score": 214059.85081013496 }, { "content": "pub fn define_matchcost() -> MyCommand {\n\n let match_url = 
MyCommandOption::builder(\"match_url\", \"Specify a match url or match id\")\n\n .string(Vec::new(), true);\n\n\n\n let warmup_description = \"Specify the amount of warmups to ignore (defaults to 2)\";\n\n\n\n let warmup_help =\n\n \"Since warmup maps commonly want to be skipped for performance calculations, \\\n\n this option allows you to specify how many maps should be ignored in the beginning.\\n\\\n\n If no value is specified, it defaults to 2.\";\n\n\n\n let warmups = MyCommandOption::builder(\"warmups\", warmup_description)\n\n .help(warmup_help)\n\n .min_int(0)\n\n .integer(Vec::new(), false);\n\n\n\n let ez_mult_help = \"Specify a multiplier for EZ scores.\\n\\\n\n The suggested multiplier range is 1.0-2.0\";\n\n\n\n let ez_mult = MyCommandOption::builder(\"ez_multiplier\", \"Specify a multiplier for EZ scores\")\n", "file_path": "src/commands/osu/match_costs.rs", "rank": 72, "score": 214059.85081013496 }, { "content": "pub fn define_matchcompare() -> MyCommand {\n\n let match_url_1 =\n\n MyCommandOption::builder(\"match_url_1\", \"Specify the first match url or match id\")\n\n .string(Vec::new(), true);\n\n\n\n let match_url_2 =\n\n MyCommandOption::builder(\"match_url_2\", \"Specify the second match url or match id\")\n\n .string(Vec::new(), true);\n\n\n\n let comparison_choices = vec![\n\n CommandOptionChoice::String {\n\n name: \"Compare players\".to_owned(),\n\n value: \"players\".to_owned(),\n\n },\n\n CommandOptionChoice::String {\n\n name: \"Compare teams\".to_owned(),\n\n value: \"teams\".to_owned(),\n\n },\n\n CommandOptionChoice::String {\n\n name: \"Compare both\".to_owned(),\n", "file_path": "src/commands/osu/match_compare.rs", "rank": 73, "score": 214059.85081013496 }, { "content": "pub fn define_mapsearch() -> MyCommand {\n\n let query =\n\n MyCommandOption::builder(\"query\", \"Specify a search query\").string(Vec::new(), false);\n\n let mode = option_mode();\n\n\n\n let status_choices = vec![\n\n CommandOptionChoice::String {\n\n name: 
\"any\".to_owned(),\n\n value: \"any\".to_owned(),\n\n },\n\n CommandOptionChoice::String {\n\n name: \"leaderboard\".to_owned(),\n\n value: \"leaderboard\".to_owned(),\n\n },\n\n CommandOptionChoice::String {\n\n name: \"ranked\".to_owned(),\n\n value: \"ranked\".to_owned(),\n\n },\n\n CommandOptionChoice::String {\n\n name: \"loved\".to_owned(),\n", "file_path": "src/commands/osu/map_search.rs", "rank": 74, "score": 214020.99137548066 }, { "content": "pub fn define_cs() -> MyCommand {\n\n let score_help = \"Given a user and a map, display the user's scores on the map\";\n\n\n\n MyCommand::new(\"cs\", \"Compare a score\")\n\n .help(score_help)\n\n .options(score_options())\n\n}\n", "file_path": "src/commands/osu/compare/score.rs", "rank": 75, "score": 214001.46496925538 }, { "content": "pub fn define_rs() -> MyCommand {\n\n let help = \"Show a user's recent score.\\n\\\n\n To add a timestamp to a twitch VOD, be sure you linked yourself to a twitch account via `/config`.\";\n\n\n\n MyCommand::new(\"rs\", \"Show a user's recent score\")\n\n .help(help)\n\n .options(super::score_options())\n\n}\n", "file_path": "src/commands/osu/recent/score.rs", "rank": 76, "score": 214001.46496925538 }, { "content": "fn log_interaction(ctx: &Context, interaction: &dyn InteractionExt, name: &str) {\n\n let username = interaction.username().unwrap_or(\"<unknown user>\");\n\n let location = InteractionLocationLog { ctx, interaction };\n\n info!(\"[{location}] {username} used `{name}` interaction\");\n\n}\n\n\n", "file_path": "src/core/commands/handle_interaction.rs", "rank": 77, "score": 213958.6936116797 }, { "content": "pub fn define_serverconfig() -> MyCommand {\n\n let role =\n\n MyCommandOption::builder(\"role\", \"Specify the role that should gain authority status\")\n\n .role(true);\n\n\n\n let add = MyCommandOption::builder(\"add\", \"Add authority status to a role\")\n\n .help(\"Add authority status to a role.\\nServers can have at most 10 authority roles.\")\n\n 
.subcommand(vec![role]);\n\n\n\n let list = MyCommandOption::builder(\"list\", \"Display all current authority roles\")\n\n .subcommand(Vec::new());\n\n\n\n let role =\n\n MyCommandOption::builder(\"role\", \"Specify the role that should lose authority status\")\n\n .role(true);\n\n\n\n let remove_help = \"Remove authority status from a role.\\n\\\n\n You can only use this if the removed role would __not__ make you lose authority status yourself.\";\n\n\n\n let remove = MyCommandOption::builder(\"remove\", \"Remove authority status from a role\")\n", "file_path": "src/commands/utility/server_config.rs", "rank": 78, "score": 213681.60973587947 }, { "content": "pub fn define_commands() -> MyCommand {\n\n MyCommand::new(\"commands\", \"Display a list of popular commands\")\n\n}\n", "file_path": "src/commands/utility/command_count.rs", "rank": 79, "score": 213681.60973587947 }, { "content": "pub fn define_roleassign() -> MyCommand {\n\n let channel =\n\n MyCommandOption::builder(\"channel\", \"Specify the channel that contains the message\")\n\n .channel(true);\n\n\n\n let message_help = \"Specify the message by providing its ID.\\n\\\n\n You can find the ID by rightclicking the message and clicking on `Copy ID`.\\n\\\n\n To see the `Copy ID` option, you must have `Settings > Advanced > Developer Mode` enabled.\";\n\n\n\n let message = MyCommandOption::builder(\"message\", \"Specify a message id\")\n\n .help(message_help)\n\n .string(Vec::new(), true);\n\n\n\n let role =\n\n MyCommandOption::builder(\"role\", \"Specify a role that should be assigned\").role(true);\n\n\n\n let add_help = \"Add role-assigning upon reaction on a message \\\n\n i.e. 
make me add or remove a member's role when they (un)react to a message.\";\n\n\n\n let add = MyCommandOption::builder(\"add\", \"Add role-assigning upon reaction on a message\")\n", "file_path": "src/commands/utility/role_assign.rs", "rank": 80, "score": 213681.60973587947 }, { "content": "fn twitch_content(state: u8) -> String {\n\n let config = CONFIG.get().unwrap();\n\n\n\n format!(\n\n \"{emote} [Click here](https://id.twitch.tv/oauth2/authorize?client_id={client_id}\\\n\n &response_type=code&scope=user:read:email&redirect_uri={url}/auth/twitch\\\n\n &state={state}) to authenticate your twitch channel\",\n\n emote = Emote::Twitch.text(),\n\n client_id = config.tokens.twitch_client_id,\n\n url = config.server.external_url,\n\n )\n\n}\n\n\n\nasync fn handle_both_links(\n\n ctx: &Context,\n\n command: ApplicationCommand,\n\n mut config: UserConfig,\n\n) -> BotResult<()> {\n\n let osu_fut = ctx.auth_standby.wait_for_osu();\n\n let twitch_fut = ctx.auth_standby.wait_for_twitch();\n", "file_path": "src/commands/utility/config.rs", "rank": 81, "score": 211716.0118621499 }, { "content": "fn osu_content(state: u8) -> String {\n\n let config = CONFIG.get().unwrap();\n\n\n\n format!(\n\n \"{emote} [Click here](https://osu.ppy.sh/oauth/authorize?client_id={client_id}&\\\n\n response_type=code&scope=identify&redirect_uri={url}/auth/osu&state={state}) \\\n\n to authenticate your osu! 
profile\",\n\n emote = Emote::Osu.text(),\n\n client_id = config.tokens.osu_client_id,\n\n url = config.server.external_url,\n\n )\n\n}\n\n\n", "file_path": "src/commands/utility/config.rs", "rank": 82, "score": 211716.0118621499 }, { "content": "pub fn str_to_f32<'de, D: Deserializer<'de>>(d: D) -> Result<f32, D::Error> {\n\n Ok(str_to_maybe_f32(d)?.unwrap_or(0.0))\n\n}\n\n\n", "file_path": "src/custom_client/deserialize.rs", "rank": 83, "score": 210854.06065911127 }, { "content": "fn extract_medals(user: &User) -> HashMap<u32, DateTime<Utc>> {\n\n match user.medals.as_ref() {\n\n Some(medals) => medals\n\n .iter()\n\n .map(|medal| (medal.medal_id, medal.achieved_at))\n\n .collect(),\n\n None => HashMap::new(),\n\n }\n\n}\n\n\n\n#[command]\n\n#[short_desc(\"Compare which of the given users achieved medals first\")]\n\n#[usage(\"[username1] [username2]\")]\n\n#[example(\"badewanne3 5joshi\")]\n\n#[aliases(\"medalcommon\")]\n\npub async fn medalscommon(ctx: Arc<Context>, data: CommandData) -> BotResult<()> {\n\n match data {\n\n CommandData::Message { msg, mut args, num } => {\n\n match CommonArgs::args(&ctx, &mut args, msg.author.id).await {\n\n Ok(Ok(common_args)) => {\n", "file_path": "src/commands/osu/medals/common.rs", "rank": 84, "score": 210702.8305089724 }, { "content": "pub fn find_prefix<'a>(prefixes: &[Prefix], stream: &mut Stream<'a>) -> bool {\n\n prefixes.iter().any(|p| {\n\n if stream.starts_with(p) {\n\n stream.increment(p.len());\n\n\n\n true\n\n } else {\n\n false\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/core/commands/parse.rs", "rank": 85, "score": 210527.8095524055 }, { "content": "pub trait Authored {\n\n fn author(&self) -> Option<&User>;\n\n fn guild_id(&self) -> Option<Id<GuildMarker>>;\n\n fn channel_id(&self) -> Id<ChannelMarker>;\n\n}\n\n\n\nimpl Authored for Message {\n\n fn author(&self) -> Option<&User> {\n\n Some(&self.author)\n\n }\n\n\n\n fn guild_id(&self) -> Option<Id<GuildMarker>> {\n\n self.guild_id\n\n }\n\n\n\n fn 
channel_id(&self) -> Id<ChannelMarker> {\n\n self.channel_id\n\n }\n\n}\n\n\n", "file_path": "src/util/authored.rs", "rank": 86, "score": 207772.64127039321 }, { "content": "pub fn is_guest_diff(msg: &str) -> bool {\n\n OSU_DIFF_MATCHER.is_match(msg)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 87, "score": 206670.2747773125 }, { "content": "pub fn is_custom_emote(msg: &str) -> bool {\n\n EMOJI_MATCHER.is_match(msg)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 88, "score": 206670.2747773125 }, { "content": "#[allow(dead_code)]\n\npub fn is_hit_results(msg: &str) -> bool {\n\n HIT_RESULTS_MATCHER.is_match(msg)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 89, "score": 206670.2747773125 }, { "content": "pub fn str_to_maybe_f32<'de, D: Deserializer<'de>>(d: D) -> Result<Option<f32>, D::Error> {\n\n d.deserialize_option(MaybeF32String)\n\n}\n\n\n", "file_path": "src/custom_client/deserialize.rs", "rank": 90, "score": 203943.10056701215 }, { "content": "pub trait PpListUtil {\n\n fn accum_weighted(&self) -> f32;\n\n}\n\n\n\nimpl PpListUtil for [f32] {\n\n fn accum_weighted(&self) -> f32 {\n\n self.iter()\n\n .copied()\n\n .zip(0..)\n\n .fold(0.0, |sum, (pp, i)| sum + pp * 0.95_f32.powi(i))\n\n }\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 91, "score": 201032.79971229535 }, { "content": "pub fn grade_emote(grade: Grade) -> &'static str {\n\n CONFIG.get().unwrap().grade(grade)\n\n}\n\n\n", "file_path": "src/util/osu.rs", "rank": 92, "score": 200647.0979101337 }, { "content": "pub fn sec_to_minsec(secs: u32) -> SecToMinSecFormatter {\n\n SecToMinSecFormatter { secs }\n\n}\n\n\n\npub struct SecToMinSecFormatter {\n\n secs: u32,\n\n}\n\n\n\nimpl fmt::Display for SecToMinSecFormatter {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}:{:02}\", self.secs / 60, self.secs % 60)\n\n }\n\n}\n\n\n", "file_path": "src/util/datetime.rs", "rank": 93, "score": 197971.33965806698 }, { "content": "pub fn 
get_mods(msg: &str) -> Option<ModSelection> {\n\n let selection = if let Some(captures) = MOD_PLUS_MATCHER.captures(msg) {\n\n let mods = GameMods::from_str(captures.get(1)?.as_str()).ok()?;\n\n\n\n if msg.ends_with('!') {\n\n ModSelection::Exact(mods)\n\n } else {\n\n ModSelection::Include(mods)\n\n }\n\n } else if let Some(captures) = MOD_MINUS_MATCHER.captures(msg) {\n\n let mods = GameMods::from_str(captures.get(1)?.as_str()).ok()?;\n\n\n\n ModSelection::Exclude(mods)\n\n } else {\n\n return None;\n\n };\n\n\n\n Some(selection)\n\n}\n\n\n", "file_path": "src/util/matcher.rs", "rank": 94, "score": 197749.9531404642 }, { "content": "pub fn inflate_acc<'de, D: Deserializer<'de>>(d: D) -> Result<f32, D::Error> {\n\n let acc: f32 = Deserialize::deserialize(d)?;\n\n\n\n Ok(100.0 * acc)\n\n}\n\n\n", "file_path": "src/custom_client/deserialize.rs", "rank": 95, "score": 197233.63475761202 }, { "content": "pub fn deserialize_acc<'de, D: Deserializer<'de>>(d: D) -> Result<f32, D::Error> {\n\n Deserialize::deserialize(d).map(|n: f32| 100.0 * n)\n\n}\n\n\n", "file_path": "src/custom_client/snipe.rs", "rank": 96, "score": 197233.63475761202 }, { "content": "fn content_with_condition(args: &PinnedArgs, amount: usize) -> String {\n\n let mut content = String::with_capacity(64);\n\n\n\n match args.sort_by {\n\n Some(ScoreOrder::Acc) => content.push_str(\"`Order: Accuracy`\"),\n\n Some(ScoreOrder::Bpm) => content.push_str(\"`Order: BPM`\"),\n\n Some(ScoreOrder::Combo) => content.push_str(\"`Order: Combo`\"),\n\n Some(ScoreOrder::Date) => content.push_str(\"`Order: Date`\"),\n\n Some(ScoreOrder::Length) => content.push_str(\"`Order: Length`\"),\n\n Some(ScoreOrder::Misses) => content.push_str(\"`Order: Miss count`\"),\n\n Some(ScoreOrder::Pp) => content.push_str(\"`Order: Pp`\"),\n\n Some(ScoreOrder::RankedDate) => content.push_str(\"`Order: Ranked date`\"),\n\n Some(ScoreOrder::Score) => content.push_str(\"`Order: Score`\"),\n\n Some(ScoreOrder::Stars) => 
content.push_str(\"`Order: Stars`\"),\n\n None => {}\n\n }\n\n\n\n if let Some(selection) = args.mods {\n\n if !content.is_empty() {\n\n content.push_str(\" ~ \");\n", "file_path": "src/commands/osu/pinned.rs", "rank": 97, "score": 197102.09213523215 }, { "content": "fn content_with_condition(args: &TopArgs, amount: usize) -> String {\n\n let mut content = String::with_capacity(64);\n\n\n\n match args.sort_by {\n\n TopOrder::Farm => content.push_str(\"`Order: Farm`\"),\n\n TopOrder::Other(ScoreOrder::Acc) => content.push_str(\"`Order: Accuracy\"),\n\n TopOrder::Other(ScoreOrder::Bpm) => content.push_str(\"`Order: BPM\"),\n\n TopOrder::Other(ScoreOrder::Combo) => content.push_str(\"`Order: Combo\"),\n\n TopOrder::Other(ScoreOrder::Date) => content.push_str(\"`Order: Date\"),\n\n TopOrder::Other(ScoreOrder::Length) => content.push_str(\"`Order: Length\"),\n\n TopOrder::Other(ScoreOrder::Misses) => content.push_str(\"`Order: Miss count\"),\n\n TopOrder::Other(ScoreOrder::Pp) => content.push_str(\"`Order: Pp\"),\n\n TopOrder::Other(ScoreOrder::RankedDate) => content.push_str(\"`Order: Ranked date\"),\n\n TopOrder::Other(ScoreOrder::Score) => content.push_str(\"`Order: Score\"),\n\n TopOrder::Other(ScoreOrder::Stars) => content.push_str(\"`Order: Stars\"),\n\n }\n\n\n\n if args.reverse {\n\n content.push_str(\" (reverse)`\");\n\n } else {\n", "file_path": "src/commands/osu/top.rs", "rank": 98, "score": 197102.09213523215 }, { "content": "pub fn highlight_funny_numeral(content: &str) -> Cow<'_, str> {\n\n SEVEN_TWO_SEVEN.replace_all(content, \"__${num}__\")\n\n}\n\n\n\nlazy_static! 
{\n\n static ref ROLE_ID_MATCHER: Regex = Regex::new(r\"<@&(\\d+)>\").unwrap();\n\n\n\n static ref CHANNEL_ID_MATCHER: Regex = Regex::new(r\"<#(\\d+)>\").unwrap();\n\n\n\n static ref MENTION_MATCHER: Regex = Regex::new(r\"<@!?(\\d+)>\").unwrap();\n\n\n\n static ref OSU_URL_USER_MATCHER: Regex = Regex::new(r\"^https://osu.ppy.sh/u(?:sers)?/(?:(\\d+)|(\\w+))$\").unwrap();\n\n\n\n static ref OSU_URL_MAP_NEW_MATCHER: Regex = Regex::new(\n\n r\"https://osu.ppy.sh/beatmapsets/(\\d+)(?:(?:#(?:osu|mania|taiko|fruits)|<#\\d+>)/(\\d+))?\"\n\n )\n\n .unwrap();\n\n\n\n static ref OSU_URL_MAP_OLD_MATCHER: Regex =\n\n Regex::new(r\"https://osu.ppy.sh/b(?:eatmaps)?/(\\d+)\").unwrap();\n", "file_path": "src/util/matcher.rs", "rank": 99, "score": 195022.54820513303 } ]
Rust
src/platform_impl/web/event_loop/window_target.rs
michaelkirk/winit
412bd94ea473c7b175f94a190d66f4be3e92aaab
use super::{super::monitor, backend, device, proxy::Proxy, runner, window}; use crate::dpi::{PhysicalSize, Size}; use crate::event::{DeviceId, ElementState, Event, KeyboardInput, TouchPhase, WindowEvent}; use crate::event_loop::ControlFlow; use crate::window::{Theme, WindowId}; use std::clone::Clone; use std::collections::{vec_deque::IntoIter as VecDequeIter, VecDeque}; pub struct WindowTarget<T: 'static> { pub(crate) runner: runner::Shared<T>, } impl<T> Clone for WindowTarget<T> { fn clone(&self) -> Self { WindowTarget { runner: self.runner.clone(), } } } impl<T> WindowTarget<T> { pub fn new() -> Self { WindowTarget { runner: runner::Shared::new(), } } pub fn proxy(&self) -> Proxy<T> { Proxy::new(self.runner.clone()) } pub fn run(&self, event_handler: Box<dyn FnMut(Event<'static, T>, &mut ControlFlow)>) { self.runner.set_listener(event_handler); } pub fn generate_id(&self) -> window::Id { window::Id(self.runner.generate_id()) } pub fn register(&self, canvas: &mut backend::Canvas, id: window::Id) { let runner = self.runner.clone(); canvas.set_attribute("data-raw-handle", &id.0.to_string()); canvas.on_blur(move || { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::Focused(false), }); }); let runner = self.runner.clone(); canvas.on_focus(move || { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::Focused(true), }); }); let runner = self.runner.clone(); canvas.on_keyboard_press(move |scancode, virtual_keycode, modifiers| { #[allow(deprecated)] runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::KeyboardInput { device_id: DeviceId(unsafe { device::Id::dummy() }), input: KeyboardInput { scancode, state: ElementState::Pressed, virtual_keycode, modifiers, }, is_synthetic: false, }, }); }); let runner = self.runner.clone(); canvas.on_keyboard_release(move |scancode, virtual_keycode, modifiers| { #[allow(deprecated)] runner.send_event(Event::WindowEvent { window_id: 
WindowId(id), event: WindowEvent::KeyboardInput { device_id: DeviceId(unsafe { device::Id::dummy() }), input: KeyboardInput { scancode, state: ElementState::Released, virtual_keycode, modifiers, }, is_synthetic: false, }, }); }); let runner = self.runner.clone(); canvas.on_received_character(move |char_code| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::ReceivedCharacter(char_code), }); }); let runner = self.runner.clone(); canvas.on_cursor_leave(move |pointer_id| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::CursorLeft { device_id: DeviceId(device::Id(pointer_id)), }, }); }); let runner = self.runner.clone(); canvas.on_cursor_enter(move |pointer_id| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::CursorEntered { device_id: DeviceId(device::Id(pointer_id)), }, }); }); let runner = self.runner.clone(); canvas.on_cursor_move(move |pointer_id, position, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::CursorMoved { device_id: DeviceId(device::Id(pointer_id)), position, modifiers, }, }); }); let runner = self.runner.clone(); canvas.on_mouse_press(move |pointer_id, button, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::MouseInput { device_id: DeviceId(device::Id(pointer_id)), state: ElementState::Pressed, button, modifiers, }, }); }); let runner = self.runner.clone(); canvas.on_mouse_release(move |pointer_id, button, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::MouseInput { device_id: DeviceId(device::Id(pointer_id)), state: ElementState::Released, button, modifiers, }, }); }); let runner = self.runner.clone(); canvas.on_mouse_wheel(move |pointer_id, delta, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::MouseWheel { device_id: 
DeviceId(device::Id(pointer_id)), delta, phase: TouchPhase::Moved, modifiers, }, }); }); let runner = self.runner.clone(); let raw = canvas.raw().clone(); let mut intended_size = PhysicalSize { width: raw.width() as u32, height: raw.height() as u32, }; canvas.on_fullscreen_change(move || { let new_size = if backend::is_fullscreen(&raw) { intended_size = PhysicalSize { width: raw.width() as u32, height: raw.height() as u32, }; backend::window_size().to_physical(backend::scale_factor()) } else { intended_size }; backend::set_canvas_size(&raw, Size::Physical(new_size)); runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::Resized(new_size), }); runner.request_redraw(WindowId(id)); }); let runner = self.runner.clone(); canvas.on_dark_mode(move |is_dark_mode| { let theme = if is_dark_mode { Theme::Dark } else { Theme::Light }; runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::ThemeChanged(theme), }); }); } pub fn available_monitors(&self) -> VecDequeIter<monitor::Handle> { VecDeque::new().into_iter() } pub fn primary_monitor(&self) -> monitor::Handle { monitor::Handle } }
use super::{super::monitor, backend, device, proxy::Proxy, runner, window}; use crate::dpi::{PhysicalSize, Size}; use crate::event::{DeviceId, ElementState, Event, KeyboardInput, TouchPhase, WindowEvent}; use crate::event_loop::ControlFlow; use crate::window::{Theme, WindowId}; use std::clone::Clone; use std::collections::{vec_deque::IntoIter as VecDequeIter, VecDeque}; pub struct WindowTarget<T: 'static> { pub(crate) runner: runner::Shared<T>, } impl<T> Clone for WindowTarget<T> { fn clone(&self) -> Self { WindowTarget { runner: self.runner.clone(), } } } impl<T> WindowTarget<T> { pub fn new() -> Self { WindowTarget { runner: runner::Shared::new(), } } pub fn proxy(&self) -> Proxy<T> { Proxy::new(self.runner.clone()) } pub fn run(&self, event_handler: Box<dyn FnMut(Event<'static, T>, &mut ControlFlow)>) { self.runner.set_listener(event_handler); } pub fn generate_id(&self) -> window::Id { window::Id(self.runner.generate_id()) } pub fn register(&self, canvas: &mut backend::Canvas, id: window::Id) { let runner = self.runner.clone(); canvas.set_attribute("data-raw-handle", &id.0.to_string()); canvas.on_blur(move || { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::Focused(false), }); }); let runner = self.runner.clone(); canvas.on_focus(move || { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::Focused(true), }); }); let runner = self.runner.clone(); canvas.on_keyboard_press(move |scancode, virtual_keycode, modifiers| { #[allow(deprecated)] runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::KeyboardInput { device_id: DeviceId(unsafe { device::Id::dummy() }), input: KeyboardInput { scancode, state: ElementState::Pressed, virtual_keycode, modifiers, }, is_synthetic: false, }, }); }); let runner = self.runner.clone(); canvas.on_keyboard_release(move |scancode, virtual_keycode, modifiers| { #[allow(deprecated)] runner.send_event(Event::WindowEvent { window_id: 
WindowId(id), event: WindowEvent::KeyboardInput { device_id: DeviceId(unsafe { device::Id::dummy() }), input: KeyboardInput { scancode, state: ElementState::Released, virtual_keycode, modifiers, }, is_synthetic: false, }, }); }); let runner = self.runner.clone(); canvas.on_received_character(move |char_code| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::ReceivedCharacter(char_code), }); }); let runner = self.runner.clone(); canvas.on_cursor_leave(move |pointer_id| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::CursorLeft { device_id: DeviceId(device::Id(pointer_id)), }, }); }); let runner = self.runner.clone(); canvas.on_cursor_enter(move |pointer_id| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::CursorEntered { device_id: DeviceId(device::Id(pointer_id)), }, }); }); let runner = self.runner.clone(); canvas.on_cursor_move(move |pointer_id, position, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::CursorMoved {
}
device_id: DeviceId(device::Id(pointer_id)), position, modifiers, }, }); }); let runner = self.runner.clone(); canvas.on_mouse_press(move |pointer_id, button, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::MouseInput { device_id: DeviceId(device::Id(pointer_id)), state: ElementState::Pressed, button, modifiers, }, }); }); let runner = self.runner.clone(); canvas.on_mouse_release(move |pointer_id, button, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::MouseInput { device_id: DeviceId(device::Id(pointer_id)), state: ElementState::Released, button, modifiers, }, }); }); let runner = self.runner.clone(); canvas.on_mouse_wheel(move |pointer_id, delta, modifiers| { runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::MouseWheel { device_id: DeviceId(device::Id(pointer_id)), delta, phase: TouchPhase::Moved, modifiers, }, }); }); let runner = self.runner.clone(); let raw = canvas.raw().clone(); let mut intended_size = PhysicalSize { width: raw.width() as u32, height: raw.height() as u32, }; canvas.on_fullscreen_change(move || { let new_size = if backend::is_fullscreen(&raw) { intended_size = PhysicalSize { width: raw.width() as u32, height: raw.height() as u32, }; backend::window_size().to_physical(backend::scale_factor()) } else { intended_size }; backend::set_canvas_size(&raw, Size::Physical(new_size)); runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::Resized(new_size), }); runner.request_redraw(WindowId(id)); }); let runner = self.runner.clone(); canvas.on_dark_mode(move |is_dark_mode| { let theme = if is_dark_mode { Theme::Dark } else { Theme::Light }; runner.send_event(Event::WindowEvent { window_id: WindowId(id), event: WindowEvent::ThemeChanged(theme), }); }); } pub fn available_monitors(&self) -> VecDequeIter<monitor::Handle> { VecDeque::new().into_iter() } pub fn primary_monitor(&self) -> 
monitor::Handle { monitor::Handle }
random
[ { "content": "pub fn event_mods(event: id) -> ModifiersState {\n\n let flags = unsafe { NSEvent::modifierFlags(event) };\n\n let mut m = ModifiersState::empty();\n\n m.set(\n\n ModifiersState::SHIFT,\n\n flags.contains(NSEventModifierFlags::NSShiftKeyMask),\n\n );\n\n m.set(\n\n ModifiersState::CTRL,\n\n flags.contains(NSEventModifierFlags::NSControlKeyMask),\n\n );\n\n m.set(\n\n ModifiersState::ALT,\n\n flags.contains(NSEventModifierFlags::NSAlternateKeyMask),\n\n );\n\n m.set(\n\n ModifiersState::LOGO,\n\n flags.contains(NSEventModifierFlags::NSCommandKeyMask),\n\n );\n\n m\n\n}\n\n\n", "file_path": "src/platform_impl/macos/event.rs", "rank": 0, "score": 323426.0234127553 }, { "content": "// Update `state.modifiers` if `event` has something different\n\nfn update_potentially_stale_modifiers(state: &mut ViewState, event: id) {\n\n let event_modifiers = event_mods(event);\n\n if state.modifiers != event_modifiers {\n\n state.modifiers = event_modifiers;\n\n\n\n AppState::queue_event(EventWrapper::StaticEvent(Event::WindowEvent {\n\n window_id: WindowId(get_window_id(state.ns_window)),\n\n event: WindowEvent::ModifiersChanged(state.modifiers),\n\n }));\n\n }\n\n}\n\n\n\nextern \"C\" fn key_down(this: &Object, _sel: Sel, event: id) {\n\n trace!(\"Triggered `keyDown`\");\n\n unsafe {\n\n let state_ptr: *mut c_void = *this.get_ivar(\"winitState\");\n\n let state = &mut *(state_ptr as *mut ViewState);\n\n let window_id = WindowId(get_window_id(state.ns_window));\n\n let characters = get_characters(event, false);\n\n\n", "file_path": "src/platform_impl/macos/view.rs", "rank": 1, "score": 319036.9271220651 }, { "content": "pub fn get_key_mods() -> ModifiersState {\n\n let filter_out_altgr = layout_uses_altgr() && key_pressed(winuser::VK_RMENU);\n\n\n\n let mut mods = ModifiersState::empty();\n\n mods.set(ModifiersState::SHIFT, key_pressed(winuser::VK_SHIFT));\n\n mods.set(\n\n ModifiersState::CTRL,\n\n key_pressed(winuser::VK_CONTROL) && !filter_out_altgr,\n\n );\n\n 
mods.set(\n\n ModifiersState::ALT,\n\n key_pressed(winuser::VK_MENU) && !filter_out_altgr,\n\n );\n\n mods.set(\n\n ModifiersState::LOGO,\n\n key_pressed(winuser::VK_LWIN) || key_pressed(winuser::VK_RWIN),\n\n );\n\n mods\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event.rs", "rank": 2, "score": 271524.2047950869 }, { "content": "pub fn get_scancode(event: cocoa::base::id) -> c_ushort {\n\n // In AppKit, `keyCode` refers to the position (scancode) of a key rather than its character,\n\n // and there is no easy way to navtively retrieve the layout-dependent character.\n\n // In winit, we use keycode to refer to the key's character, and so this function aligns\n\n // AppKit's terminology with ours.\n\n unsafe { msg_send![event, keyCode] }\n\n}\n\n\n\npub unsafe fn modifier_event(\n\n ns_event: id,\n\n keymask: NSEventModifierFlags,\n\n was_key_pressed: bool,\n\n) -> Option<WindowEvent<'static>> {\n\n if !was_key_pressed && NSEvent::modifierFlags(ns_event).contains(keymask)\n\n || was_key_pressed && !NSEvent::modifierFlags(ns_event).contains(keymask)\n\n {\n\n let state = if was_key_pressed {\n\n ElementState::Released\n\n } else {\n\n ElementState::Pressed\n", "file_path": "src/platform_impl/macos/event.rs", "rank": 3, "score": 264149.08125502116 }, { "content": "pub fn mouse_modifiers(event: &MouseEvent) -> ModifiersState {\n\n let mut m = ModifiersState::empty();\n\n m.set(ModifiersState::SHIFT, event.shift_key());\n\n m.set(ModifiersState::CTRL, event.ctrl_key());\n\n m.set(ModifiersState::ALT, event.alt_key());\n\n m.set(ModifiersState::LOGO, event.meta_key());\n\n m\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 4, "score": 263957.6799280063 }, { "content": "pub fn keyboard_modifiers(event: &KeyboardEvent) -> ModifiersState {\n\n let mut m = ModifiersState::empty();\n\n m.set(ModifiersState::SHIFT, event.shift_key());\n\n m.set(ModifiersState::CTRL, event.ctrl_key());\n\n m.set(ModifiersState::ALT, event.alt_key());\n\n 
m.set(ModifiersState::LOGO, event.meta_key());\n\n m\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 5, "score": 263957.6799280063 }, { "content": "pub fn mouse_modifiers(event: &impl IMouseEvent) -> ModifiersState {\n\n let mut m = ModifiersState::empty();\n\n m.set(ModifiersState::SHIFT, event.shift_key());\n\n m.set(ModifiersState::CTRL, event.ctrl_key());\n\n m.set(ModifiersState::ALT, event.alt_key());\n\n m.set(ModifiersState::LOGO, event.meta_key());\n\n m\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 6, "score": 259262.33218326754 }, { "content": "pub fn keyboard_modifiers(event: &impl IKeyboardEvent) -> ModifiersState {\n\n let mut m = ModifiersState::empty();\n\n m.set(ModifiersState::SHIFT, event.shift_key());\n\n m.set(ModifiersState::CTRL, event.ctrl_key());\n\n m.set(ModifiersState::ALT, event.alt_key());\n\n m.set(ModifiersState::LOGO, event.meta_key());\n\n m\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 7, "score": 259262.33218326757 }, { "content": "pub fn new_view(ns_window: id) -> (IdRef, Weak<Mutex<CursorState>>) {\n\n let cursor_state = Default::default();\n\n let cursor_access = Arc::downgrade(&cursor_state);\n\n let state = ViewState {\n\n ns_window,\n\n cursor_state,\n\n ime_spot: None,\n\n raw_characters: None,\n\n is_key_down: false,\n\n modifiers: Default::default(),\n\n tracking_rect: None,\n\n };\n\n unsafe {\n\n // This is free'd in `dealloc`\n\n let state_ptr = Box::into_raw(Box::new(state)) as *mut c_void;\n\n let ns_view: id = msg_send![VIEW_CLASS.0, alloc];\n\n (\n\n IdRef::new(msg_send![ns_view, initWithWinit: state_ptr]),\n\n cursor_access,\n\n )\n", "file_path": "src/platform_impl/macos/view.rs", "rank": 8, "score": 257853.41615210258 }, { "content": "// Convert the `cocoa::base::id` associated with a window to a usize to use as a unique identifier\n\n// for the window.\n\npub fn get_window_id(window_cocoa_id: id) -> Id {\n\n 
Id(window_cocoa_id as *const Object as _)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct PlatformSpecificWindowBuilderAttributes {\n\n pub activation_policy: ActivationPolicy,\n\n pub movable_by_window_background: bool,\n\n pub titlebar_transparent: bool,\n\n pub title_hidden: bool,\n\n pub titlebar_hidden: bool,\n\n pub titlebar_buttons_hidden: bool,\n\n pub fullsize_content_view: bool,\n\n pub resize_increments: Option<LogicalSize<f64>>,\n\n pub disallow_hidpi: bool,\n\n pub has_shadow: bool,\n\n}\n\n\n\nimpl Default for PlatformSpecificWindowBuilderAttributes {\n\n #[inline]\n", "file_path": "src/platform_impl/macos/window.rs", "rank": 9, "score": 252788.68833651723 }, { "content": "struct Runner<T: 'static> {\n\n state: State,\n\n is_busy: bool,\n\n event_handler: Box<dyn FnMut(Event<'static, T>, &mut root::ControlFlow)>,\n\n}\n\n\n\nimpl<T: 'static> Runner<T> {\n\n pub fn new(event_handler: Box<dyn FnMut(Event<'static, T>, &mut root::ControlFlow)>) -> Self {\n\n Runner {\n\n state: State::Init,\n\n is_busy: false,\n\n event_handler,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: 'static> Shared<T> {\n\n pub fn new() -> Self {\n\n Shared(Rc::new(Execution {\n\n runner: RefCell::new(None),\n", "file_path": "src/platform_impl/web/event_loop/runner.rs", "rank": 10, "score": 248139.0175381355 }, { "content": "pub fn on_unload(mut handler: impl FnMut() + 'static) {\n\n window().add_event_listener(move |_: BeforeUnloadEvent| handler());\n\n}\n\n\n\nimpl WindowExtStdweb for Window {\n\n fn canvas(&self) -> CanvasElement {\n\n self.window.canvas().raw().clone()\n\n }\n\n\n\n fn is_dark_mode(&self) -> bool {\n\n // TODO: upstream to stdweb\n\n let is_dark_mode = js! 
{\n\n return (window.matchMedia && window.matchMedia(\"(prefers-color-scheme: dark)\").matches)\n\n };\n\n\n\n is_dark_mode.try_into().expect(\"should return a bool\")\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/mod.rs", "rank": 11, "score": 239465.80341100815 }, { "content": "pub fn register_raw_input_devices(devices: &[RAWINPUTDEVICE]) -> bool {\n\n let device_size = size_of::<RAWINPUTDEVICE>() as UINT;\n\n\n\n let success = unsafe {\n\n winuser::RegisterRawInputDevices(devices.as_ptr() as _, devices.len() as _, device_size)\n\n };\n\n\n\n success == TRUE\n\n}\n\n\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 12, "score": 239323.8686403767 }, { "content": "pub fn set_canvas_size(raw: &CanvasElement, size: Size) {\n\n use stdweb::*;\n\n\n\n let scale_factor = scale_factor();\n\n\n\n let physical_size = size.to_physical::<u32>(scale_factor);\n\n let logical_size = size.to_logical::<f64>(scale_factor);\n\n\n\n raw.set_width(physical_size.width);\n\n raw.set_height(physical_size.height);\n\n\n\n js! 
{\n\n @{raw.as_ref()}.style.width = @{logical_size.width} + \"px\";\n\n @{raw.as_ref()}.style.height = @{logical_size.height} + \"px\";\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/mod.rs", "rank": 13, "score": 237339.9169378965 }, { "content": "pub fn on_unload(mut handler: impl FnMut() + 'static) {\n\n let window = web_sys::window().expect(\"Failed to obtain window\");\n\n\n\n let closure = Closure::wrap(\n\n Box::new(move |_: BeforeUnloadEvent| handler()) as Box<dyn FnMut(BeforeUnloadEvent)>\n\n );\n\n\n\n window\n\n .add_event_listener_with_callback(\"beforeunload\", &closure.as_ref().unchecked_ref())\n\n .expect(\"Failed to add close listener\");\n\n}\n\n\n\nimpl WindowExtWebSys for Window {\n\n fn canvas(&self) -> HtmlCanvasElement {\n\n self.window.canvas().raw().clone()\n\n }\n\n\n\n fn is_dark_mode(&self) -> bool {\n\n let window = web_sys::window().expect(\"Failed to obtain window\");\n\n\n\n window\n\n .match_media(\"(prefers-color-scheme: dark)\")\n\n .ok()\n\n .flatten()\n\n .map(|media| media.matches())\n\n .unwrap_or(false)\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/mod.rs", "rank": 14, "score": 236218.65637875203 }, { "content": "pub fn set_canvas_size(raw: &HtmlCanvasElement, size: Size) {\n\n let scale_factor = scale_factor();\n\n\n\n let physical_size = size.to_physical::<u32>(scale_factor);\n\n let logical_size = size.to_logical::<f64>(scale_factor);\n\n\n\n raw.set_width(physical_size.width);\n\n raw.set_height(physical_size.height);\n\n\n\n let style = raw.style();\n\n style\n\n .set_property(\"width\", &format!(\"{}px\", logical_size.width))\n\n .expect(\"Failed to set canvas width\");\n\n style\n\n .set_property(\"height\", &format!(\"{}px\", logical_size.height))\n\n .expect(\"Failed to set canvas height\");\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/mod.rs", "rank": 15, "score": 231415.41862926586 }, { "content": "struct ThreadMsgTargetSubclassInput<T: 'static> {\n\n event_loop_runner: 
EventLoopRunnerShared<T>,\n\n user_event_receiver: Receiver<T>,\n\n}\n\n\n\nimpl<T> ThreadMsgTargetSubclassInput<T> {\n\n unsafe fn send_event(&self, event: Event<'_, T>) {\n\n self.event_loop_runner.send_event(event);\n\n }\n\n}\n\n\n\npub struct EventLoop<T: 'static> {\n\n thread_msg_sender: Sender<T>,\n\n window_target: RootELW<T>,\n\n}\n\n\n\npub struct EventLoopWindowTarget<T: 'static> {\n\n thread_id: DWORD,\n\n thread_msg_target: HWND,\n\n pub(crate) runner_shared: EventLoopRunnerShared<T>,\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 16, "score": 231032.35470267813 }, { "content": "pub fn new_delegate(window: &Arc<UnownedWindow>, initial_fullscreen: bool) -> IdRef {\n\n let state = WindowDelegateState::new(window, initial_fullscreen);\n\n unsafe {\n\n // This is free'd in `dealloc`\n\n let state_ptr = Box::into_raw(Box::new(state)) as *mut c_void;\n\n let delegate: id = msg_send![WINDOW_DELEGATE_CLASS.0, alloc];\n\n IdRef::new(msg_send![delegate, initWithWinit: state_ptr])\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/macos/window_delegate.rs", "rank": 17, "score": 227634.46795804508 }, { "content": "fn get_view_and_screen_frame(window_id: id) -> (id, CGRect) {\n\n unsafe {\n\n let view_controller: id = msg_send![window_id, rootViewController];\n\n let view: id = msg_send![view_controller, view];\n\n let bounds: CGRect = msg_send![window_id, bounds];\n\n let screen: id = msg_send![window_id, screen];\n\n let screen_space: id = msg_send![screen, coordinateSpace];\n\n let screen_frame: CGRect =\n\n msg_send![window_id, convertRect:bounds toCoordinateSpace:screen_space];\n\n (view, screen_frame)\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/ios/app_state.rs", "rank": 18, "score": 226336.39106001513 }, { "content": "/// Emit a `ModifiersChanged` event whenever modifiers have changed.\n\nfn update_modifiers<T>(window: HWND, subclass_input: &SubclassInput<T>) {\n\n use crate::event::WindowEvent::ModifiersChanged;\n\n\n\n let 
modifiers = event::get_key_mods();\n\n let mut window_state = subclass_input.window_state.lock();\n\n if window_state.modifiers_state != modifiers {\n\n window_state.modifiers_state = modifiers;\n\n\n\n // Drop lock\n\n drop(window_state);\n\n\n\n unsafe {\n\n subclass_input.send_event(Event::WindowEvent {\n\n window_id: RootWindowId(WindowId(window)),\n\n event: ModifiersChanged(modifiers),\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 19, "score": 225673.23703568906 }, { "content": "fn set_modifier(state: &mut ModifiersState, modifier: Modifier, value: bool) {\n\n match modifier {\n\n Modifier::Alt => state.set(ModifiersState::ALT, value),\n\n Modifier::Ctrl => state.set(ModifiersState::CTRL, value),\n\n Modifier::Shift => state.set(ModifiersState::SHIFT, value),\n\n Modifier::Logo => state.set(ModifiersState::LOGO, value),\n\n }\n\n}\n", "file_path": "src/platform_impl/linux/x11/util/modifiers.rs", "rank": 20, "score": 222846.14759999243 }, { "content": "#[allow(dead_code)]\n\npub fn get_raw_input_device_list() -> Option<Vec<RAWINPUTDEVICELIST>> {\n\n let list_size = size_of::<RAWINPUTDEVICELIST>() as UINT;\n\n\n\n let mut num_devices = 0;\n\n let status =\n\n unsafe { winuser::GetRawInputDeviceList(ptr::null_mut(), &mut num_devices, list_size) };\n\n\n\n if status == UINT::max_value() {\n\n return None;\n\n }\n\n\n\n let mut buffer = Vec::with_capacity(num_devices as _);\n\n\n\n let num_stored = unsafe {\n\n winuser::GetRawInputDeviceList(buffer.as_ptr() as _, &mut num_devices, list_size)\n\n };\n\n\n\n if num_stored == UINT::max_value() {\n\n return None;\n\n }\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 21, "score": 222438.84373530353 }, { "content": "pub fn mouse_position(event: &MouseEvent) -> LogicalPosition<f64> {\n\n LogicalPosition {\n\n x: event.offset_x() as f64,\n\n y: event.offset_y() as f64,\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 22, 
"score": 222153.8456688463 }, { "content": "#[allow(dead_code)]\n\npub fn get_raw_input_device_info(handle: HANDLE) -> Option<RawDeviceInfo> {\n\n let mut info: RID_DEVICE_INFO = unsafe { mem::zeroed() };\n\n let info_size = size_of::<RID_DEVICE_INFO>() as UINT;\n\n\n\n info.cbSize = info_size;\n\n\n\n let mut minimum_size = 0;\n\n let status = unsafe {\n\n winuser::GetRawInputDeviceInfoW(\n\n handle,\n\n RIDI_DEVICEINFO,\n\n &mut info as *mut _ as _,\n\n &mut minimum_size,\n\n )\n\n };\n\n\n\n if status == UINT::max_value() || status == 0 {\n\n return None;\n\n }\n\n\n\n debug_assert_eq!(info_size, status);\n\n\n\n Some(info.into())\n\n}\n\n\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 23, "score": 221103.7393095115 }, { "content": "fn wrap_device_id(id: u32) -> RootDeviceId {\n\n RootDeviceId(DeviceId(id))\n\n}\n\n\n\npub type OsError = std::io::Error;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct WindowId(HWND);\n\nunsafe impl Send for WindowId {}\n\nunsafe impl Sync for WindowId {}\n\n\n\nimpl WindowId {\n\n pub unsafe fn dummy() -> Self {\n\n use std::ptr::null_mut;\n\n\n\n WindowId(null_mut())\n\n }\n\n}\n\n\n\n#[macro_use]\n", "file_path": "src/platform_impl/windows/mod.rs", "rank": 24, "score": 219823.52639692713 }, { "content": "pub fn mouse_position(event: &impl IMouseEvent) -> LogicalPosition<f64> {\n\n LogicalPosition {\n\n x: event.offset_x() as f64,\n\n y: event.offset_y() as f64,\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 25, "score": 218112.15371497913 }, { "content": "pub fn get_raw_input_device_name(handle: HANDLE) -> Option<String> {\n\n let mut minimum_size = 0;\n\n let status = unsafe {\n\n winuser::GetRawInputDeviceInfoW(handle, RIDI_DEVICENAME, ptr::null_mut(), &mut minimum_size)\n\n };\n\n\n\n if status != 0 {\n\n return None;\n\n }\n\n\n\n let mut name: Vec<wchar_t> = Vec::with_capacity(minimum_size as _);\n\n\n\n let status = unsafe {\n\n 
winuser::GetRawInputDeviceInfoW(\n\n handle,\n\n RIDI_DEVICENAME,\n\n name.as_ptr() as _,\n\n &mut minimum_size,\n\n )\n\n };\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 26, "score": 215513.38691374974 }, { "content": "pub fn process_key_params(\n\n wparam: WPARAM,\n\n lparam: LPARAM,\n\n) -> Option<(ScanCode, Option<VirtualKeyCode>)> {\n\n let scancode = ((lparam >> 16) & 0xff) as UINT;\n\n let extended = (lparam & 0x01000000) != 0;\n\n handle_extended_keys(wparam as _, scancode, extended)\n\n .map(|(vkey, scancode)| (scancode, vkey_to_winit_vkey(vkey)))\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event.rs", "rank": 27, "score": 209069.87473823322 }, { "content": "pub fn handle_extended_keys(\n\n vkey: c_int,\n\n mut scancode: UINT,\n\n extended: bool,\n\n) -> Option<(c_int, UINT)> {\n\n // Welcome to hell https://blog.molecular-matters.com/2011/09/05/properly-handling-keyboard-input/\n\n let vkey = match vkey {\n\n winuser::VK_SHIFT => unsafe {\n\n winuser::MapVirtualKeyA(scancode, winuser::MAPVK_VSC_TO_VK_EX) as _\n\n },\n\n winuser::VK_CONTROL => {\n\n if extended {\n\n winuser::VK_RCONTROL\n\n } else {\n\n winuser::VK_LCONTROL\n\n }\n\n }\n\n winuser::VK_MENU => {\n\n if extended {\n\n winuser::VK_RMENU\n", "file_path": "src/platform_impl/windows/event.rs", "rank": 28, "score": 209069.87473823322 }, { "content": "pub fn get_raw_mouse_button_state(button_flags: USHORT) -> [Option<ElementState>; 3] {\n\n [\n\n button_flags_to_element_state(\n\n button_flags,\n\n winuser::RI_MOUSE_LEFT_BUTTON_DOWN,\n\n winuser::RI_MOUSE_LEFT_BUTTON_UP,\n\n ),\n\n button_flags_to_element_state(\n\n button_flags,\n\n winuser::RI_MOUSE_MIDDLE_BUTTON_DOWN,\n\n winuser::RI_MOUSE_MIDDLE_BUTTON_UP,\n\n ),\n\n button_flags_to_element_state(\n\n button_flags,\n\n winuser::RI_MOUSE_RIGHT_BUTTON_DOWN,\n\n winuser::RI_MOUSE_RIGHT_BUTTON_UP,\n\n ),\n\n ]\n\n}\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 29, "score": 205291.20387758964 }, { 
"content": "struct EventLoopHandler<T: 'static> {\n\n callback: Box<dyn FnMut(Event<'_, T>, &RootWindowTarget<T>, &mut ControlFlow)>,\n\n will_exit: bool,\n\n window_target: Rc<RootWindowTarget<T>>,\n\n}\n\n\n\nimpl<T> Debug for EventLoopHandler<T> {\n\n fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter\n\n .debug_struct(\"EventLoopHandler\")\n\n .field(\"window_target\", &self.window_target)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<T> EventHandler for EventLoopHandler<T> {\n\n fn handle_nonuser_event(&mut self, event: Event<'_, Never>, control_flow: &mut ControlFlow) {\n\n (self.callback)(event.userify(), &self.window_target, control_flow);\n\n self.will_exit |= *control_flow == ControlFlow::Exit;\n\n if self.will_exit {\n", "file_path": "src/platform_impl/macos/app_state.rs", "rank": 30, "score": 204236.4265289753 }, { "content": "fn get_characters(event: id, ignore_modifiers: bool) -> String {\n\n unsafe {\n\n let characters: id = if ignore_modifiers {\n\n msg_send![event, charactersIgnoringModifiers]\n\n } else {\n\n msg_send![event, characters]\n\n };\n\n\n\n assert_ne!(characters, nil);\n\n let slice =\n\n slice::from_raw_parts(characters.UTF8String() as *const c_uchar, characters.len());\n\n\n\n let string = str::from_utf8_unchecked(slice);\n\n string.to_owned()\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/macos/view.rs", "rank": 31, "score": 202343.17725405877 }, { "content": "pub fn window_size() -> LogicalSize<f64> {\n\n let window = window();\n\n let width = window.inner_width() as f64;\n\n let height = window.inner_height() as f64;\n\n\n\n LogicalSize { width, height }\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/mod.rs", "rank": 32, "score": 201872.38961419874 }, { "content": "/// Additional methods on `DeviceId` that are specific to Windows.\n\npub trait DeviceIdExtWindows {\n\n /// Returns an identifier that persistently refers to this specific device.\n\n ///\n\n /// Will return `None` if the device is no 
longer available.\n\n fn persistent_identifier(&self) -> Option<String>;\n\n}\n\n\n\nimpl DeviceIdExtWindows for DeviceId {\n\n #[inline]\n\n fn persistent_identifier(&self) -> Option<String> {\n\n self.0.persistent_identifier()\n\n }\n\n}\n\n\n", "file_path": "src/platform/windows.rs", "rank": 33, "score": 201560.21648478205 }, { "content": "pub fn adjust_size(hwnd: HWND, size: PhysicalSize<u32>) -> PhysicalSize<u32> {\n\n let (width, height): (u32, u32) = size.into();\n\n let rect = RECT {\n\n left: 0,\n\n right: width as LONG,\n\n top: 0,\n\n bottom: height as LONG,\n\n };\n\n let rect = adjust_window_rect(hwnd, rect).unwrap_or(rect);\n\n PhysicalSize::new((rect.right - rect.left) as _, (rect.bottom - rect.top) as _)\n\n}\n\n\n\npub(crate) fn set_inner_size_physical(window: HWND, x: u32, y: u32) {\n\n unsafe {\n\n let rect = adjust_window_rect(\n\n window,\n\n RECT {\n\n top: 0,\n\n left: 0,\n\n bottom: y as LONG,\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 34, "score": 200609.0321688814 }, { "content": "pub fn window_size() -> LogicalSize<f64> {\n\n let window = web_sys::window().expect(\"Failed to obtain window\");\n\n let width = window\n\n .inner_width()\n\n .expect(\"Failed to get width\")\n\n .as_f64()\n\n .expect(\"Failed to get width as f64\");\n\n let height = window\n\n .inner_height()\n\n .expect(\"Failed to get height\")\n\n .as_f64()\n\n .expect(\"Failed to get height as f64\");\n\n\n\n LogicalSize { width, height }\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/mod.rs", "rank": 35, "score": 198305.9741505706 }, { "content": "pub fn register_all_mice_and_keyboards_for_raw_input(window_handle: HWND) -> bool {\n\n // RIDEV_DEVNOTIFY: receive hotplug events\n\n // RIDEV_INPUTSINK: receive events even if we're not in the foreground\n\n let flags = RIDEV_DEVNOTIFY | RIDEV_INPUTSINK;\n\n\n\n let devices: [RAWINPUTDEVICE; 2] = [\n\n RAWINPUTDEVICE {\n\n usUsagePage: HID_USAGE_PAGE_GENERIC,\n\n usUsage: 
HID_USAGE_GENERIC_MOUSE,\n\n dwFlags: flags,\n\n hwndTarget: window_handle,\n\n },\n\n RAWINPUTDEVICE {\n\n usUsagePage: HID_USAGE_PAGE_GENERIC,\n\n usUsage: HID_USAGE_GENERIC_KEYBOARD,\n\n dwFlags: flags,\n\n hwndTarget: window_handle,\n\n },\n\n ];\n\n\n\n register_raw_input_devices(&devices)\n\n}\n\n\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 36, "score": 197082.8945305205 }, { "content": "struct ComInitialized(*mut ());\n\nimpl Drop for ComInitialized {\n\n fn drop(&mut self) {\n\n unsafe { combaseapi::CoUninitialize() };\n\n }\n\n}\n\n\n\nthread_local! {\n\n static COM_INITIALIZED: ComInitialized = {\n\n unsafe {\n\n combaseapi::CoInitializeEx(ptr::null_mut(), COINIT_APARTMENTTHREADED);\n\n ComInitialized(ptr::null_mut())\n\n }\n\n };\n\n\n\n static TASKBAR_LIST: Cell<*mut ITaskbarList2> = Cell::new(ptr::null_mut());\n\n}\n\n\n", "file_path": "src/platform_impl/windows/window.rs", "rank": 37, "score": 192801.32264405803 }, { "content": "// This function is definitely unsafe, but labeling that would increase\n\n// boilerplate and wouldn't really clarify anything...\n\nfn with_state<F: FnOnce(&mut WindowDelegateState) -> T, T>(this: &Object, callback: F) {\n\n let state_ptr = unsafe {\n\n let state_ptr: *mut c_void = *this.get_ivar(\"winitState\");\n\n &mut *(state_ptr as *mut WindowDelegateState)\n\n };\n\n callback(state_ptr);\n\n}\n\n\n\nextern \"C\" fn dealloc(this: &Object, _sel: Sel) {\n\n with_state(this, |state| unsafe {\n\n Box::from_raw(state as *mut WindowDelegateState);\n\n });\n\n}\n\n\n\nextern \"C\" fn init_with_winit(this: &Object, _sel: Sel, state: *mut c_void) -> id {\n\n unsafe {\n\n let this: id = msg_send![this, init];\n\n if this != nil {\n\n (*this).set_ivar(\"winitState\", state);\n\n with_state(&*this, |state| {\n", "file_path": "src/platform_impl/macos/window_delegate.rs", "rank": 38, "score": 192260.51581353514 }, { "content": "fn mouse_click(this: &Object, event: id, button: MouseButton, button_state: 
ElementState) {\n\n unsafe {\n\n let state_ptr: *mut c_void = *this.get_ivar(\"winitState\");\n\n let state = &mut *(state_ptr as *mut ViewState);\n\n\n\n update_potentially_stale_modifiers(state, event);\n\n\n\n let window_event = Event::WindowEvent {\n\n window_id: WindowId(get_window_id(state.ns_window)),\n\n event: WindowEvent::MouseInput {\n\n device_id: DEVICE_ID,\n\n state: button_state,\n\n button,\n\n modifiers: event_mods(event),\n\n },\n\n };\n\n\n\n AppState::queue_event(EventWrapper::StaticEvent(window_event));\n\n }\n\n}\n", "file_path": "src/platform_impl/macos/view.rs", "rank": 39, "score": 190979.45276942357 }, { "content": "fn mkwid(w: ffi::Window) -> crate::window::WindowId {\n\n crate::window::WindowId(crate::platform_impl::WindowId::X(WindowId(w)))\n\n}\n", "file_path": "src/platform_impl/linux/x11/mod.rs", "rank": 40, "score": 189738.23402691347 }, { "content": "pub fn is_focused(window: HWND) -> bool {\n\n window == unsafe { winuser::GetActiveWindow() }\n\n}\n\n\n\nimpl CursorIcon {\n\n pub(crate) fn to_windows_cursor(self) -> *const wchar_t {\n\n match self {\n\n CursorIcon::Arrow | CursorIcon::Default => winuser::IDC_ARROW,\n\n CursorIcon::Hand => winuser::IDC_HAND,\n\n CursorIcon::Crosshair => winuser::IDC_CROSS,\n\n CursorIcon::Text | CursorIcon::VerticalText => winuser::IDC_IBEAM,\n\n CursorIcon::NotAllowed | CursorIcon::NoDrop => winuser::IDC_NO,\n\n CursorIcon::Grab | CursorIcon::Grabbing | CursorIcon::Move | CursorIcon::AllScroll => {\n\n winuser::IDC_SIZEALL\n\n }\n\n CursorIcon::EResize\n\n | CursorIcon::WResize\n\n | CursorIcon::EwResize\n\n | CursorIcon::ColResize => winuser::IDC_SIZEWE,\n\n CursorIcon::NResize\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 41, "score": 189016.5956522064 }, { "content": "pub fn scancode_to_keycode(scancode: c_ushort) -> Option<VirtualKeyCode> {\n\n Some(match scancode {\n\n 0x00 => VirtualKeyCode::A,\n\n 0x01 => VirtualKeyCode::S,\n\n 0x02 => VirtualKeyCode::D,\n\n 0x03 => 
VirtualKeyCode::F,\n\n 0x04 => VirtualKeyCode::H,\n\n 0x05 => VirtualKeyCode::G,\n\n 0x06 => VirtualKeyCode::Z,\n\n 0x07 => VirtualKeyCode::X,\n\n 0x08 => VirtualKeyCode::C,\n\n 0x09 => VirtualKeyCode::V,\n\n //0x0a => World 1,\n\n 0x0b => VirtualKeyCode::B,\n\n 0x0c => VirtualKeyCode::Q,\n\n 0x0d => VirtualKeyCode::W,\n\n 0x0e => VirtualKeyCode::E,\n\n 0x0f => VirtualKeyCode::R,\n\n 0x10 => VirtualKeyCode::Y,\n\n 0x11 => VirtualKeyCode::T,\n", "file_path": "src/platform_impl/macos/event.rs", "rank": 42, "score": 188832.21696303092 }, { "content": "enum BufferedEvent<T: 'static> {\n\n Event(Event<'static, T>),\n\n ScaleFactorChanged(WindowId, f64, PhysicalSize<u32>),\n\n}\n\n\n\nimpl<T> EventLoopRunner<T> {\n\n pub(crate) fn new(thread_msg_target: HWND, wait_thread_id: DWORD) -> EventLoopRunner<T> {\n\n EventLoopRunner {\n\n thread_msg_target,\n\n wait_thread_id,\n\n runner_state: Cell::new(RunnerState::Uninitialized),\n\n control_flow: Cell::new(ControlFlow::Poll),\n\n panic_error: Cell::new(None),\n\n last_events_cleared: Cell::new(Instant::now()),\n\n event_handler: Cell::new(None),\n\n event_buffer: RefCell::new(VecDeque::new()),\n\n owned_windows: Cell::new(HashSet::new()),\n\n }\n\n }\n\n\n", "file_path": "src/platform_impl/windows/event_loop/runner.rs", "rank": 43, "score": 187564.38013434887 }, { "content": "pub fn codepoint(event: &KeyboardEvent) -> char {\n\n // `event.key()` always returns a non-empty `String`. 
Therefore, this should\n\n // never panic.\n\n // https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key\n\n event.key().chars().next().unwrap()\n\n}\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 44, "score": 187296.08575462547 }, { "content": "pub fn com_initialized() {\n\n COM_INITIALIZED.with(|_| {});\n\n}\n\n\n\n// Reference Implementation:\n\n// https://github.com/chromium/chromium/blob/f18e79d901f56154f80eea1e2218544285e62623/ui/views/win/fullscreen_handler.cc\n\n//\n\n// As per MSDN marking the window as fullscreen should ensure that the\n\n// taskbar is moved to the bottom of the Z-order when the fullscreen window\n\n// is activated. If the window is not fullscreen, the Shell falls back to\n\n// heuristics to determine how the window should be treated, which means\n\n// that it could still consider the window as fullscreen. :(\n\nunsafe fn taskbar_mark_fullscreen(handle: HWND, fullscreen: bool) {\n\n com_initialized();\n\n\n\n TASKBAR_LIST.with(|task_bar_list_ptr| {\n\n let mut task_bar_list = task_bar_list_ptr.get();\n\n\n\n if task_bar_list == ptr::null_mut() {\n\n use winapi::{shared::winerror::S_OK, Interface};\n", "file_path": "src/platform_impl/windows/window.rs", "rank": 45, "score": 185353.8959445957 }, { "content": "fn mkdid(w: c_int) -> crate::event::DeviceId {\n\n crate::event::DeviceId(crate::platform_impl::DeviceId::X(DeviceId(w)))\n\n}\n\n\n", "file_path": "src/platform_impl/linux/x11/mod.rs", "rank": 46, "score": 185056.34761715416 }, { "content": "pub fn get_raw_input_data(handle: HRAWINPUT) -> Option<RAWINPUT> {\n\n let mut data: RAWINPUT = unsafe { mem::zeroed() };\n\n let mut data_size = size_of::<RAWINPUT>() as UINT;\n\n let header_size = size_of::<RAWINPUTHEADER>() as UINT;\n\n\n\n let status = unsafe {\n\n winuser::GetRawInputData(\n\n handle,\n\n RID_INPUT,\n\n &mut data as *mut _ as _,\n\n &mut data_size,\n\n header_size,\n\n )\n\n };\n\n\n\n if status == UINT::max_value() || status == 0 {\n\n return 
None;\n\n }\n\n\n\n Some(data)\n\n}\n\n\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 47, "score": 185011.2330832972 }, { "content": "pub fn codepoint(event: &impl IKeyboardEvent) -> char {\n\n // `event.key()` always returns a non-empty `String`. Therefore, this should\n\n // never panic.\n\n // https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key\n\n event.key().chars().next().unwrap()\n\n}\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 48, "score": 182600.7380098867 }, { "content": "pub fn mouse_button(event: &MouseEvent) -> MouseButton {\n\n match event.button() {\n\n 0 => MouseButton::Left,\n\n 1 => MouseButton::Middle,\n\n 2 => MouseButton::Right,\n\n i => MouseButton::Other((i - 3).try_into().expect(\"very large mouse button value\")),\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 49, "score": 182549.9108480506 }, { "content": "pub fn scan_code(event: &KeyboardEvent) -> ScanCode {\n\n match event.key_code() {\n\n 0 => event.char_code(),\n\n i => i,\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 50, "score": 182549.9108480506 }, { "content": "fn button_flags_to_element_state(\n\n button_flags: USHORT,\n\n down_flag: USHORT,\n\n up_flag: USHORT,\n\n) -> Option<ElementState> {\n\n // We assume the same button won't be simultaneously pressed and released.\n\n if util::has_flag(button_flags, down_flag) {\n\n Some(ElementState::Pressed)\n\n } else if util::has_flag(button_flags, up_flag) {\n\n Some(ElementState::Released)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/raw_input.rs", "rank": 51, "score": 182080.62389992617 }, { "content": "/// Figures out if the keyboard layout has an AltGr key instead of an Alt key.\n\n///\n\n/// Unfortunately, the Windows API doesn't give a way for us to conveniently figure that out. 
So,\n\n/// we use a technique blatantly stolen from [the Firefox source code][source]: iterate over every\n\n/// possible virtual key and compare the `char` output when AltGr is pressed vs when it isn't. If\n\n/// pressing AltGr outputs characters that are different from the standard characters, the layout\n\n/// uses AltGr. Otherwise, it doesn't.\n\n///\n\n/// [source]: https://github.com/mozilla/gecko-dev/blob/265e6721798a455604328ed5262f430cfcc37c2f/widget/windows/KeyboardLayout.cpp#L4356-L4416\n\nfn layout_uses_altgr() -> bool {\n\n unsafe {\n\n static ACTIVE_LAYOUT: AtomicPtr<HKL__> = AtomicPtr::new(ptr::null_mut());\n\n static USES_ALTGR: AtomicBool = AtomicBool::new(false);\n\n\n\n let hkl = winuser::GetKeyboardLayout(0);\n\n let old_hkl = ACTIVE_LAYOUT.swap(hkl, Ordering::SeqCst);\n\n\n\n if hkl == old_hkl {\n\n return USES_ALTGR.load(Ordering::SeqCst);\n\n }\n\n\n\n let mut keyboard_state_altgr = [0u8; 256];\n\n // AltGr is an alias for Ctrl+Alt for... some reason. Whatever it is, those are the keypresses\n\n // we have to emulate to do an AltGr test.\n\n keyboard_state_altgr[winuser::VK_MENU as usize] = 0x80;\n\n keyboard_state_altgr[winuser::VK_CONTROL as usize] = 0x80;\n\n\n\n let keyboard_state_empty = [0u8; 256];\n\n\n", "file_path": "src/platform_impl/windows/event.rs", "rank": 52, "score": 181984.42818382912 }, { "content": "fn get_modifier(state: &ModifiersState, modifier: Modifier) -> bool {\n\n match modifier {\n\n Modifier::Alt => state.alt(),\n\n Modifier::Ctrl => state.ctrl(),\n\n Modifier::Shift => state.shift(),\n\n Modifier::Logo => state.logo(),\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/linux/x11/util/modifiers.rs", "rank": 53, "score": 180832.1658371185 }, { "content": "fn mouse_motion(this: &Object, event: id) {\n\n unsafe {\n\n let state_ptr: *mut c_void = *this.get_ivar(\"winitState\");\n\n let state = &mut *(state_ptr as *mut ViewState);\n\n\n\n // We have to do this to have access to the `NSView` trait...\n\n let view: id = 
this as *const _ as *mut _;\n\n\n\n let window_point = event.locationInWindow();\n\n let view_point = view.convertPoint_fromView_(window_point, nil);\n\n let view_rect = NSView::frame(view);\n\n\n\n if view_point.x.is_sign_negative()\n\n || view_point.y.is_sign_negative()\n\n || view_point.x > view_rect.size.width\n\n || view_point.y > view_rect.size.height\n\n {\n\n let mouse_buttons_down: NSInteger = msg_send![class!(NSEvent), pressedMouseButtons];\n\n if mouse_buttons_down == 0 {\n\n // Point is outside of the client area (view) and no buttons are pressed\n", "file_path": "src/platform_impl/macos/view.rs", "rank": 54, "score": 179726.68838316074 }, { "content": "pub fn adjust_window_rect_with_styles(\n\n hwnd: HWND,\n\n style: DWORD,\n\n style_ex: DWORD,\n\n rect: RECT,\n\n) -> Option<RECT> {\n\n unsafe {\n\n status_map(|r| {\n\n *r = rect;\n\n\n\n let b_menu = !winuser::GetMenu(hwnd).is_null() as BOOL;\n\n if let (Some(get_dpi_for_window), Some(adjust_window_rect_ex_for_dpi)) =\n\n (*GET_DPI_FOR_WINDOW, *ADJUST_WINDOW_RECT_EX_FOR_DPI)\n\n {\n\n let dpi = get_dpi_for_window(hwnd);\n\n adjust_window_rect_ex_for_dpi(r, style as _, b_menu, style_ex as _, dpi)\n\n } else {\n\n winuser::AdjustWindowRectEx(r, style as _, b_menu, style_ex as _)\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 55, "score": 179105.65413828497 }, { "content": "pub fn mouse_button(event: &impl IMouseEvent) -> MouseButton {\n\n match event.button() {\n\n stdweb::web::event::MouseButton::Left => MouseButton::Left,\n\n stdweb::web::event::MouseButton::Right => MouseButton::Right,\n\n stdweb::web::event::MouseButton::Wheel => MouseButton::Middle,\n\n stdweb::web::event::MouseButton::Button4 => MouseButton::Other(0),\n\n stdweb::web::event::MouseButton::Button5 => MouseButton::Other(1),\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 56, "score": 177854.56310331184 }, { "content": "fn main_thread_id() -> DWORD {\n\n 
static mut MAIN_THREAD_ID: DWORD = 0;\n\n #[used]\n\n #[allow(non_upper_case_globals)]\n\n #[link_section = \".CRT$XCU\"]\n\n static INIT_MAIN_THREAD_ID: unsafe fn() = {\n\n unsafe fn initer() {\n\n MAIN_THREAD_ID = processthreadsapi::GetCurrentThreadId();\n\n }\n\n initer\n\n };\n\n\n\n unsafe { MAIN_THREAD_ID }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 57, "score": 177674.6524530339 }, { "content": "pub fn get_pressed_keys() -> impl Iterator<Item = c_int> {\n\n let mut keyboard_state = vec![0u8; 256];\n\n unsafe { winuser::GetKeyboardState(keyboard_state.as_mut_ptr()) };\n\n keyboard_state\n\n .into_iter()\n\n .enumerate()\n\n .filter(|(_, p)| (*p & (1 << 7)) != 0) // whether or not a key is pressed is communicated via the high-order bit\n\n .map(|(i, _)| i as c_int)\n\n}\n\n\n\nunsafe fn get_char(keyboard_state: &[u8; 256], v_key: u32, hkl: HKL) -> Option<char> {\n\n let mut unicode_bytes = [0u16; 5];\n\n let len = winuser::ToUnicodeEx(\n\n v_key,\n\n 0,\n\n keyboard_state.as_ptr(),\n\n unicode_bytes.as_mut_ptr(),\n\n unicode_bytes.len() as _,\n\n 0,\n\n hkl,\n\n );\n\n if len >= 1 {\n\n char::decode_utf16(unicode_bytes.iter().cloned())\n\n .next()\n\n .and_then(|c| c.ok())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event.rs", "rank": 58, "score": 177508.14635524608 }, { "content": "fn is_first_touch(first: &mut Option<u64>, num: &mut u32, id: u64, phase: TouchPhase) -> bool {\n\n match phase {\n\n TouchPhase::Started => {\n\n if *num == 0 {\n\n *first = Some(id);\n\n }\n\n *num += 1;\n\n }\n\n TouchPhase::Cancelled | TouchPhase::Ended => {\n\n if *first == Some(id) {\n\n *first = None;\n\n }\n\n *num = num.saturating_sub(1);\n\n }\n\n _ => (),\n\n }\n\n\n\n *first == Some(id)\n\n}\n", "file_path": "src/platform_impl/linux/x11/event_processor.rs", "rank": 59, "score": 177023.0230002568 }, { "content": "fn wait_thread(parent_thread_id: DWORD, msg_window_id: HWND) {\n\n unsafe {\n\n let mut 
msg: winuser::MSG;\n\n\n\n let cur_thread_id = processthreadsapi::GetCurrentThreadId();\n\n winuser::PostThreadMessageW(\n\n parent_thread_id,\n\n *SEND_WAIT_THREAD_ID_MSG_ID,\n\n 0,\n\n cur_thread_id as LPARAM,\n\n );\n\n\n\n let mut wait_until_opt = None;\n\n 'main: loop {\n\n // Zeroing out the message ensures that the `WaitUntilInstantBox` doesn't get\n\n // double-freed if `MsgWaitForMultipleObjectsEx` returns early and there aren't\n\n // additional messages to process.\n\n msg = mem::zeroed();\n\n\n\n if wait_until_opt.is_some() {\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 60, "score": 175195.31679918323 }, { "content": "fn get_wait_thread_id() -> DWORD {\n\n unsafe {\n\n let mut msg = mem::zeroed();\n\n let result = winuser::GetMessageW(\n\n &mut msg,\n\n -1 as _,\n\n *SEND_WAIT_THREAD_ID_MSG_ID,\n\n *SEND_WAIT_THREAD_ID_MSG_ID,\n\n );\n\n assert_eq!(\n\n msg.message, *SEND_WAIT_THREAD_ID_MSG_ID,\n\n \"this shouldn't be possible. please open an issue with Winit. 
error code: {}\",\n\n result\n\n );\n\n msg.lParam as DWORD\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 61, "score": 173654.3912289397 }, { "content": "pub fn mouse_scroll_delta(event: &WheelEvent) -> Option<MouseScrollDelta> {\n\n let x = event.delta_x();\n\n let y = event.delta_y();\n\n\n\n match event.delta_mode() {\n\n WheelEvent::DOM_DELTA_LINE => Some(MouseScrollDelta::LineDelta(x as f32, y as f32)),\n\n WheelEvent::DOM_DELTA_PIXEL => {\n\n let delta = LogicalPosition::new(x, y).to_physical(super::scale_factor());\n\n Some(MouseScrollDelta::PixelDelta(delta))\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 62, "score": 171427.53690098156 }, { "content": "pub fn mouse_scroll_delta(event: &MouseWheelEvent) -> Option<MouseScrollDelta> {\n\n let x = event.delta_x();\n\n let y = event.delta_y();\n\n\n\n match event.delta_mode() {\n\n MouseWheelDeltaMode::Line => Some(MouseScrollDelta::LineDelta(x as f32, y as f32)),\n\n MouseWheelDeltaMode::Pixel => {\n\n let delta = LogicalPosition::new(x, y).to_physical(super::scale_factor());\n\n Some(MouseScrollDelta::PixelDelta(delta))\n\n }\n\n MouseWheelDeltaMode::Page => None,\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 63, "score": 171427.53690098156 }, { "content": "pub fn virtual_key_code(event: &KeyboardEvent) -> Option<VirtualKeyCode> {\n\n Some(match &event.code()[..] 
{\n\n \"Digit1\" => VirtualKeyCode::Key1,\n\n \"Digit2\" => VirtualKeyCode::Key2,\n\n \"Digit3\" => VirtualKeyCode::Key3,\n\n \"Digit4\" => VirtualKeyCode::Key4,\n\n \"Digit5\" => VirtualKeyCode::Key5,\n\n \"Digit6\" => VirtualKeyCode::Key6,\n\n \"Digit7\" => VirtualKeyCode::Key7,\n\n \"Digit8\" => VirtualKeyCode::Key8,\n\n \"Digit9\" => VirtualKeyCode::Key9,\n\n \"Digit0\" => VirtualKeyCode::Key0,\n\n \"KeyA\" => VirtualKeyCode::A,\n\n \"KeyB\" => VirtualKeyCode::B,\n\n \"KeyC\" => VirtualKeyCode::C,\n\n \"KeyD\" => VirtualKeyCode::D,\n\n \"KeyE\" => VirtualKeyCode::E,\n\n \"KeyF\" => VirtualKeyCode::F,\n\n \"KeyG\" => VirtualKeyCode::G,\n\n \"KeyH\" => VirtualKeyCode::H,\n", "file_path": "src/platform_impl/web/web_sys/event.rs", "rank": 64, "score": 171427.53690098156 }, { "content": "// Retrieves a layout-independent keycode given an event.\n\nfn retrieve_keycode(event: id) -> Option<VirtualKeyCode> {\n\n #[inline]\n\n fn get_code(ev: id, raw: bool) -> Option<VirtualKeyCode> {\n\n let characters = get_characters(ev, raw);\n\n characters.chars().next().and_then(|c| char_to_keycode(c))\n\n }\n\n\n\n // Cmd switches Roman letters for Dvorak-QWERTY layout, so we try modified characters first.\n\n // If we don't get a match, then we fall back to unmodified characters.\n\n let code = get_code(event, false).or_else(|| get_code(event, true));\n\n\n\n // We've checked all layout related keys, so fall through to scancode.\n\n // Reaching this code means that the key is layout-independent (e.g. 
Backspace, Return).\n\n //\n\n // We're additionally checking here for F21-F24 keys, since their keycode\n\n // can vary, but we know that they are encoded\n\n // in characters property.\n\n code.or_else(|| {\n\n let scancode = get_scancode(event);\n\n scancode_to_keycode(scancode).or_else(|| check_function_keys(&get_characters(event, true)))\n\n })\n\n}\n\n\n", "file_path": "src/platform_impl/macos/view.rs", "rank": 65, "score": 171060.062591238 }, { "content": "struct EventLoopHandler<F, T: 'static> {\n\n f: F,\n\n event_loop: RootEventLoopWindowTarget<T>,\n\n}\n\n\n\nimpl<F, T: 'static> Debug for EventLoopHandler<F, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"EventLoopHandler\")\n\n .field(\"event_loop\", &self.event_loop)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<F, T> EventHandler for EventLoopHandler<F, T>\n\nwhere\n\n F: 'static + FnMut(Event<'_, T>, &RootEventLoopWindowTarget<T>, &mut ControlFlow),\n\n T: 'static,\n\n{\n\n fn handle_nonuser_event(&mut self, event: Event<'_, Never>, control_flow: &mut ControlFlow) {\n\n (self.f)(\n", "file_path": "src/platform_impl/ios/event_loop.rs", "rank": 66, "score": 170900.4096595107 }, { "content": "pub fn vkey_to_winit_vkey(vkey: c_int) -> Option<VirtualKeyCode> {\n\n // VK_* codes are documented here https://msdn.microsoft.com/en-us/library/windows/desktop/dd375731(v=vs.85).aspx\n\n match vkey {\n\n //winuser::VK_LBUTTON => Some(VirtualKeyCode::Lbutton),\n\n //winuser::VK_RBUTTON => Some(VirtualKeyCode::Rbutton),\n\n //winuser::VK_CANCEL => Some(VirtualKeyCode::Cancel),\n\n //winuser::VK_MBUTTON => Some(VirtualKeyCode::Mbutton),\n\n //winuser::VK_XBUTTON1 => Some(VirtualKeyCode::Xbutton1),\n\n //winuser::VK_XBUTTON2 => Some(VirtualKeyCode::Xbutton2),\n\n winuser::VK_BACK => Some(VirtualKeyCode::Back),\n\n winuser::VK_TAB => Some(VirtualKeyCode::Tab),\n\n //winuser::VK_CLEAR => Some(VirtualKeyCode::Clear),\n\n winuser::VK_RETURN => Some(VirtualKeyCode::Return),\n\n 
winuser::VK_LSHIFT => Some(VirtualKeyCode::LShift),\n\n winuser::VK_RSHIFT => Some(VirtualKeyCode::RShift),\n\n winuser::VK_LCONTROL => Some(VirtualKeyCode::LControl),\n\n winuser::VK_RCONTROL => Some(VirtualKeyCode::RControl),\n\n winuser::VK_LMENU => Some(VirtualKeyCode::LAlt),\n\n winuser::VK_RMENU => Some(VirtualKeyCode::RAlt),\n\n winuser::VK_PAUSE => Some(VirtualKeyCode::Pause),\n", "file_path": "src/platform_impl/windows/event.rs", "rank": 67, "score": 170798.0451668736 }, { "content": "pub fn become_dpi_aware() {\n\n static ENABLE_DPI_AWARENESS: Once = Once::new();\n\n ENABLE_DPI_AWARENESS.call_once(|| {\n\n unsafe {\n\n if let Some(SetProcessDpiAwarenessContext) = *SET_PROCESS_DPI_AWARENESS_CONTEXT {\n\n // We are on Windows 10 Anniversary Update (1607) or later.\n\n if SetProcessDpiAwarenessContext(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2)\n\n == FALSE\n\n {\n\n // V2 only works with Windows 10 Creators Update (1703). Try using the older\n\n // V1 if we can't set V2.\n\n SetProcessDpiAwarenessContext(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE);\n\n }\n\n } else if let Some(SetProcessDpiAwareness) = *SET_PROCESS_DPI_AWARENESS {\n\n // We are on Windows 8.1 or later.\n\n SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE);\n\n } else if let Some(SetProcessDPIAware) = *SET_PROCESS_DPI_AWARE {\n\n // We are on Vista or later.\n\n SetProcessDPIAware();\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/platform_impl/windows/dpi.rs", "rank": 68, "score": 170583.1365482594 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nenum RunnerState {\n\n /// The event loop has just been created, and an `Init` event must be sent.\n\n Uninitialized,\n\n /// The event loop is idling.\n\n Idle,\n\n /// The event loop is handling the OS's events and sending them to the user's callback.\n\n /// `NewEvents` has been sent, and `MainEventsCleared` hasn't.\n\n HandlingMainEvents,\n\n /// The event loop is handling the redraw events and sending them to the 
user's callback.\n\n /// `MainEventsCleared` has been sent, and `RedrawEventsCleared` hasn't.\n\n HandlingRedrawEvents,\n\n /// The event loop has been destroyed. No other events will be emitted.\n\n Destroyed,\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event_loop/runner.rs", "rank": 69, "score": 168698.56841254226 }, { "content": "pub fn virtual_key_code(event: &impl IKeyboardEvent) -> Option<VirtualKeyCode> {\n\n Some(match &event.code()[..] {\n\n \"Digit1\" => VirtualKeyCode::Key1,\n\n \"Digit2\" => VirtualKeyCode::Key2,\n\n \"Digit3\" => VirtualKeyCode::Key3,\n\n \"Digit4\" => VirtualKeyCode::Key4,\n\n \"Digit5\" => VirtualKeyCode::Key5,\n\n \"Digit6\" => VirtualKeyCode::Key6,\n\n \"Digit7\" => VirtualKeyCode::Key7,\n\n \"Digit8\" => VirtualKeyCode::Key8,\n\n \"Digit9\" => VirtualKeyCode::Key9,\n\n \"Digit0\" => VirtualKeyCode::Key0,\n\n \"KeyA\" => VirtualKeyCode::A,\n\n \"KeyB\" => VirtualKeyCode::B,\n\n \"KeyC\" => VirtualKeyCode::C,\n\n \"KeyD\" => VirtualKeyCode::D,\n\n \"KeyE\" => VirtualKeyCode::E,\n\n \"KeyF\" => VirtualKeyCode::F,\n\n \"KeyG\" => VirtualKeyCode::G,\n\n \"KeyH\" => VirtualKeyCode::H,\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 70, "score": 167385.84494711438 }, { "content": "pub fn scan_code<T: JsSerialize>(event: &T) -> ScanCode {\n\n let key_code = js! 
( return @{event}.keyCode; );\n\n\n\n key_code\n\n .try_into()\n\n .expect(\"The which value should be a number\")\n\n}\n\n\n", "file_path": "src/platform_impl/web/stdweb/event.rs", "rank": 71, "score": 165061.1126931795 }, { "content": "pub fn is_fullscreen(canvas: &CanvasElement) -> bool {\n\n match document().fullscreen_element() {\n\n Some(elem) => {\n\n let raw: Element = canvas.clone().into();\n\n raw == elem\n\n }\n\n None => false,\n\n }\n\n}\n\n\n\npub type RawCanvasType = CanvasElement;\n", "file_path": "src/platform_impl/web/stdweb/mod.rs", "rank": 72, "score": 163190.4319909021 }, { "content": "pub fn get_desktop_rect() -> RECT {\n\n unsafe {\n\n let left = winuser::GetSystemMetrics(winuser::SM_XVIRTUALSCREEN);\n\n let top = winuser::GetSystemMetrics(winuser::SM_YVIRTUALSCREEN);\n\n RECT {\n\n left,\n\n top,\n\n right: left + winuser::GetSystemMetrics(winuser::SM_CXVIRTUALSCREEN),\n\n bottom: top + winuser::GetSystemMetrics(winuser::SM_CYVIRTUALSCREEN),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 73, "score": 162269.04582871372 }, { "content": "pub fn primary_monitor() -> MonitorHandle {\n\n const ORIGIN: POINT = POINT { x: 0, y: 0 };\n\n let hmonitor = unsafe { winuser::MonitorFromPoint(ORIGIN, winuser::MONITOR_DEFAULTTOPRIMARY) };\n\n MonitorHandle::new(hmonitor)\n\n}\n\n\n", "file_path": "src/platform_impl/windows/monitor.rs", "rank": 74, "score": 162269.04582871372 }, { "content": "pub fn os_capabilities() -> OSCapabilities {\n\n lazy_static! {\n\n static ref OS_CAPABILITIES: OSCapabilities = {\n\n let version: NSOperatingSystemVersion = unsafe {\n\n let process_info: id = msg_send![class!(NSProcessInfo), processInfo];\n\n let atleast_ios_8: BOOL = msg_send![\n\n process_info,\n\n respondsToSelector: sel!(operatingSystemVersion)\n\n ];\n\n // winit requires atleast iOS 8 because no one has put the time into supporting earlier os versions.\n\n // Older iOS versions are increasingly difficult to test. 
For example, Xcode 11 does not support\n\n // debugging on devices with an iOS version of less than 8. Another example, in order to use an iOS\n\n // simulator older than iOS 8, you must download an older version of Xcode (<9), and at least Xcode 7\n\n // has been tested to not even run on macOS 10.15 - Xcode 8 might?\n\n //\n\n // The minimum required iOS version is likely to grow in the future.\n\n assert!(\n\n atleast_ios_8 == YES,\n\n \"`winit` requires iOS version 8 or greater\"\n\n );\n\n msg_send![process_info, operatingSystemVersion]\n\n };\n\n version.into()\n\n };\n\n }\n\n OS_CAPABILITIES.clone()\n\n}\n", "file_path": "src/platform_impl/ios/app_state.rs", "rank": 75, "score": 161074.9248180102 }, { "content": "fn create_event_target_window() -> HWND {\n\n unsafe {\n\n let window = winuser::CreateWindowExW(\n\n winuser::WS_EX_NOACTIVATE | winuser::WS_EX_TRANSPARENT | winuser::WS_EX_LAYERED,\n\n THREAD_EVENT_TARGET_WINDOW_CLASS.as_ptr(),\n\n ptr::null_mut(),\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n libloaderapi::GetModuleHandleW(ptr::null()),\n\n ptr::null_mut(),\n\n );\n\n winuser::SetWindowLongPtrW(\n\n window,\n\n winuser::GWL_STYLE,\n\n // The window technically has to be visible to receive WM_PAINT messages (which are used\n\n // for delivering events during resizes), but it isn't displayed to the user because of\n\n // the LAYERED style.\n\n (winuser::WS_VISIBLE | winuser::WS_POPUP) as _,\n\n );\n\n window\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 76, "score": 159458.96329274605 }, { "content": "fn subclass_event_target_window<T>(\n\n window: HWND,\n\n event_loop_runner: EventLoopRunnerShared<T>,\n\n) -> Sender<T> {\n\n unsafe {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let subclass_input = ThreadMsgTargetSubclassInput {\n\n event_loop_runner,\n\n user_event_receiver: rx,\n\n };\n\n let input_ptr = Box::into_raw(Box::new(subclass_input));\n\n let subclass_result = 
commctrl::SetWindowSubclass(\n\n window,\n\n Some(thread_event_target_callback::<T>),\n\n THREAD_EVENT_TARGET_SUBCLASS_ID,\n\n input_ptr as DWORD_PTR,\n\n );\n\n assert_eq!(subclass_result, 1);\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 77, "score": 159458.96329274605 }, { "content": "/// Additional methods on `EventLoop` that are specific to Windows.\n\npub trait EventLoopExtWindows {\n\n /// Creates an event loop off of the main thread.\n\n ///\n\n /// # `Window` caveats\n\n ///\n\n /// Note that any `Window` created on the new thread will be destroyed when the thread\n\n /// terminates. Attempting to use a `Window` after its parent thread terminates has\n\n /// unspecified, although explicitly not undefined, behavior.\n\n fn new_any_thread() -> Self\n\n where\n\n Self: Sized;\n\n\n\n /// By default, winit on Windows will attempt to enable process-wide DPI awareness. If that's\n\n /// undesirable, you can create an `EventLoop` using this function instead.\n\n fn new_dpi_unaware() -> Self\n\n where\n\n Self: Sized;\n\n\n\n /// Creates a DPI-unaware event loop off of the main thread.\n\n ///\n", "file_path": "src/platform/windows.rs", "rank": 78, "score": 158505.53719834087 }, { "content": "pub fn is_fullscreen(canvas: &HtmlCanvasElement) -> bool {\n\n let window = window().expect(\"Failed to obtain window\");\n\n let document = window.document().expect(\"Failed to obtain document\");\n\n\n\n match document.fullscreen_element() {\n\n Some(elem) => {\n\n let raw: Element = canvas.clone().into();\n\n raw == elem\n\n }\n\n None => false,\n\n }\n\n}\n\n\n\npub type RawCanvasType = HtmlCanvasElement;\n", "file_path": "src/platform_impl/web/web_sys/mod.rs", "rank": 79, "score": 157939.57745687984 }, { "content": "/// Additional methods on `Icon` that are specific to Windows.\n\npub trait IconExtWindows: Sized {\n\n /// Create an icon from a file path.\n\n ///\n\n /// Specify `size` to load a specific icon size from the file, or `None` to load 
the default\n\n /// icon size from the file.\n\n ///\n\n /// In cases where the specified size does not exist in the file, Windows may perform scaling\n\n /// to get an icon of the desired size.\n\n fn from_path<P: AsRef<Path>>(path: P, size: Option<PhysicalSize<u32>>)\n\n -> Result<Self, BadIcon>;\n\n\n\n /// Create an icon from a resource embedded in this executable or library.\n\n ///\n\n /// Specify `size` to load a specific icon size from the file, or `None` to load the default\n\n /// icon size from the file.\n\n ///\n\n /// In cases where the specified size does not exist in the file, Windows may perform scaling\n\n /// to get an icon of the desired size.\n\n fn from_resource(ordinal: WORD, size: Option<PhysicalSize<u32>>) -> Result<Self, BadIcon>;\n\n}\n", "file_path": "src/platform/windows.rs", "rank": 80, "score": 157600.63512791018 }, { "content": "type GetPointerDeviceRects = unsafe extern \"system\" fn(\n\n device: HANDLE,\n\n pointerDeviceRect: *mut RECT,\n\n displayRect: *mut RECT,\n\n) -> BOOL;\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 81, "score": 156583.7160759093 }, { "content": "pub fn set_cursor_hidden(hidden: bool) {\n\n static HIDDEN: AtomicBool = AtomicBool::new(false);\n\n let changed = HIDDEN.swap(hidden, Ordering::SeqCst) ^ hidden;\n\n if changed {\n\n unsafe { winuser::ShowCursor(!hidden as BOOL) };\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 82, "score": 155025.17748657038 }, { "content": "pub fn get_window_rect(hwnd: HWND) -> Option<RECT> {\n\n unsafe { status_map(|rect| winuser::GetWindowRect(hwnd, rect)) }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 83, "score": 154906.78700312827 }, { "content": "pub fn unset_for_window(hwnd: HWND, icon_type: IconType) {\n\n unsafe {\n\n winuser::SendMessageW(hwnd, winuser::WM_SETICON, icon_type as WPARAM, 0 as LPARAM);\n\n }\n\n}\n", "file_path": "src/platform_impl/windows/icon.rs", "rank": 84, "score": 
152383.7888344308 }, { "content": "pub fn available_monitors() -> VecDeque<MonitorHandle> {\n\n let mut monitors: VecDeque<MonitorHandle> = VecDeque::new();\n\n unsafe {\n\n winuser::EnumDisplayMonitors(\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n Some(monitor_enum_proc),\n\n &mut monitors as *mut _ as LPARAM,\n\n );\n\n }\n\n monitors\n\n}\n\n\n", "file_path": "src/platform_impl/windows/monitor.rs", "rank": 85, "score": 152158.7541057471 }, { "content": "struct EventLoopWaker {\n\n timer: CFRunLoopTimerRef,\n\n}\n\n\n\nimpl Drop for EventLoopWaker {\n\n fn drop(&mut self) {\n\n unsafe {\n\n CFRunLoopTimerInvalidate(self.timer);\n\n CFRelease(self.timer as _);\n\n }\n\n }\n\n}\n\n\n\nimpl EventLoopWaker {\n\n fn new(rl: CFRunLoopRef) -> EventLoopWaker {\n\n extern \"C\" fn wakeup_main_loop(_timer: CFRunLoopTimerRef, _info: *mut c_void) {}\n\n unsafe {\n\n // Create a timer with a 0.1µs interval (1ns does not work) to mimic polling.\n\n // It is initially setup with a first fire time really far into the\n\n // future, but that gets changed to fire immediately in did_finish_launching\n", "file_path": "src/platform_impl/ios/app_state.rs", "rank": 86, "score": 151753.235464493 }, { "content": "pub fn wchar_to_string(wchar: &[wchar_t]) -> String {\n\n String::from_utf16_lossy(wchar).to_string()\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 87, "score": 151486.22414046305 }, { "content": "type ThreadExecFn = Box<Box<dyn FnMut()>>;\n\n\n\npub struct EventLoopProxy<T: 'static> {\n\n target_window: HWND,\n\n event_send: Sender<T>,\n\n}\n\nunsafe impl<T: Send + 'static> Send for EventLoopProxy<T> {}\n\n\n\nimpl<T: 'static> Clone for EventLoopProxy<T> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n target_window: self.target_window,\n\n event_send: self.event_send.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: 'static> EventLoopProxy<T> {\n\n pub fn send_event(&self, event: T) -> Result<(), EventLoopClosed<T>> {\n\n unsafe {\n\n if 
winuser::PostMessageW(self.target_window, *USER_EVENT_MSG_ID, 0, 0) != 0 {\n\n self.event_send.send(event).ok();\n\n Ok(())\n\n } else {\n\n Err(EventLoopClosed(event))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/event_loop.rs", "rank": 88, "score": 149786.49002463423 }, { "content": "pub fn enable_non_client_dpi_scaling(hwnd: HWND) {\n\n unsafe {\n\n if let Some(EnableNonClientDpiScaling) = *ENABLE_NON_CLIENT_DPI_SCALING {\n\n EnableNonClientDpiScaling(hwnd);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/dpi.rs", "rank": 89, "score": 149436.31687495045 }, { "content": "pub fn current_monitor(hwnd: HWND) -> MonitorHandle {\n\n let hmonitor = unsafe { winuser::MonitorFromWindow(hwnd, winuser::MONITOR_DEFAULTTONEAREST) };\n\n MonitorHandle::new(hmonitor)\n\n}\n\n\n\nimpl Window {\n\n pub fn available_monitors(&self) -> VecDeque<MonitorHandle> {\n\n available_monitors()\n\n }\n\n\n\n pub fn primary_monitor(&self) -> MonitorHandle {\n\n primary_monitor()\n\n }\n\n}\n\n\n\npub(crate) fn get_monitor_info(hmonitor: HMONITOR) -> Result<winuser::MONITORINFOEXW, io::Error> {\n\n let mut monitor_info: winuser::MONITORINFOEXW = unsafe { mem::zeroed() };\n\n monitor_info.cbSize = mem::size_of::<winuser::MONITORINFOEXW>() as DWORD;\n\n let status = unsafe {\n\n winuser::GetMonitorInfoW(\n", "file_path": "src/platform_impl/windows/monitor.rs", "rank": 90, "score": 148619.80075963974 }, { "content": "pub fn dpi_to_scale_factor(dpi: u32) -> f64 {\n\n dpi as f64 / BASE_DPI as f64\n\n}\n\n\n\npub unsafe fn hwnd_dpi(hwnd: HWND) -> u32 {\n\n let hdc = winuser::GetDC(hwnd);\n\n if hdc.is_null() {\n\n panic!(\"[winit] `GetDC` returned null!\");\n\n }\n\n if let Some(GetDpiForWindow) = *GET_DPI_FOR_WINDOW {\n\n // We are on Windows 10 Anniversary Update (1607) or later.\n\n match GetDpiForWindow(hwnd) {\n\n 0 => BASE_DPI, // 0 is returned if hwnd is invalid\n\n dpi => dpi as u32,\n\n }\n\n } else if let Some(GetDpiForMonitor) = 
*GET_DPI_FOR_MONITOR {\n\n // We are on Windows 8.1 or later.\n\n let monitor = winuser::MonitorFromWindow(hwnd, MONITOR_DEFAULTTONEAREST);\n\n if monitor.is_null() {\n\n return BASE_DPI;\n", "file_path": "src/platform_impl/windows/dpi.rs", "rank": 91, "score": 148619.80075963974 }, { "content": "fn create_app(activation_policy: ActivationPolicy) -> Option<id> {\n\n unsafe {\n\n let ns_app = NSApp();\n\n if ns_app == nil {\n\n None\n\n } else {\n\n use self::NSApplicationActivationPolicy::*;\n\n ns_app.setActivationPolicy_(match activation_policy {\n\n ActivationPolicy::Regular => NSApplicationActivationPolicyRegular,\n\n ActivationPolicy::Accessory => NSApplicationActivationPolicyAccessory,\n\n ActivationPolicy::Prohibited => NSApplicationActivationPolicyProhibited,\n\n });\n\n ns_app.finishLaunching();\n\n Some(ns_app)\n\n }\n\n }\n\n}\n\n\n\nunsafe fn create_view(\n\n ns_window: id,\n", "file_path": "src/platform_impl/macos/window.rs", "rank": 92, "score": 148057.098673484 }, { "content": "fn widestring(src: &'static str) -> Vec<u16> {\n\n OsStr::new(src)\n\n .encode_wide()\n\n .chain(Some(0).into_iter())\n\n .collect()\n\n}\n", "file_path": "src/platform_impl/windows/dark_mode.rs", "rank": 93, "score": 147706.59709752662 }, { "content": "fn handle_event_proxy(\n\n event_handler: &mut Box<dyn EventHandler>,\n\n control_flow: ControlFlow,\n\n proxy: EventProxy,\n\n) {\n\n match proxy {\n\n EventProxy::DpiChangedProxy {\n\n suggested_size,\n\n scale_factor,\n\n window_id,\n\n } => handle_hidpi_proxy(\n\n event_handler,\n\n control_flow,\n\n suggested_size,\n\n scale_factor,\n\n window_id,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/ios/app_state.rs", "rank": 94, "score": 146765.0667927615 }, { "content": "/// Attempt to set dark mode on a window, if necessary.\n\n/// Returns true if dark mode was set, false if not.\n\npub fn try_dark_mode(hwnd: HWND) -> bool {\n\n if *DARK_MODE_SUPPORTED {\n\n let is_dark_mode = should_use_dark_mode();\n\n\n\n let 
theme_name = if is_dark_mode {\n\n DARK_THEME_NAME.as_ptr()\n\n } else {\n\n LIGHT_THEME_NAME.as_ptr()\n\n };\n\n\n\n let status = unsafe { uxtheme::SetWindowTheme(hwnd, theme_name as _, std::ptr::null()) };\n\n\n\n status == S_OK && set_dark_mode_for_window(hwnd, is_dark_mode)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/dark_mode.rs", "rank": 95, "score": 145901.77102761943 }, { "content": "#[inline]\n\nfn make_wid(s: &wl_surface::WlSurface) -> WindowId {\n\n WindowId(s.as_ref().c_ptr() as usize)\n\n}\n", "file_path": "src/platform_impl/linux/wayland/mod.rs", "rank": 96, "score": 145602.62760209944 }, { "content": "pub fn main() {\n\n let event_loop = EventLoop::new();\n\n\n\n let window = WindowBuilder::new()\n\n .with_title(\"A fantastic window!\")\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n #[cfg(feature = \"web-sys\")]\n\n {\n\n use winit::platform::web::WindowExtWebSys;\n\n\n\n let canvas = window.canvas();\n\n\n\n let window = web_sys::window().unwrap();\n\n let document = window.document().unwrap();\n\n let body = document.body().unwrap();\n\n\n\n body.append_child(&canvas)\n\n .expect(\"Append canvas to HTML body\");\n", "file_path": "examples/web.rs", "rank": 97, "score": 145511.21903801252 }, { "content": "pub fn adjust_window_rect(hwnd: HWND, rect: RECT) -> Option<RECT> {\n\n unsafe {\n\n let style = winuser::GetWindowLongW(hwnd, winuser::GWL_STYLE);\n\n let style_ex = winuser::GetWindowLongW(hwnd, winuser::GWL_EXSTYLE);\n\n adjust_window_rect_with_styles(hwnd, style as _, style_ex as _, rect)\n\n }\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 98, "score": 145126.12493961697 }, { "content": "pub fn char_to_keycode(c: char) -> Option<VirtualKeyCode> {\n\n // We only translate keys that are affected by keyboard layout.\n\n //\n\n // Note that since keys are translated in a somewhat \"dumb\" way (reading character)\n\n // there is a concern that some combination, i.e. 
Cmd+char, causes the wrong\n\n // letter to be received, and so we receive the wrong key.\n\n //\n\n // Implementation reference: https://github.com/WebKit/webkit/blob/82bae82cf0f329dbe21059ef0986c4e92fea4ba6/Source/WebCore/platform/cocoa/KeyEventCocoa.mm#L626\n\n Some(match c {\n\n 'a' | 'A' => VirtualKeyCode::A,\n\n 'b' | 'B' => VirtualKeyCode::B,\n\n 'c' | 'C' => VirtualKeyCode::C,\n\n 'd' | 'D' => VirtualKeyCode::D,\n\n 'e' | 'E' => VirtualKeyCode::E,\n\n 'f' | 'F' => VirtualKeyCode::F,\n\n 'g' | 'G' => VirtualKeyCode::G,\n\n 'h' | 'H' => VirtualKeyCode::H,\n\n 'i' | 'I' => VirtualKeyCode::I,\n\n 'j' | 'J' => VirtualKeyCode::J,\n\n 'k' | 'K' => VirtualKeyCode::K,\n", "file_path": "src/platform_impl/macos/event.rs", "rank": 99, "score": 143351.19825795718 } ]
Rust
crates/oci-distribution/src/reference.rs
thomastaylor312/krustlet
a99a507bda67595a07713860e1c13ab40f977bad
use std::convert::{Into, TryFrom}; use std::error::Error; use std::fmt; use std::path::PathBuf; use std::str::FromStr; const NAME_TOTAL_LENGTH_MAX: usize = 255; #[derive(Debug, PartialEq, Eq)] pub enum ParseError { DigestInvalidFormat, NameContainsUppercase, NameEmpty, NameNotCanonical, NameTooLong, ReferenceInvalidFormat, TagInvalidFormat, } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ParseError::DigestInvalidFormat => write!(f, "invalid digest format"), ParseError::NameContainsUppercase => write!(f, "repository name must be lowercase"), ParseError::NameEmpty => write!(f, "repository name must have at least one component"), ParseError::NameNotCanonical => write!(f, "repository name must be canonical"), ParseError::NameTooLong => write!( f, "repository name must not be more than {} characters", NAME_TOTAL_LENGTH_MAX ), ParseError::ReferenceInvalidFormat => write!(f, "invalid reference format"), ParseError::TagInvalidFormat => write!(f, "invalid tag format"), } } } impl Error for ParseError {} #[derive(Clone, Hash, PartialEq, Eq)] pub struct Reference { registry: String, repository: String, tag: Option<String>, digest: Option<String>, } impl Reference { pub fn registry(&self) -> &str { &self.registry } pub fn repository(&self) -> &str { &self.repository } pub fn tag(&self) -> Option<&str> { self.tag.as_deref() } pub fn digest(&self) -> Option<&str> { self.digest.as_deref() } fn full_name(&self) -> String { let mut path = PathBuf::new(); path.push(self.registry()); path.push(self.repository()); path.to_str().unwrap_or("").to_owned() } pub fn whole(&self) -> String { let mut s = self.full_name(); if let Some(t) = self.tag() { if s != "" { s.push_str(":"); } s.push_str(t); } if let Some(d) = self.digest() { if s != "" { s.push_str("@"); } s.push_str(d); } s } } impl std::fmt::Debug for Reference { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.whole()) } } impl 
fmt::Display for Reference { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.whole()) } } impl FromStr for Reference { type Err = ParseError; fn from_str(s: &str) -> Result<Self, Self::Err> { Reference::try_from(s) } } impl TryFrom<String> for Reference { type Error = ParseError; fn try_from(s: String) -> Result<Self, Self::Error> { let repo_start = s .find('/') .ok_or_else(|| ParseError::ReferenceInvalidFormat)?; let first_colon = s[repo_start + 1..].find(':').map(|i| repo_start + i); let digest_start = s[repo_start + 1..].find('@').map(|i| repo_start + i + 1); let tag_start = match (digest_start, first_colon) { (Some(ds), Some(fc)) => { if fc < ds { Some(fc) } else { None } } (None, Some(fc)) => Some(fc), _ => None, } .map(|i| i + 1); let repo_end = match (digest_start, tag_start) { (Some(_), Some(ts)) => ts, (None, Some(ts)) => ts, (Some(ds), None) => ds, (None, None) => s.len(), }; let tag: Option<String> = match (digest_start, tag_start) { (Some(d), Some(t)) => Some(s[t + 1..d].to_string()), (None, Some(t)) => Some(s[t + 1..].to_string()), _ => None, }; let digest: Option<String> = match digest_start { Some(c) => Some(s[c + 1..].to_string()), None => None, }; let reference = Reference { registry: s[..repo_start].to_string(), repository: s[repo_start + 1..repo_end].to_string(), tag, digest, }; if reference.repository().len() > NAME_TOTAL_LENGTH_MAX { return Err(ParseError::NameTooLong); } Ok(reference) } } impl TryFrom<&str> for Reference { type Error = ParseError; fn try_from(string: &str) -> Result<Self, Self::Error> { TryFrom::try_from(string.to_owned()) } } impl Into<String> for Reference { fn into(self) -> String { self.whole() } } #[cfg(test)] mod test { use super::*; mod parse { use super::*; fn must_parse(image: &str) -> Reference { Reference::try_from(image).expect("could not parse reference") } fn validate_registry_and_repository(reference: &Reference) { assert_eq!(reference.registry(), "webassembly.azurecr.io"); 
assert_eq!(reference.repository(), "hello"); } fn validate_tag(reference: &Reference) { assert_eq!(reference.tag(), Some("v1")); } fn validate_digest(reference: &Reference) { assert_eq!( reference.digest(), Some("sha256:f29dba55022eec8c0ce1cbfaaed45f2352ab3fbbb1cdcd5ea30ca3513deb70c9") ); } #[test] fn name_too_long() { assert_eq!( Reference::try_from(format!( "webassembly.azurecr.io/{}", (0..256).map(|_| "a").collect::<String>() )) .err(), Some(ParseError::NameTooLong) ); } #[test] fn owned_string() { let reference = Reference::from_str("webassembly.azurecr.io/hello:v1") .expect("could not parse reference"); validate_registry_and_repository(&reference); validate_tag(&reference); assert_eq!(reference.digest(), None); } #[test] fn tag_only() { let reference = must_parse("webassembly.azurecr.io/hello:v1"); validate_registry_and_repository(&reference); validate_tag(&reference); assert_eq!(reference.digest(), None); } #[test] fn digest_only() { let reference = must_parse("webassembly.azurecr.io/hello@sha256:f29dba55022eec8c0ce1cbfaaed45f2352ab3fbbb1cdcd5ea30ca3513deb70c9"); validate_registry_and_repository(&reference); validate_digest(&reference); assert_eq!(reference.tag(), None); } #[test] fn tag_and_digest() { let reference = must_parse("webassembly.azurecr.io/hello:v1@sha256:f29dba55022eec8c0ce1cbfaaed45f2352ab3fbbb1cdcd5ea30ca3513deb70c9"); validate_registry_and_repository(&reference); validate_tag(&reference); validate_digest(&reference); } #[test] fn no_tag_or_digest() { let reference = must_parse("webassembly.azurecr.io/hello"); validate_registry_and_repository(&reference); assert_eq!(reference.tag(), None); assert_eq!(reference.digest(), None); } #[test] fn missing_slash_char() { Reference::try_from("webassembly.azurecr.io:hello") .expect_err("no slash should produce an error"); } } }
use std::convert::{Into, TryFrom}; use std::error::Error; use std::fmt; use std::path::PathBuf; use std::str::FromStr; const NAME_TOTAL_LENGTH_MAX: usize = 255; #[derive(Debug, PartialEq, Eq)] pub enum ParseError { DigestInvalidFormat, NameContainsUppercase, NameEmpty, NameNotCanonical, NameTooLong, ReferenceInvalidFormat, TagInvalidFormat, } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ParseError::DigestInvalidFormat => write!(f, "invalid digest format"), ParseError::NameContainsUppercase => write!(f, "repository name must be lowercase"), ParseError::NameEmpty => write!(f, "repository name must have at least one component"), ParseError::NameNotCanonical => write!(f, "repository name must be canonical"), ParseError::NameTooLong => write!( f, "repository name must not be more than {} characters", NAME_TOTAL_LENGTH_MAX ), ParseError::ReferenceInvalidFormat => write!(f, "invalid reference format"), ParseError::TagInvalidFormat => write!(f, "invalid tag format"), } } } impl Error for ParseError {} #[derive(Clone, Hash, PartialEq, Eq)] pub struct Reference { registry: String, repository: String, tag: Option<String>, digest: Option<String>, } impl Reference { pub fn registry(&self) -> &str { &self.registry } pub fn repository(&self) -> &str { &self.repository } pub fn tag(&self) -> Option<&str> { self.tag.as_deref() } pub fn digest(&self) -> Option<&str> { self.digest.as_deref() } fn full_name(&self) -> String { let mut path = PathBuf::new(); path.push(self.registry()); path.push(self.repository()); path.to_str().unwrap_or("").to_owned() } pub fn whole(&self) -> String { let mut s = self.full_name(); if let Some(t) = self.tag() { if s != "" { s.push_str(":"); } s.push_str(t); } if let Some(d) = self.digest() { if s != "" { s.push_str("@"); } s.push_str(d); } s } } impl std::fmt::Debug for Reference { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.whole()) } } impl 
fmt::Display for Reference { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.whole()) } } impl FromStr for Reference { type Err = ParseError; fn from_str(s: &str) -> Result<Self, Self::Err> { Reference::try_from(s) } } impl TryFrom<String> for Reference { type Error = ParseError; fn try_from(s: String) -> Result<Self, Self::Error> { let repo_start = s .find('/') .ok_or_else(|| ParseError::ReferenceInvalidFormat)?; let first_colon = s[repo_start + 1..].find(':').map(|i| repo_start + i); let digest_start = s[repo_start + 1..].find('@').map(|i| repo_start + i + 1); let tag_start = match (digest_start, first_colon) { (Some(ds), Some(fc)) => { if fc < ds { Some(fc) } else { None } } (None, Some(fc)) => Some(fc), _ => None, } .map(|i| i + 1); let repo_end = match (digest_start, tag_start) { (Some(_), Some(ts)) => ts, (None, Some(ts)) => ts, (Some(ds), None) => ds, (None, None) => s.len(), }; let tag: Option<String> = match (digest_start, tag_start) { (Some(d), Some(t)) => Some(s[t + 1..d].to_string()), (None, Some(t)) => Some(s[t + 1..].to_string()), _ => None, }; let digest: Option<String> = match digest_start { Some(c) => Some(s[c + 1..].to_string()), None => None, }; let reference = Reference { registry: s[..repo_start].to_string(), repository: s[repo_start + 1..repo_end].to_string(), tag, digest, }; if reference.repository().len() > NAME_TOTAL_LENGTH_MAX { return Err(ParseError::NameTooLong); } Ok(reference) } } impl TryFrom<&str> for Reference { type Error = ParseError; fn try_from(string: &str) -> Result<Self, Self::Error> { TryFrom::try_from(string.to_owned()) } } impl Into<String> for Reference { fn into(self) -> String { self.whole() } } #[cfg(test)] mod test { use super::*; mod parse { use super::*; fn must_parse(image: &str) -> Reference { Reference::try_from(image).expect("could not parse reference") } fn validate_registry_and_repository(reference: &Reference) { assert_eq!(reference.registry(), "webassembly.azurecr.io"); 
assert_eq!(reference.repository(), "hello"); } fn validate_tag(reference: &Reference) { assert_eq!(reference.tag(), Some("v1")); }
#[test] fn name_too_long() { assert_eq!( Reference::try_from(format!( "webassembly.azurecr.io/{}", (0..256).map(|_| "a").collect::<String>() )) .err(), Some(ParseError::NameTooLong) ); } #[test] fn owned_string() { let reference = Reference::from_str("webassembly.azurecr.io/hello:v1") .expect("could not parse reference"); validate_registry_and_repository(&reference); validate_tag(&reference); assert_eq!(reference.digest(), None); } #[test] fn tag_only() { let reference = must_parse("webassembly.azurecr.io/hello:v1"); validate_registry_and_repository(&reference); validate_tag(&reference); assert_eq!(reference.digest(), None); } #[test] fn digest_only() { let reference = must_parse("webassembly.azurecr.io/hello@sha256:f29dba55022eec8c0ce1cbfaaed45f2352ab3fbbb1cdcd5ea30ca3513deb70c9"); validate_registry_and_repository(&reference); validate_digest(&reference); assert_eq!(reference.tag(), None); } #[test] fn tag_and_digest() { let reference = must_parse("webassembly.azurecr.io/hello:v1@sha256:f29dba55022eec8c0ce1cbfaaed45f2352ab3fbbb1cdcd5ea30ca3513deb70c9"); validate_registry_and_repository(&reference); validate_tag(&reference); validate_digest(&reference); } #[test] fn no_tag_or_digest() { let reference = must_parse("webassembly.azurecr.io/hello"); validate_registry_and_repository(&reference); assert_eq!(reference.tag(), None); assert_eq!(reference.digest(), None); } #[test] fn missing_slash_char() { Reference::try_from("webassembly.azurecr.io:hello") .expect_err("no slash should produce an error"); } } }
fn validate_digest(reference: &Reference) { assert_eq!( reference.digest(), Some("sha256:f29dba55022eec8c0ce1cbfaaed45f2352ab3fbbb1cdcd5ea30ca3513deb70c9") ); }
function_block-full_function
[ { "content": "fn config_file_path_str(file_name: impl AsRef<std::path::Path>) -> String {\n\n config_dir().join(file_name).to_str().unwrap().to_owned()\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 0, "score": 299495.3732690705 }, { "content": "fn warn_if_premature_exit(process: &mut OwnedChildProcess, name: &str) {\n\n match process.exited() {\n\n Err(e) => eprintln!(\n\n \"FAILED checking kubelet process {} exit state ({})\",\n\n name, e\n\n ),\n\n Ok(false) => eprintln!(\"WARNING: Kubelet process {} exited prematurely\", name),\n\n _ => (),\n\n };\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 1, "score": 229535.3901439607 }, { "content": "fn invalid_config_value_error(e: anyhow::Error, value_name: &str) -> anyhow::Error {\n\n let context = format!(\"invalid {} in configuration file: {}\", value_name, e);\n\n e.context(context)\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 2, "score": 226866.53833590978 }, { "content": "fn awaiting_user_csr_approval(cert_description: &str, csr_name: &str) -> String {\n\n format!(\n\n \"{} certificate requires manual approval. 
Run kubectl certificate approve {}\",\n\n cert_description, csr_name\n\n )\n\n}\n\n\n", "file_path": "crates/kubelet/src/bootstrapping/mod.rs", "rank": 3, "score": 223274.6897388757 }, { "content": "fn name_of(ns: &impl Metadata<Ty = ObjectMeta>) -> String {\n\n ns.metadata().name.as_ref().unwrap().to_owned()\n\n}\n\n\n", "file_path": "tests/podsmiter/src/main.rs", "rank": 4, "score": 215020.87942344171 }, { "content": "fn local_object_references(names: &[&str]) -> Vec<LocalObjectReference> {\n\n names\n\n .iter()\n\n .map(|n| LocalObjectReference {\n\n name: Some(n.to_string()),\n\n })\n\n .collect()\n\n}\n", "file_path": "tests/pod_builder.rs", "rank": 5, "score": 214620.4835927432 }, { "content": "#[cfg(any(feature = \"cli\", feature = \"docs\"))]\n\nfn split_one_label(in_string: &str) -> Option<(String, String)> {\n\n let mut splitter = in_string.splitn(2, '=');\n\n\n\n match splitter.next() {\n\n Some(\"\") | None => None,\n\n Some(key) => match splitter.next() {\n\n Some(val) => Some((key.to_string(), val.to_string())),\n\n None => Some((key.to_string(), String::new())),\n\n },\n\n }\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 6, "score": 211132.0359886689 }, { "content": "fn send(mut sender: Sender<(String, Status)>, name: String, status: Status, cx: &mut Context<'_>) {\n\n loop {\n\n if let Poll::Ready(r) = sender.poll_ready(cx) {\n\n if r.is_ok() {\n\n sender\n\n .try_send((name, status))\n\n .expect(\"Possible deadlock, exiting\");\n\n return;\n\n }\n\n trace!(\"Receiver for status showing as closed: {:?}\", r);\n\n }\n\n trace!(\n\n \"Channel for container {} not ready for send. 
Attempting again\",\n\n name\n\n );\n\n }\n\n}\n", "file_path": "crates/wasi-provider/src/wasi_runtime.rs", "rank": 7, "score": 206191.4171215713 }, { "content": "pub fn pod_key<N: AsRef<str>, T: AsRef<str>>(namespace: N, pod_name: T) -> String {\n\n format!(\"{}:{}\", namespace.as_ref(), pod_name.as_ref())\n\n}\n", "file_path": "crates/kubelet/src/pod/handle.rs", "rank": 8, "score": 203580.53727737657 }, { "content": "fn smite_pods_failure_error(namespace: &str, errors: &[anyhow::Error]) -> anyhow::Error {\n\n let message_list = errors\n\n .iter()\n\n .map(|e| format!(\" - {}\", e))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n anyhow::anyhow!(\n\n \"- Namespace {}: pod delete(s) failed:\\n{}\",\n\n namespace,\n\n message_list\n\n )\n\n}\n\n\n", "file_path": "tests/podsmiter/src/main.rs", "rank": 9, "score": 198425.46497783516 }, { "content": "fn parse_auth(secret: &Secret, registry_name: &str) -> Option<RegistryAuth> {\n\n if let Some(data) = secret.data.as_ref() {\n\n parse_auth_from_secret_data(data, registry_name)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/kubelet/src/secret/mod.rs", "rank": 10, "score": 197773.5049034433 }, { "content": "fn completed_csr_approval(cert_description: &str) -> String {\n\n format!(\n\n \"received {} certificate approval: continuing\",\n\n cert_description\n\n )\n\n}\n\n\n", "file_path": "crates/kubelet/src/bootstrapping/mod.rs", "rank": 11, "score": 194287.26710348274 }, { "content": "fn all_or_none(files: Vec<std::path::PathBuf>) -> AllOrNone {\n\n let (exist, missing): (Vec<_>, Vec<_>) = files.iter().partition(|f| f.exists());\n\n\n\n if missing.is_empty() {\n\n return AllOrNone::AllExist;\n\n }\n\n\n\n for f in exist {\n\n if matches!(std::fs::remove_file(f), Err(_)) {\n\n return AllOrNone::Error;\n\n }\n\n }\n\n\n\n AllOrNone::NoneExist\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 12, "score": 187317.25195736182 }, { "content": "fn delete_csr(csr_name: impl AsRef<str>) -> 
std::io::Result<std::process::Child> {\n\n std::process::Command::new(\"kubectl\")\n\n .args(&[\"delete\", \"csr\", csr_name.as_ref()])\n\n .stderr(std::process::Stdio::piped())\n\n .stdout(std::process::Stdio::piped())\n\n .spawn()\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 13, "score": 185818.57266514248 }, { "content": "/// Build the map of allowable field_ref values.\n\n///\n\n/// The Downward API only supports a small selection of fields. This\n\n/// provides those fields.\n\nfn field_map(pod: &Pod) -> HashMap<String, String> {\n\n let mut map: HashMap<String, String> = HashMap::new();\n\n map.insert(\"metadata.name\".into(), pod.name().to_owned());\n\n map.insert(\"metadata.namespace\".into(), pod.namespace().to_owned());\n\n map.insert(\n\n \"spec.serviceAccountName\".into(),\n\n pod.service_account_name().unwrap_or_default().to_owned(),\n\n );\n\n map.insert(\n\n \"status.hostIP\".into(),\n\n pod.host_ip().unwrap_or_default().to_owned(),\n\n );\n\n map.insert(\n\n \"status.podIP\".into(),\n\n pod.pod_ip().unwrap_or_default().to_owned(),\n\n );\n\n pod.labels().iter().for_each(|(k, v)| {\n\n info!(\"adding {} to labels\", k);\n\n map.insert(format!(\"metadata.labels.{}\", k), v.clone());\n\n });\n", "file_path": "crates/kubelet/src/provider/mod.rs", "rank": 14, "score": 183313.4480516587 }, { "content": "#[derive(Debug)]\n\nenum Type {\n\n ConfigMap,\n\n Secret,\n\n HostPath,\n\n}\n\n\n\n/// A smart wrapper around the location of a volume on the host system. If this is a ConfigMap or\n\n/// Secret volume, dropping this reference will clean up the temporary volume. [AsRef] and\n\n/// [std::ops::Deref] are implemented for this type so you can still use it like a normal PathBuf\n\n#[derive(Debug)]\n\npub struct Ref {\n\n host_path: PathBuf,\n\n volume_type: Type,\n\n}\n\n\n\nimpl Ref {\n\n /// Resolves the volumes for a pod, including preparing temporary directories containing the\n\n /// contents of secrets and configmaps. 
Returns a HashMap of volume names to a PathBuf for the\n\n /// directory where the volume is mounted\n\n pub async fn volumes_from_pod(\n", "file_path": "crates/kubelet/src/volume/mod.rs", "rank": 15, "score": 177545.23183058845 }, { "content": "fn pod_dir_name(pod: &Pod) -> String {\n\n format!(\"{}-{}\", pod.name(), pod.namespace())\n\n}\n\n\n", "file_path": "crates/kubelet/src/volume/mod.rs", "rank": 16, "score": 176258.3464892569 }, { "content": "fn clean_up_csr(csr_name: &str) -> anyhow::Result<()> {\n\n println!(\"Cleaning up approved CSR {}\", csr_name);\n\n let clean_up_process = std::process::Command::new(\"kubectl\")\n\n .args(&[\"delete\", \"csr\", csr_name])\n\n .stderr(std::process::Stdio::piped())\n\n .stdout(std::process::Stdio::piped())\n\n .output()?;\n\n if !clean_up_process.status.success() {\n\n Err(anyhow::anyhow!(\n\n \"Error cleaning up CSR {}: {}\",\n\n csr_name,\n\n String::from_utf8(clean_up_process.stderr).unwrap()\n\n ))\n\n } else {\n\n println!(\"Cleaned up approved CSR {}\", csr_name);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 17, "score": 173308.84988789356 }, { "content": "fn approve_csr(csr_name: &str) -> anyhow::Result<()> {\n\n println!(\"Approving CSR {}\", csr_name);\n\n let approve_process = std::process::Command::new(\"kubectl\")\n\n .args(&[\"certificate\", \"approve\", csr_name])\n\n .stderr(std::process::Stdio::piped())\n\n .stdout(std::process::Stdio::piped())\n\n .output()?;\n\n if !approve_process.status.success() {\n\n Err(anyhow::anyhow!(\n\n \"Error approving CSR {}: {}\",\n\n csr_name,\n\n String::from_utf8(approve_process.stderr).unwrap()\n\n ))\n\n } else {\n\n println!(\"Approved CSR {}\", csr_name);\n\n clean_up_csr(csr_name)\n\n }\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 18, "score": 173308.84988789356 }, { "content": "/// Define a new coordination.Lease object for Kubernetes\n\n///\n\n/// The lease tells Kubernetes that we want to claim the node for 
a while\n\n/// longer. And then tells Kubernetes how long it should wait before\n\n/// expecting a new lease.\n\nfn lease_definition(node_uid: &str, node_name: &str) -> serde_json::Value {\n\n serde_json::json!(\n\n {\n\n \"apiVersion\": \"coordination.k8s.io/v1\",\n\n \"kind\": \"Lease\",\n\n \"metadata\": {\n\n \"name\": node_name,\n\n \"ownerReferences\": [\n\n {\n\n \"apiVersion\": \"v1\",\n\n \"kind\": \"Node\",\n\n \"name\": node_name,\n\n \"uid\": node_uid\n\n }\n\n ]\n\n },\n\n \"spec\": lease_spec_definition(node_name)\n\n }\n\n )\n\n}\n\n\n", "file_path": "crates/kubelet/src/node/mod.rs", "rank": 19, "score": 172270.98871682346 }, { "content": "// Some hostnames (particularly local ones) can have uppercase letters, which is\n\n// disallowed by the DNS spec used in kubernetes naming. This sanitizes those\n\n// names\n\nfn sanitize_hostname(hostname: &str) -> String {\n\n // TODO: Are there other sanitation steps we should do here?\n\n hostname.to_lowercase()\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 20, "score": 169871.61012156156 }, { "content": "/// Defines the labels that will be applied to this node\n\n///\n\n/// Default values and passed node-labels arguments are injected by config.\n\nfn node_labels_definition(arch: &str, config: &Config, builder: &mut Builder) {\n\n // Add mandatory static labels\n\n builder.add_label(\"beta.kubernetes.io/os\", \"linux\");\n\n builder.add_label(\"kubernetes.io/os\", \"linux\");\n\n builder.add_label(\"type\", \"krustlet\");\n\n // add the mandatory labels that are dependent on injected values\n\n builder.add_label(\"beta.kubernetes.io/arch\", arch);\n\n builder.add_label(\"kubernetes.io/arch\", arch);\n\n builder.add_label(\"kubernetes.io/hostname\", &config.hostname);\n\n\n\n let k8s_namespace = \"kubernetes.io\";\n\n // namespaces managed by this method - do not allow user injection\n\n let managed_namespace_labels = [\n\n \"beta.kubernetes.io/arch\",\n\n \"beta.kubernetes.io/os\",\n\n 
\"kubernetes.io/arch\",\n\n \"kubernetes.io/hostname\",\n\n \"kubernetes.io/os\",\n\n \"type\",\n\n ];\n", "file_path": "crates/kubelet/src/node/mod.rs", "rank": 21, "score": 168917.93144693327 }, { "content": "pub fn wasmerciser_pod(\n\n pod_name: &str,\n\n inits: Vec<WasmerciserContainerSpec>,\n\n containers: Vec<WasmerciserContainerSpec>,\n\n test_volumes: Vec<WasmerciserVolumeSpec>,\n\n architecture: &str,\n\n) -> anyhow::Result<PodLifetimeOwner> {\n\n let init_container_specs: Vec<_> = inits\n\n .iter()\n\n .map(|spec| wasmerciser_container(spec, &test_volumes).unwrap())\n\n .collect();\n\n let app_container_specs: Vec<_> = containers\n\n .iter()\n\n .map(|spec| wasmerciser_container(spec, &test_volumes).unwrap())\n\n .collect();\n\n\n\n let volume_maps: Vec<_> = test_volumes\n\n .iter()\n\n .map(|spec| wasmerciser_volume(spec).unwrap())\n\n .collect();\n", "file_path": "tests/pod_builder.rs", "rank": 22, "score": 166417.36041461737 }, { "content": "fn object_of_tuples(source: &[(String, String)]) -> serde_json::Value {\n\n let mut map = serde_json::Map::new();\n\n\n\n for (key, value) in source {\n\n map.insert(\n\n key.to_string(),\n\n serde_json::Value::String(value.to_string()),\n\n );\n\n }\n\n\n\n serde_json::Value::Object(map)\n\n}\n", "file_path": "tests/test_resource_manager.rs", "rank": 23, "score": 163949.5242830437 }, { "content": "/// Defines a new coordiation lease for Kubernetes\n\n///\n\n/// We set the lease times, the lease duration, and the node name.\n\nfn lease_spec_definition(node_name: &str) -> serde_json::Value {\n\n // Workaround for https://github.com/deislabs/krustlet/issues/5\n\n // In the future, use LeaseSpec rather than a JSON value\n\n let now = Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Micros, true);\n\n\n\n serde_json::json!(\n\n {\n\n \"holderIdentity\": node_name,\n\n \"acquireTime\": now,\n\n \"renewTime\": now,\n\n \"leaseDurationSeconds\": 300\n\n }\n\n )\n\n}\n\n\n", "file_path": "crates/kubelet/src/node/mod.rs", 
"rank": 24, "score": 163385.57594141416 }, { "content": "fn smite_failure_error(errors: &[anyhow::Error]) -> anyhow::Error {\n\n let message_list = errors\n\n .iter()\n\n .map(|e| format!(\"{}\", e))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n anyhow::anyhow!(\n\n \"Some integration test resources were not cleaned up:\\n{}\",\n\n message_list\n\n )\n\n}\n\n\n", "file_path": "tests/podsmiter/src/main.rs", "rank": 25, "score": 162707.4910387966 }, { "content": "pub fn key_from_pod(pod: &Pod) -> String {\n\n #[allow(deprecated)]\n\n pod_key(pod.namespace(), pod.name())\n\n}\n\n\n\n/// Generates a unique human readable key for storing a handle to a pod if you\n\n/// already have the namespace and pod name.\n\n#[deprecated(\n\n since = \"0.6.0\",\n\n note = \"Please use the new kubelet::pod::PodKey type. This function will be removed in 0.7\"\n\n)]\n", "file_path": "crates/kubelet/src/pod/handle.rs", "rank": 26, "score": 159082.92383837473 }, { "content": "fn config_dir() -> std::path::PathBuf {\n\n let home_dir = dirs::home_dir().expect(\"Can't get home dir\"); // TODO: allow override of config dir\n\n home_dir.join(\".krustlet/config\")\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 27, "score": 156347.9343102617 }, { "content": "/// Kubernetes' view of environment variables is an unordered map of string to string.\n\ntype EnvVars = std::collections::HashMap<String, String>;\n\n\n\n/// A [kubelet::handle::Handle] implementation for a wascc actor\n\npub struct ActorHandle {\n\n /// The public key of the wascc Actor that will be stopped\n\n pub key: String,\n\n host: Arc<Mutex<WasccHost>>,\n\n volumes: Vec<VolumeBinding>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl StopHandler for ActorHandle {\n\n async fn stop(&mut self) -> anyhow::Result<()> {\n\n debug!(\"stopping wascc instance {}\", self.key);\n\n let host = self.host.clone();\n\n let key = self.key.clone();\n\n let volumes: Vec<VolumeBinding> = self.volumes.drain(0..).collect();\n\n 
tokio::task::spawn_blocking(move || {\n\n let lock = host.lock().unwrap();\n\n lock.remove_actor(&key)\n", "file_path": "crates/wascc-provider/src/lib.rs", "rank": 28, "score": 154500.47869981223 }, { "content": "enum AllOrNone {\n\n AllExist,\n\n NoneExist,\n\n Error,\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 29, "score": 151210.0105007508 }, { "content": "fn return_with_code(code: StatusCode, body: String) -> Result<Response<Body>, Infallible> {\n\n let mut response = Response::new(body.into());\n\n *response.status_mut() = code;\n\n Ok(response)\n\n}\n", "file_path": "crates/kubelet/src/webserver/mod.rs", "rank": 30, "score": 148188.63518426067 }, { "content": "fn is_e2e_namespace(namespace: &str) -> bool {\n\n E2E_NS_PREFIXES\n\n .iter()\n\n .any(|prefix| namespace.starts_with(prefix))\n\n}\n\n\n\nasync fn smite_namespace_pods(client: kube::Client, namespace: &str) -> anyhow::Result<()> {\n\n println!(\"Finding pods in namespace {}...\", namespace);\n\n\n\n let podapi: Api<Pod> = Api::namespaced(client.clone(), namespace);\n\n let pods = podapi.list(&ListParams::default()).await?;\n\n\n\n println!(\"Deleting pods in namespace {}...\", namespace);\n\n\n\n let delete_operations = pods.iter().map(|p| smite_pod(&podapi, p));\n\n let delete_results = futures::future::join_all(delete_operations).await;\n\n let (_, errors) = delete_results.partition_success();\n\n\n\n if !errors.is_empty() {\n\n return Err(smite_pods_failure_error(namespace, &errors));\n", "file_path": "tests/podsmiter/src/main.rs", "rank": 31, "score": 147820.78973172978 }, { "content": "fn confirm_smite(namespaces: &[String]) -> bool {\n\n println!(\n\n \"Smite these namespaces and all resources within them: {}? 
(y/n) \",\n\n namespaces.join(\", \")\n\n );\n\n let mut response = String::new();\n\n match std::io::stdin().read_line(&mut response) {\n\n Err(e) => {\n\n eprintln!(\"Error reading response: {}\", e);\n\n confirm_smite(namespaces)\n\n }\n\n Ok(_) => response.starts_with('y') || response.starts_with('Y'),\n\n }\n\n}\n\n\n\n// TODO: deduplicate with oneclick\n\n\n", "file_path": "tests/podsmiter/src/main.rs", "rank": 32, "score": 147576.51529076678 }, { "content": "fn mount_setting_for(key: &str, items_to_mount: &Option<Vec<KeyToPath>>) -> ItemMount {\n\n match items_to_mount {\n\n None => ItemMount::MountAt(key.to_string()),\n\n Some(items) => ItemMount::from(\n\n items\n\n .iter()\n\n .find(|kp| kp.key == key)\n\n .map(|kp| kp.path.to_string()),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "crates/kubelet/src/volume/mod.rs", "rank": 33, "score": 147501.8074679131 }, { "content": "fn write_kubelet_log_to_file(\n\n kubelet_name: &str,\n\n log: &mut impl std::io::Read,\n\n file_path: std::path::PathBuf,\n\n) {\n\n let mut file_result = std::fs::File::create(file_path);\n\n match file_result {\n\n Ok(ref mut file) => {\n\n let write_result = std::io::copy(log, file);\n\n match write_result {\n\n Ok(_) => (),\n\n Err(e) => eprintln!(\"Can't capture {} output: {}\", kubelet_name, e),\n\n }\n\n }\n\n Err(e) => {\n\n eprintln!(\"Can't capture {} output: {}\", kubelet_name, e);\n\n }\n\n }\n\n}\n", "file_path": "tests/oneclick/src/main.rs", "rank": 34, "score": 136790.2147295588 }, { "content": "fn parse_script_from_env_var_value(var_value: Result<String, std::env::VarError>) -> Vec<String> {\n\n match var_value {\n\n Ok(script_text) => words(script_text),\n\n Err(_) => vec![],\n\n }\n\n}\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 35, "score": 130564.56758056427 }, { "content": "/// Returns kubeconfig path from specified environment variable.\n\nfn path() -> Option<PathBuf> {\n\n env::var_os(KUBECONFIG)\n\n .map(PathBuf::from)\n\n 
.or_else(default_path)\n\n}\n\n\n", "file_path": "crates/kubelet/src/kubeconfig.rs", "rank": 36, "score": 129623.36125038011 }, { "content": "/// Create basic Pod status patch.\n\npub fn make_status(phase: Phase, reason: &str) -> anyhow::Result<serde_json::Value> {\n\n Ok(serde_json::json!(\n\n {\n\n \"metadata\": {\n\n \"resourceVersion\": \"\",\n\n },\n\n \"status\": {\n\n \"phase\": phase,\n\n \"reason\": reason,\n\n }\n\n }\n\n ))\n\n}\n\n\n\n/// Describe the status of a workload.\n\n#[derive(Clone, Debug, Default)]\n\npub struct Status {\n\n /// Allows a provider to set a custom message, otherwise, kubelet will infer\n\n /// a message from the container statuses\n\n pub message: StatusMessage,\n", "file_path": "crates/kubelet/src/pod/status.rs", "rank": 37, "score": 128805.31137996833 }, { "content": "/// Provides methods for accessing `ContainerMap` elements by name.\n\npub trait ContainerMapByName<V> {\n\n /// Gets a mutable reference to the value associated with the container\n\n /// with the given name.\n\n fn get_mut_by_name(&mut self, name: String) -> Option<&mut V>;\n\n /// Whether the map contains a `ContainerKey` with the given name.\n\n fn contains_key_name(&self, name: &str) -> bool;\n\n}\n\n\n\nimpl<V> ContainerMapByName<V> for ContainerMap<V> {\n\n fn get_mut_by_name(&mut self, name: String) -> Option<&mut V> {\n\n // TODO: borrow checker objected to any of the more natural forms\n\n let app_key = ContainerKey::App(name.clone());\n\n if self.contains_key(&app_key) {\n\n self.get_mut(&app_key)\n\n } else {\n\n self.get_mut(&ContainerKey::Init(name))\n\n }\n\n }\n\n\n\n fn contains_key_name(&self, name: &str) -> bool {\n", "file_path": "crates/kubelet/src/container/mod.rs", "rank": 38, "score": 128419.75452596613 }, { "content": "// Attempt to get the node IP address in the following order (this follows the\n\n// same pattern as the Kubernetes kubelet):\n\n// 1. Lookup the IP from node name by DNS\n\n// 2. 
Try to get the IP from the network interface used as default gateway\n\n// (unimplemented for now because it doesn't work across platforms)\n\nfn default_node_ip(hostname: &mut String, preferred_ip_family: &IpAddr) -> anyhow::Result<IpAddr> {\n\n // NOTE: As of right now, we don't have cloud providers. In the future if\n\n // that is the case, we will need to add logic for looking up the IP and\n\n // hostname using the cloud provider as they do in the kubelet\n\n // To use the local resolver, we need to add a port to the hostname. Doesn't\n\n // matter which one, it just needs to be a valid socket address\n\n hostname.push_str(\":80\");\n\n Ok(hostname\n\n .to_socket_addrs()?\n\n .find(|i| {\n\n !i.ip().is_loopback()\n\n && !i.ip().is_multicast()\n\n && !i.ip().is_unspecified()\n\n && is_same_ip_family(&i.ip(), preferred_ip_family)\n\n })\n\n .ok_or_else(|| {\n\n anyhow::anyhow!(\n\n \"unable to find default IP address for node. Please specify a node IP manually\"\n\n )\n\n })?\n\n .ip())\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 39, "score": 127709.23597178186 }, { "content": "fn digest_header_value(response: &reqwest::Response) -> anyhow::Result<String> {\n\n let headers = response.headers();\n\n let digest_header = headers.get(\"Docker-Content-Digest\");\n\n match digest_header {\n\n None => Err(anyhow::anyhow!(\"resgistry did not return a digest header\")),\n\n Some(hv) => hv\n\n .to_str()\n\n .map(|s| s.to_string())\n\n .map_err(anyhow::Error::new),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::convert::TryFrom;\n\n\n\n const HELLO_IMAGE_NO_TAG: &str = \"webassembly.azurecr.io/hello-wasm\";\n\n const HELLO_IMAGE_TAG: &str = \"webassembly.azurecr.io/hello-wasm:v1\";\n\n const HELLO_IMAGE_DIGEST: &str = \"webassembly.azurecr.io/hello-wasm@sha256:51d9b231d5129e3ffc267c9d455c49d789bf3167b611a07ab6e4b3304c96b0e7\";\n", "file_path": "crates/oci-distribution/src/client.rs", "rank": 40, "score": 126936.6111282262 
}, { "content": "fn words(text: String) -> Vec<String> {\n\n text.split(' ')\n\n .filter(|s| !s.is_empty())\n\n .map(|s| s.to_owned())\n\n .collect()\n\n}\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 41, "score": 126565.21178690335 }, { "content": "fn parse_comma_separated(source: String) -> Vec<String> {\n\n source.split(',').map(|s| s.trim().to_owned()).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn builder_from_json_string(json: &str) -> anyhow::Result<ConfigBuilder> {\n\n ConfigBuilder::from_reader(json.as_bytes())\n\n }\n\n\n\n fn fallbacks() -> ConfigBuilderFallbacks {\n\n ConfigBuilderFallbacks {\n\n node_ip: |_, _| IpAddr::V4(std::net::Ipv4Addr::new(4, 4, 4, 4)),\n\n hostname: || \"fallback-hostname\".to_owned(),\n\n data_dir: || PathBuf::from(\"/fallback/data/dir\"),\n\n cert_path: |_| PathBuf::from(\"/fallback/cert/path\"),\n\n key_path: |_| PathBuf::from(\"/fallback/key/path\"),\n\n bootstrap_file: || PathBuf::from(\"/fallback/bootstrap_file.txt\"),\n", "file_path": "crates/kubelet/src/config.rs", "rank": 42, "score": 124896.32476587333 }, { "content": "fn wait_for_tls_certificate_approval(stdout: impl std::io::Read) -> anyhow::Result<()> {\n\n let reader = std::io::BufReader::new(stdout);\n\n for (_, line) in reader.lines().enumerate() {\n\n match line {\n\n Ok(line_text) => {\n\n println!(\"Kubelet printed: {}\", line_text);\n\n if line_text == \"BOOTSTRAP: received TLS certificate approval: continuing\" {\n\n return Ok(());\n\n }\n\n let re = regex::Regex::new(r\"^BOOTSTRAP: TLS certificate requires manual approval. 
Run kubectl certificate approve (\\S+)$\").unwrap();\n\n match re.captures(&line_text) {\n\n None => (),\n\n Some(captures) => {\n\n let csr_name = &captures[1];\n\n approve_csr(csr_name)?\n\n }\n\n }\n\n }\n\n Err(e) => eprintln!(\"Error reading kubelet stdout: {}\", e),\n\n }\n\n }\n\n println!(\"End of kubelet output with no approval\");\n\n Err(anyhow::anyhow!(\"End of kubelet output with no approval\"))\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 43, "score": 122286.02650598987 }, { "content": "fn notify_bootstrap(message: String) {\n\n println!(\"BOOTSTRAP: {}\", message);\n\n}\n", "file_path": "src/krustlet-wascc.rs", "rank": 44, "score": 120381.62954791609 }, { "content": "fn notify_bootstrap(message: String) {\n\n println!(\"BOOTSTRAP: {}\", message);\n\n}\n", "file_path": "src/krustlet-wasi.rs", "rank": 45, "score": 120381.62954791609 }, { "content": "struct WasccTestResourceCleaner {}\n\n\n\nimpl Drop for WasccTestResourceCleaner {\n\n fn drop(&mut self) {\n\n let t = std::thread::spawn(move || {\n\n let mut rt =\n\n tokio::runtime::Runtime::new().expect(\"Failed to create Tokio runtime for cleanup\");\n\n rt.block_on(clean_up_wascc_test_resources());\n\n });\n\n\n\n t.join().expect(\"Failed to clean up wasCC test resources\");\n\n }\n\n}\n\n\n\nasync fn clean_up_wascc_test_resources() -> () {\n\n let client = kube::Client::try_default()\n\n .await\n\n .expect(\"Failed to create client\");\n\n\n\n let pods: Api<Pod> = Api::namespaced(client.clone(), \"default\");\n", "file_path": "tests/integration_tests.rs", "rank": 46, "score": 120324.42580005879 }, { "content": "fn parse_byte_string_json(byte_string: &k8s_openapi::ByteString) -> Option<serde_json::Value> {\n\n serde_json::from_slice(&byte_string.0).ok()\n\n}\n\n\n", "file_path": "crates/kubelet/src/secret/mod.rs", "rank": 47, "score": 116867.73728269218 }, { "content": "fn get_script() -> Vec<String> {\n\n let command_line_script = get_script_from_command_line();\n\n if 
command_line_script.is_empty() {\n\n get_script_from_environment_variable()\n\n } else {\n\n command_line_script\n\n }\n\n}\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 48, "score": 116285.79928989688 }, { "content": "fn get_script_from_environment_variable() -> Vec<String> {\n\n parse_script_from_env_var_value(std::env::var(\"WASMERCISER_RUN_SCRIPT\"))\n\n}\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 49, "score": 112609.04193316156 }, { "content": "fn get_script_from_command_line() -> Vec<String> {\n\n // TODO: un-hardwire the module file name\n\n let original_args: Vec<String> = env::args().collect();\n\n if !original_args.is_empty() && original_args[0] == \"wasmerciser.wasm\" {\n\n original_args[1..].to_vec()\n\n } else {\n\n original_args\n\n }\n\n}\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 50, "score": 112609.04193316156 }, { "content": "fn default_hostname() -> anyhow::Result<String> {\n\n Ok(hostname::get()?\n\n .into_string()\n\n .map_err(|_| anyhow::anyhow!(\"invalid utf-8 hostname string\"))?)\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 51, "score": 112181.57928928103 }, { "content": "fn in_ci_environment() -> bool {\n\n std::env::var(\"KRUSTLET_TEST_ENV\") == Ok(\"ci\".to_owned())\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_wascc_provider() -> Result<(), Box<dyn std::error::Error>> {\n\n let client = kube::Client::try_default().await?;\n\n\n\n let nodes: Api<Node> = Api::all(client);\n\n\n\n let node = nodes.get(\"krustlet-wascc\").await?;\n\n\n\n verify_wascc_node(node).await;\n\n\n\n let client: kube::Client = nodes.into();\n\n\n\n let _cleaner = WasccTestResourceCleaner {};\n\n\n\n let pods: Api<Pod> = Api::namespaced(client.clone(), \"default\");\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 52, "score": 110688.03689539785 }, { "content": "enum BootstrapReadiness {\n\n AlreadyBootstrapped,\n\n NeedBootstrapAndApprove,\n\n 
NeedManualCleanup,\n\n}\n\n\n\nconst EXIT_CODE_TESTS_PASSED: i32 = 0;\n\nconst EXIT_CODE_TESTS_FAILED: i32 = 1;\n\nconst EXIT_CODE_NEED_MANUAL_CLEANUP: i32 = 2;\n\nconst EXIT_CODE_BUILD_FAILED: i32 = 3;\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 53, "score": 108105.51442955919 }, { "content": "fn fail_with(message: String) -> anyhow::Result<()> {\n\n eprintln!(\"ERR: {}\", message);\n\n Err(anyhow::Error::msg(message))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::cell::RefCell;\n\n use std::rc::Rc;\n\n\n\n struct FakeOutput {\n\n pub name: String,\n\n pub content: String,\n\n }\n\n\n\n struct FakeEnvironment {\n\n pub outputs: Rc<RefCell<Vec<FakeOutput>>>,\n\n }\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 54, "score": 106885.05810883534 }, { "content": "#[derive(serde::Deserialize, Default)]\n\nstruct RegistryToken {\n\n #[serde(alias = \"access_token\")]\n\n token: String,\n\n}\n\n\n\nimpl RegistryToken {\n\n fn bearer_token(&self) -> String {\n\n format!(\"Bearer {}\", self.token)\n\n }\n\n}\n\n\n", "file_path": "crates/oci-distribution/src/client.rs", "rank": 55, "score": 106390.65737165764 }, { "content": "fn wasmerciser_volume(\n\n spec: &WasmerciserVolumeSpec,\n\n) -> anyhow::Result<(Volume, Option<Arc<tempfile::TempDir>>)> {\n\n match spec.source {\n\n WasmerciserVolumeSource::HostPath => {\n\n let tempdir = Arc::new(tempfile::tempdir()?);\n\n\n\n let volume: Volume = serde_json::from_value(json!({\n\n \"name\": spec.volume_name,\n\n \"hostPath\": {\n\n \"path\": tempdir.path()\n\n }\n\n }))?;\n\n\n\n Ok((volume, Some(tempdir)))\n\n }\n\n WasmerciserVolumeSource::ConfigMap(name) => {\n\n let volume: Volume = serde_json::from_value(json!({\n\n \"name\": spec.volume_name,\n\n \"configMap\": {\n", "file_path": "tests/pod_builder.rs", "rank": 56, "score": 106045.16951343589 }, { "content": "fn main() {\n\n println!(\"Ensuring all binaries are built...\");\n\n\n\n let build_result = 
build_workspace();\n\n\n\n match build_result {\n\n Ok(()) => {\n\n println!(\"Build succeeded\");\n\n }\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n eprintln!(\"Build FAILED\");\n\n std::process::exit(EXIT_CODE_BUILD_FAILED);\n\n }\n\n }\n\n\n\n println!(\"Preparing for bootstrap...\");\n\n\n\n let readiness = prepare_for_bootstrap();\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 57, "score": 106045.16951343589 }, { "content": "fn wasmerciser_container(\n\n spec: &WasmerciserContainerSpec,\n\n volumes: &Vec<WasmerciserVolumeSpec>,\n\n) -> anyhow::Result<Container> {\n\n let volume_mounts: Vec<_> = volumes\n\n .iter()\n\n .map(|v| wasmerciser_volume_mount(v).unwrap())\n\n .collect();\n\n let registry = if spec.use_private_registry {\n\n PRIVATE_TEST_REGISTRY\n\n } else {\n\n DEFAULT_TEST_REGISTRY\n\n };\n\n let container: Container = serde_json::from_value(json!({\n\n \"name\": spec.name,\n\n \"image\": format!(\"{}.azurecr.io/wasmerciser:v0.2.0\", registry),\n\n \"args\": spec.args,\n\n \"volumeMounts\": volume_mounts,\n\n }))?;\n\n Ok(container)\n\n}\n\n\n", "file_path": "tests/pod_builder.rs", "rank": 58, "score": 106045.16951343589 }, { "content": "enum ItemMount {\n\n MountAt(String),\n\n DoNotMount,\n\n}\n\n\n\nimpl From<Option<String>> for ItemMount {\n\n fn from(option: Option<String>) -> Self {\n\n match option {\n\n None => ItemMount::DoNotMount,\n\n Some(path) => ItemMount::MountAt(path),\n\n }\n\n }\n\n}\n", "file_path": "crates/kubelet/src/volume/mod.rs", "rank": 59, "score": 105620.26933532141 }, { "content": "struct OwnedChildProcess {\n\n terminated: bool,\n\n child: std::process::Child,\n\n}\n\n\n\nimpl OwnedChildProcess {\n\n fn terminate(&mut self) -> anyhow::Result<()> {\n\n match self.child.kill().and_then(|_| self.child.wait()) {\n\n Ok(_) => {\n\n self.terminated = true;\n\n Ok(())\n\n }\n\n Err(e) => Err(anyhow::anyhow!(\n\n \"Failed to terminate spawned kubelet process: {}\",\n\n e\n\n )),\n\n }\n\n }\n\n\n\n fn exited(&mut 
self) -> anyhow::Result<bool> {\n", "file_path": "tests/oneclick/src/main.rs", "rank": 60, "score": 105385.53902547862 }, { "content": "#[cfg(any(feature = \"cli\", feature = \"docs\"))]\n\nfn default_config_file_path() -> PathBuf {\n\n dirs::home_dir()\n\n .unwrap()\n\n .join(\".krustlet/config/config.json\")\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 61, "score": 104421.71694190695 }, { "content": "fn parse_auth_from_json_creds(json_creds: &serde_json::Value) -> Option<RegistryAuth> {\n\n let username = json_creds.get(\"username\");\n\n let password = json_creds.get(\"password\");\n\n // TODO: my test creds also included an entry \"auth\" - should we return this? (e.g. bearer auth?)\n\n match (username, password) {\n\n (Some(serde_json::Value::String(u)), Some(serde_json::Value::String(p))) => {\n\n Some(RegistryAuth::Basic(u.to_owned(), p.to_owned()))\n\n }\n\n _ => None,\n\n }\n\n}\n", "file_path": "crates/kubelet/src/secret/mod.rs", "rank": 62, "score": 104254.25944405675 }, { "content": "/// Returns kubeconfig path from `$HOME/.kube/config`.\n\nfn default_path() -> Option<PathBuf> {\n\n home_dir().map(|h| h.join(\".kube\").join(\"config\"))\n\n}\n", "file_path": "crates/kubelet/src/kubeconfig.rs", "rank": 63, "score": 103797.4714424444 }, { "content": "fn launch_kubelet(\n\n name: &str,\n\n kubeconfig_suffix: &str,\n\n kubelet_port: i32,\n\n need_csr: bool,\n\n) -> anyhow::Result<OwnedChildProcess> {\n\n // run the kubelet as a background process using the\n\n // same cmd line as in the justfile:\n\n // KUBECONFIG=$(eval echo $CONFIG_DIR)/kubeconfig-wasi cargo run --bin krustlet-wasi {{FLAGS}} -- --node-name krustlet-wasi --port 3001 --bootstrap-file $(eval echo $CONFIG_DIR)/bootstrap.conf --cert-file $(eval echo $CONFIG_DIR)/krustlet-wasi.crt --private-key-file $(eval echo $CONFIG_DIR)/krustlet-wasi.key\n\n let bootstrap_conf = config_file_path_str(\"bootstrap.conf\");\n\n let cert = config_file_path_str(format!(\"{}.crt\", name));\n\n 
let private_key = config_file_path_str(format!(\"{}.key\", name));\n\n let kubeconfig = config_file_path_str(format!(\"kubeconfig-{}\", kubeconfig_suffix));\n\n let port_arg = format!(\"{}\", kubelet_port);\n\n\n\n let repo_root = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let bin_path = repo_root.join(\"target/debug\").join(name);\n\n\n\n let mut launch_kubelet_process = std::process::Command::new(bin_path)\n\n .args(&[\n", "file_path": "tests/oneclick/src/main.rs", "rank": 64, "score": 103708.17034378044 }, { "content": "struct CompositeStore {\n\n base: Arc<dyn Store + Send + Sync>,\n\n interceptor: Arc<dyn InterceptingStore + Send + Sync>,\n\n}\n\n\n\n#[async_trait]\n\nimpl Store for CompositeStore {\n\n async fn get(\n\n &self,\n\n image_ref: &Reference,\n\n pull_policy: PullPolicy,\n\n auth: &RegistryAuth,\n\n ) -> anyhow::Result<Vec<u8>> {\n\n if self.interceptor.intercepts(image_ref) {\n\n self.interceptor.get(image_ref, pull_policy, auth).await\n\n } else {\n\n self.base.get(image_ref, pull_policy, auth).await\n\n }\n\n }\n\n}\n", "file_path": "crates/kubelet/src/store/composite/mod.rs", "rank": 65, "score": 103033.28349485839 }, { "content": "fn run_test_suite() -> anyhow::Result<()> {\n\n println!(\"Launching integration tests\");\n\n let test_process = std::process::Command::new(\"cargo\")\n\n .args(&[\"test\", \"--test\", \"integration_tests\"])\n\n .stderr(std::process::Stdio::piped())\n\n .stdout(std::process::Stdio::piped())\n\n .spawn()?;\n\n println!(\"Integration tests running\");\n\n // TODO: consider streaming progress\n\n // TODO: capture pod logs: probably requires cooperation from the test\n\n // process\n\n let test_process_result = test_process.wait_with_output()?;\n\n if test_process_result.status.success() {\n\n println!(\"Integration tests PASSED\");\n\n Ok(())\n\n } else {\n\n let stdout = String::from_utf8(test_process_result.stdout)?;\n\n eprintln!(\"{}\", stdout);\n\n let stderr = 
String::from_utf8(test_process_result.stderr)?;\n\n eprintln!(\"{}\", stderr);\n\n eprintln!(\"Integration tests FAILED\");\n\n Err(anyhow::anyhow!(stderr))\n\n }\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 66, "score": 102623.28191301764 }, { "content": "#[derive(Debug)]\n\nstruct PortAllocationError;\n\n\n\nimpl std::fmt::Display for PortAllocationError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"all ports are currently in use\")\n\n }\n\n}\n\n\n\nimpl std::error::Error for PortAllocationError {\n\n fn description(&self) -> &str {\n\n \"all ports are currently in use\"\n\n }\n\n}\n\n\n\nasync fn find_available_port(\n\n port_map: &Arc<Mutex<BTreeMap<u16, PodKey>>>,\n\n pod: &Pod,\n\n) -> Result<u16, PortAllocationError> {\n\n let pod_key = PodKey::from(pod);\n\n let mut empty_port: BTreeSet<u16> = BTreeSet::new();\n", "file_path": "crates/wascc-provider/src/states/starting.rs", "rank": 67, "score": 102167.49455760655 }, { "content": "#[async_trait]\n\npub trait Storer {\n\n /// Saves a module's data into the backing store indexed by its image `Reference`.\n\n async fn store(&mut self, image_ref: &Reference, image_data: ImageData) -> anyhow::Result<()>;\n\n\n\n /// Get a module's data from the backing store given its image `Reference`.\n\n ///\n\n /// The implementation must fail if the image is not present\n\n /// locally. 
`Storer` handles only reading and writing its own backing store;\n\n /// remote fetch is handled at the `Store` level.\n\n async fn get_local(&self, image_ref: &Reference) -> anyhow::Result<Vec<u8>>;\n\n\n\n /// Whether the specified module is already present in the backing store.\n\n async fn is_present(&self, image_ref: &Reference) -> bool;\n\n\n\n /// Whether the specified module is already present in the backing store with the specified digest.\n\n async fn is_present_with_digest(&self, image_ref: &Reference, digest: String) -> bool;\n\n}\n", "file_path": "crates/kubelet/src/store/mod.rs", "rank": 68, "score": 101725.00810179602 }, { "content": "fn capture_kubelet_logs(\n\n kubelet_name: &str,\n\n kubelet_process: &mut std::process::Child,\n\n destination: std::path::PathBuf,\n\n) {\n\n let stdout = kubelet_process.stdout.as_mut().unwrap();\n\n let stdout_path = destination.with_extension(\"stdout.txt\");\n\n write_kubelet_log_to_file(kubelet_name, stdout, stdout_path);\n\n\n\n let stderr = kubelet_process.stderr.as_mut().unwrap();\n\n let stderr_path = destination.with_extension(\"stderr.txt\");\n\n write_kubelet_log_to_file(kubelet_name, stderr, stderr_path);\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 69, "score": 101504.35551664513 }, { "content": "fn gen_kubeconfig(\n\n ca_data: String,\n\n server: String,\n\n client_cert_data: k8s_openapi::ByteString,\n\n client_key: String,\n\n) -> anyhow::Result<Vec<u8>> {\n\n let json = serde_json::json!({\n\n \"kind\": \"Config\",\n\n \"apiVersion\": \"v1\",\n\n \"preferences\": {},\n\n \"clusters\": [{\n\n \"name\": \"krustlet\",\n\n \"cluster\": {\n\n \"certificate-authority-data\": ca_data,\n\n \"server\": server,\n\n }\n\n }],\n\n \"users\":[{\n\n \"name\": \"krustlet\",\n\n \"user\": {\n", "file_path": "crates/kubelet/src/bootstrapping/mod.rs", "rank": 70, "score": 101242.59698097467 }, { "content": "fn default_key_path(data_dir: &PathBuf) -> PathBuf {\n\n 
data_dir.join(\"config/krustlet.key\")\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 71, "score": 100988.96850296485 }, { "content": "fn default_cert_path(data_dir: &PathBuf) -> PathBuf {\n\n data_dir.join(\"config/krustlet.crt\")\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 72, "score": 100988.96850296485 }, { "content": "fn main() {\n\n println!(\"hello from stdout!\");\n\n eprintln!(\"hello from stderr!\");\n\n for (key, value) in env::vars() {\n\n println!(\"{}={}\", key, value);\n\n }\n\n let args: Vec<String> = env::args().collect();\n\n println!(\"Args are: {:?}\", args);\n\n println!(\"\");\n\n\n\n // open a path using the hostpath volume\n\n let path = Path::new(\"/mnt/storage/bacon_ipsum.txt\");\n\n let display = path.display();\n\n\n\n let mut file = match File::open(&path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", display,\n\n why),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).expect(format!(\"could not read {}\", display).as_str());\n\n println!(\"{}\", contents);\n\n}\n", "file_path": "demos/wasi/hello-world-rust/src/main.rs", "rank": 73, "score": 100317.80134851307 }, { "content": "fn volume_path_map(\n\n container: &Container,\n\n volumes: &HashMap<String, Ref>,\n\n) -> anyhow::Result<HashMap<PathBuf, Option<PathBuf>>> {\n\n if let Some(volume_mounts) = container.volume_mounts().as_ref() {\n\n volume_mounts\n\n .iter()\n\n .map(|vm| -> anyhow::Result<(PathBuf, Option<PathBuf>)> {\n\n // Check the volume exists first\n\n let vol = volumes.get(&vm.name).ok_or_else(|| {\n\n anyhow::anyhow!(\n\n \"no volume with the name of {} found for container {}\",\n\n vm.name,\n\n container.name()\n\n )\n\n })?;\n\n let mut guest_path = PathBuf::from(&vm.mount_path);\n\n if let Some(sub_path) = &vm.sub_path {\n\n guest_path.push(sub_path);\n\n }\n", "file_path": "crates/wasi-provider/src/states/starting.rs", "rank": 74, "score": 98350.80679183101 }, { 
"content": "/// Provides a way to overlay an `InterceptingStore` so that the\n\n/// interceptor handles the references it can, and the base store\n\n/// handles all other references.\n\npub trait ComposableStore {\n\n /// Creates a `Store` identical to the implementer except that\n\n /// 'get' requests are offered to the interceptor first.\n\n fn with_override(\n\n self,\n\n interceptor: Arc<dyn InterceptingStore + Send + Sync>,\n\n ) -> Arc<dyn Store + Send + Sync>;\n\n}\n\n\n\nimpl ComposableStore for Arc<dyn Store + Send + Sync> {\n\n fn with_override(\n\n self,\n\n interceptor: Arc<dyn InterceptingStore + Send + Sync>,\n\n ) -> Arc<dyn Store + Send + Sync> {\n\n Arc::new(CompositeStore {\n\n base: self,\n\n interceptor,\n\n })\n\n }\n\n}\n", "file_path": "crates/kubelet/src/store/composite/mod.rs", "rank": 75, "score": 97662.26472606351 }, { "content": "fn parse_auth_from_secret_value(\n\n secret_value: &k8s_openapi::ByteString,\n\n registry_name: &str,\n\n) -> Option<RegistryAuth> {\n\n // We are intereted in secret_value if it is of the form\n\n // {\n\n // \"auths\": {\n\n // \"reg1\": { ... },\n\n // \"reg2\": { ... 
}\n\n // }\n\n // }\n\n parse_byte_string_json(secret_value)\n\n .and_then(|value| parse_auth_from_json_value(&value, registry_name))\n\n}\n\n\n", "file_path": "crates/kubelet/src/secret/mod.rs", "rank": 76, "score": 97205.54830594081 }, { "content": "fn parse_auth_from_json_value(\n\n json_value: &serde_json::Value,\n\n registry_name: &str,\n\n) -> Option<RegistryAuth> {\n\n json_value\n\n .get(\"auths\")\n\n .and_then(|auths| auths.get(registry_name))\n\n .and_then(|creds| parse_auth_from_json_creds(creds))\n\n}\n\n\n", "file_path": "crates/kubelet/src/secret/mod.rs", "rank": 77, "score": 97205.54830594081 }, { "content": "fn parse_auth_from_secret_data(\n\n secret_data: &std::collections::BTreeMap<String, k8s_openapi::ByteString>,\n\n registry_name: &str,\n\n) -> Option<RegistryAuth> {\n\n secret_data\n\n .values()\n\n .find_map(|v| parse_auth_from_secret_value(v, registry_name))\n\n}\n\n\n", "file_path": "crates/kubelet/src/secret/mod.rs", "rank": 78, "score": 97205.54830594081 }, { "content": "#[async_trait]\n\npub trait Store: Sync {\n\n /// Get a module's data given its image `Reference`.\n\n async fn get(\n\n &self,\n\n image_ref: &Reference,\n\n pull_policy: PullPolicy,\n\n auth: &RegistryAuth,\n\n ) -> anyhow::Result<Vec<u8>>;\n\n\n\n /// Fetch all container modules for a given `Pod` storing the name of the\n\n /// container and the module's data as key/value pairs in a hashmap.\n\n ///\n\n /// This will fetch all of the container modules in parallel.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This panics if any of the pod's containers do not have an image associated with them\n\n async fn fetch_pod_modules(\n\n &self,\n\n pod: &Pod,\n", "file_path": "crates/kubelet/src/store/mod.rs", "rank": 79, "score": 96959.35872368126 }, { "content": "#[async_trait]\n\npub trait Provider: Sized {\n\n /// The state that is passed between Pod state handlers.\n\n type PodState: 'static + Send + Sync + AsyncDrop;\n\n\n\n /// The initial state for Pod state machine.\n\n 
type InitialState: Default + State<Self::PodState>;\n\n\n\n /// The a state to handle early Pod termination.\n\n type TerminatedState: Default + State<Self::PodState>;\n\n\n\n /// Arch returns a string specifying what architecture this provider supports\n\n const ARCH: &'static str;\n\n\n\n /// Allows provider to populate node information.\n\n async fn node(&self, _builder: &mut Builder) -> anyhow::Result<()> {\n\n Ok(())\n\n }\n\n\n\n /// Hook to allow provider to introduced shared state into Pod state.\n\n // TODO: Is there a way to provide a default implementation of this if Self::PodState: Default?\n", "file_path": "crates/kubelet/src/provider/mod.rs", "rank": 80, "score": 96959.35872368126 }, { "content": "pub fn health(_h: codec::core::HealthRequest) -> HandlerResult<()> {\n\n Ok(())\n\n}\n", "file_path": "demos/wascc/greet/src/lib.rs", "rank": 81, "score": 96615.92223309906 }, { "content": "fn prepare_for_bootstrap() -> BootstrapReadiness {\n\n let host_name = hostname::get()\n\n .expect(\"Can't get host name\")\n\n .into_string()\n\n .expect(\"Can't get host name\");\n\n\n\n let cert_paths: Vec<_> = vec![\n\n \"krustlet-wasi.crt\",\n\n \"krustlet-wasi.key\",\n\n \"krustlet-wascc.crt\",\n\n \"krustlet-wascc.key\",\n\n ]\n\n .iter()\n\n .map(|f| config_dir().join(f))\n\n .collect();\n\n\n\n let status = all_or_none(cert_paths);\n\n\n\n match status {\n\n AllOrNone::AllExist => {\n", "file_path": "tests/oneclick/src/main.rs", "rank": 82, "score": 96433.87053768378 }, { "content": "fn run_tests(readiness: BootstrapReadiness) -> anyhow::Result<()> {\n\n let wasi_process_result = launch_kubelet(\n\n \"krustlet-wasi\",\n\n \"wasi\",\n\n 3001,\n\n matches!(readiness, BootstrapReadiness::NeedBootstrapAndApprove),\n\n );\n\n let wascc_process_result = launch_kubelet(\n\n \"krustlet-wascc\",\n\n \"wascc\",\n\n 3000,\n\n matches!(readiness, BootstrapReadiness::NeedBootstrapAndApprove),\n\n );\n\n\n\n for process in &[&wasi_process_result, &wascc_process_result] {\n\n 
match process {\n\n Err(e) => {\n\n eprintln!(\"Error running kubelet process: {}\", e);\n\n return Err(anyhow::anyhow!(\"Error running kubelet process: {}\", e));\n\n }\n", "file_path": "tests/oneclick/src/main.rs", "rank": 83, "score": 95692.30246186766 }, { "content": "struct TestContext<E: Environment> {\n\n variables: HashMap<String, String>,\n\n environment: E,\n\n}\n\n\n\nimpl<E: Environment> TestContext<E> {\n\n fn new(environment: E) -> Self {\n\n TestContext {\n\n variables: Default::default(),\n\n environment,\n\n }\n\n }\n\n\n\n fn process_commands(&mut self, commands: Vec<String>) -> anyhow::Result<()> {\n\n for command_text in commands {\n\n self.process_command_text(command_text)?\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "demos/wasi/wasmerciser/src/main.rs", "rank": 84, "score": 95261.81475350482 }, { "content": "#[async_trait::async_trait]\n\npub trait BackoffStrategy: Send {\n\n /// Resets the strategy after a success.\n\n fn reset(&mut self);\n\n /// Gets how long to wait before retrying.\n\n fn next_duration(&mut self) -> Duration;\n\n /// Waits the prescribed amount of time (as per `next_duration`).\n\n async fn wait(&mut self) {\n\n tokio::time::delay_for(self.next_duration()).await\n\n }\n\n}\n\n\n\n/// A `BackoffStrategy` in which the durations increase exponentially\n\n/// until hitting a cap.\n\npub struct ExponentialBackoffStrategy {\n\n base_duration: Duration,\n\n cap: Duration,\n\n last_duration: Duration,\n\n}\n\n\n\nimpl Default for ExponentialBackoffStrategy {\n", "file_path": "crates/kubelet/src/backoff/mod.rs", "rank": 85, "score": 94981.76002225767 }, { "content": "fn build_workspace() -> anyhow::Result<()> {\n\n let build_result = std::process::Command::new(\"cargo\")\n\n .args(&[\"build\"])\n\n .output()?;\n\n\n\n if build_result.status.success() {\n\n Ok(())\n\n } else {\n\n Err(anyhow::anyhow!(\n\n \"{}\",\n\n String::from_utf8(build_result.stderr).unwrap()\n\n ))\n\n }\n\n}\n\n\n", "file_path": 
"tests/oneclick/src/main.rs", "rank": 86, "score": 94053.67590541179 }, { "content": "fn run_bootstrap() -> anyhow::Result<()> {\n\n let (shell, ext) = match std::env::consts::OS {\n\n \"windows\" => Ok((\"powershell.exe\", \"ps1\")),\n\n \"linux\" | \"macos\" => Ok((\"bash\", \"sh\")),\n\n os => Err(anyhow::anyhow!(\"Unsupported OS {}\", os)),\n\n }?;\n\n\n\n let repo_root = std::env!(\"CARGO_MANIFEST_DIR\");\n\n\n\n let bootstrap_script = format!(\"{}/docs/howto/assets/bootstrap.{}\", repo_root, ext);\n\n let bootstrap_output = std::process::Command::new(shell)\n\n .arg(bootstrap_script)\n\n .env(\"CONFIG_DIR\", config_dir())\n\n .stdout(std::process::Stdio::piped())\n\n .stderr(std::process::Stdio::piped())\n\n .output()?;\n\n\n\n match bootstrap_output.status.code() {\n\n Some(0) => Ok(()),\n\n Some(e) => Err(anyhow::anyhow!(\n", "file_path": "tests/oneclick/src/main.rs", "rank": 87, "score": 94053.67590541179 }, { "content": "/// A `Store` that has additional logic to determine if it can satisfy\n\n/// a particular reference. 
An `InterceptingStore` can be composed with\n\n/// another Store to satisfy specific requests in a custom way.\n\npub trait InterceptingStore: Store {\n\n /// Whether this `InterceptingStore` can satisfy the given reference.\n\n fn intercepts(&self, image_ref: &Reference) -> bool;\n\n}\n\n\n", "file_path": "crates/kubelet/src/store/composite/mod.rs", "rank": 88, "score": 93112.03824434851 }, { "content": "type PodStream = std::pin::Pin<\n\n Box<\n\n dyn futures::Stream<Item = Result<kube::api::WatchEvent<KubePod>, kube::error::Error>>\n\n + Send,\n\n >,\n\n>;\n\n\n\nasync fn evict_pod(\n\n client: &kube::Client,\n\n name: &str,\n\n namespace: &str,\n\n stream: &mut PodStream,\n\n) -> anyhow::Result<()> {\n\n let ns_client: Api<KubePod> = Api::namespaced(client.clone(), namespace);\n\n info!(\"Evicting namespace '{}' pod '{}'\", namespace, name);\n\n let params = Default::default();\n\n let response = ns_client.delete(name, &params).await?;\n\n\n\n if response.is_left() {\n\n // TODO Timeout?\n", "file_path": "crates/kubelet/src/node/mod.rs", "rank": 89, "score": 91419.38907780487 }, { "content": "pub fn greet(r: codec::http::Request) -> HandlerResult<codec::http::Response> {\n\n println(&format!(\"Received HTTP request: {:?}\", &r));\n\n Ok(codec::http::Response {\n\n status_code: 200,\n\n status: \"OK\".to_owned(),\n\n header: HashMap::new(),\n\n body: b\"Hello, world!\\n\".to_vec(),\n\n })\n\n}\n\n\n", "file_path": "demos/wascc/greet/src/lib.rs", "rank": 90, "score": 87950.78544205954 }, { "content": "fn default_data_dir() -> anyhow::Result<PathBuf> {\n\n Ok(dirs::home_dir()\n\n .ok_or_else(|| anyhow::anyhow!(\"Unable to get home directory\"))?\n\n .join(\".krustlet\"))\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 91, "score": 87358.93386616017 }, { "content": "// TODO: Both providers make a handle containing a tempfile. If this is a common pattern,\n\n// it might make sense to provide that implementation here. 
This would add `tempfile` as a\n\n// dependency of `kubelet`.\n\n/// Trait to describe necessary behavior for creating multiple log readers.\n\npub trait HandleFactory<R>: Sync + Send {\n\n /// Create new log reader.\n\n fn new_handle(&self) -> R;\n\n}\n", "file_path": "crates/kubelet/src/log/mod.rs", "rank": 92, "score": 87337.19433154116 }, { "content": "fn try_deserialize_u16<'de, D>(d: D) -> Result<Option<anyhow::Result<u16>>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let n = u16::deserialize(d).map_err(|e| anyhow::Error::msg(format!(\"{}\", e)));\n\n Ok(Some(n))\n\n}\n\n\n\n/// CLI options that can be configured for Kubelet\n\n///\n\n/// These can be parsed from args using `Opts::into_app()`\n\n#[derive(StructOpt, Clone, Debug)]\n\n#[cfg(any(feature = \"cli\", feature = \"docs\"))]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"cli\")))]\n\n#[structopt(\n\n name = \"krustlet\",\n\n about = \"A kubelet for running WebAssembly workloads\"\n\n)]\n\npub struct Opts {\n\n #[structopt(\n", "file_path": "crates/kubelet/src/config.rs", "rank": 93, "score": 82654.24496993807 }, { "content": "fn is_resource_gone(kubectl_output: &std::process::Output) -> bool {\n\n kubectl_output.status.success()\n\n || match String::from_utf8(kubectl_output.stderr.clone()) {\n\n Ok(s) => s.contains(\"NotFound\"),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "tests/oneclick/src/main.rs", "rank": 94, "score": 81852.30030621176 }, { "content": "fn try_deserialize_ip_addr<'de, D>(d: D) -> Result<Option<anyhow::Result<IpAddr>>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(d)?;\n\n let addr = s.parse::<IpAddr>().map_err(anyhow::Error::new);\n\n Ok(Some(addr))\n\n}\n\n\n", "file_path": "crates/kubelet/src/config.rs", "rank": 95, "score": 80140.72883772216 }, { "content": "fn wasmerciser_volume_mount(spec: &WasmerciserVolumeSpec) -> anyhow::Result<VolumeMount> {\n\n let mount: VolumeMount = 
serde_json::from_value(json!({\n\n \"mountPath\": spec.mount_path,\n\n \"name\": spec.volume_name\n\n }))?;\n\n Ok(mount)\n\n}\n\n\n", "file_path": "tests/pod_builder.rs", "rank": 96, "score": 78727.53685370089 }, { "content": "fn gen_tls_cert(config: &KubeletConfig) -> anyhow::Result<Certificate> {\n\n let mut params = CertificateParams::default();\n\n params.not_before = chrono::Utc::now();\n\n params.not_after = chrono::Utc::now() + chrono::Duration::weeks(52);\n\n let mut distinguished_name = DistinguishedName::new();\n\n distinguished_name.push(DnType::OrganizationName, \"system:nodes\");\n\n distinguished_name.push(\n\n DnType::CommonName,\n\n &format!(\"system:node:{}\", config.hostname),\n\n );\n\n params.distinguished_name = distinguished_name;\n\n params\n\n .key_pair\n\n .replace(KeyPair::generate(&PKCS_ECDSA_P256_SHA256)?);\n\n\n\n params.alg = &PKCS_ECDSA_P256_SHA256;\n\n\n\n params.subject_alt_names = vec![\n\n SanType::DnsName(config.hostname.clone()),\n\n SanType::IpAddress(config.node_ip),\n\n ];\n\n\n\n Ok(Certificate::from_params(params)?)\n\n}\n\n\n", "file_path": "crates/kubelet/src/bootstrapping/mod.rs", "rank": 97, "score": 78509.30710877888 }, { "content": "fn gen_auth_cert(config: &KubeletConfig) -> anyhow::Result<Certificate> {\n\n let mut params = CertificateParams::default();\n\n params.not_before = chrono::Utc::now();\n\n params.not_after = chrono::Utc::now() + chrono::Duration::weeks(52);\n\n let mut distinguished_name = DistinguishedName::new();\n\n distinguished_name.push(DnType::OrganizationName, \"system:nodes\");\n\n distinguished_name.push(\n\n DnType::CommonName,\n\n &format!(\"system:node:{}\", config.node_name),\n\n );\n\n params.distinguished_name = distinguished_name;\n\n params\n\n .key_pair\n\n .replace(KeyPair::generate(&PKCS_ECDSA_P256_SHA256)?);\n\n\n\n params.alg = &PKCS_ECDSA_P256_SHA256;\n\n\n\n Ok(Certificate::from_params(params)?)\n\n}\n\n\n", "file_path": "crates/kubelet/src/bootstrapping/mod.rs", "rank": 98, 
"score": 78509.30710877888 }, { "content": "fn option_values<T: Clone>(source: &Vec<Option<T>>) -> Vec<T> {\n\n source.iter().filter_map(|t| t.clone()).collect()\n\n}\n\n\n", "file_path": "tests/pod_builder.rs", "rank": 99, "score": 77202.01443567334 } ]
Rust
src_testbed/nphysics_backend.rs
sebcrozet/rapier
b61bec83487224c285a1e95ac58d48885254285b
use ncollide::shape::{Ball, Capsule, Cuboid, ShapeHandle}; use nphysics::force_generator::DefaultForceGeneratorSet; use nphysics::joint::{ DefaultJointConstraintSet, FixedConstraint, PrismaticConstraint, RevoluteConstraint, }; use nphysics::object::{ BodyPartHandle, ColliderDesc, DefaultBodyHandle, DefaultBodySet, DefaultColliderSet, RigidBodyDesc, }; use nphysics::world::{DefaultGeometricalWorld, DefaultMechanicalWorld}; use rapier::counters::Counters; use rapier::dynamics::{ IntegrationParameters, JointParams, JointSet, RigidBodyHandle, RigidBodySet, }; use rapier::geometry::{Collider, ColliderSet}; use rapier::math::Vector; use std::collections::HashMap; #[cfg(feature = "dim3")] use {ncollide::shape::TriMesh, nphysics::joint::BallConstraint}; pub struct NPhysicsWorld { rapier2nphysics: HashMap<RigidBodyHandle, DefaultBodyHandle>, mechanical_world: DefaultMechanicalWorld<f32>, geometrical_world: DefaultGeometricalWorld<f32>, bodies: DefaultBodySet<f32>, colliders: DefaultColliderSet<f32>, joints: DefaultJointConstraintSet<f32>, force_generators: DefaultForceGeneratorSet<f32>, } impl NPhysicsWorld { pub fn from_rapier( gravity: Vector<f32>, bodies: &RigidBodySet, colliders: &ColliderSet, joints: &JointSet, ) -> Self { let mut rapier2nphysics = HashMap::new(); let mechanical_world = DefaultMechanicalWorld::new(gravity); let geometrical_world = DefaultGeometricalWorld::new(); let mut nphysics_bodies = DefaultBodySet::new(); let mut nphysics_colliders = DefaultColliderSet::new(); let mut nphysics_joints = DefaultJointConstraintSet::new(); let force_generators = DefaultForceGeneratorSet::new(); for (rapier_handle, rb) in bodies.iter() { let nphysics_rb = RigidBodyDesc::new().position(*rb.position()).build(); let nphysics_rb_handle = nphysics_bodies.insert(nphysics_rb); rapier2nphysics.insert(rapier_handle, nphysics_rb_handle); } for (_, collider) in colliders.iter() { let parent = &bodies[collider.parent()]; let nphysics_rb_handle = 
rapier2nphysics[&collider.parent()]; if let Some(collider) = nphysics_collider_from_rapier_collider(&collider, parent.is_dynamic()) { let nphysics_collider = collider.build(BodyPartHandle(nphysics_rb_handle, 0)); nphysics_colliders.insert(nphysics_collider); } else { eprintln!("Creating shape unknown to the nphysics backend.") } } for joint in joints.iter() { let b1 = BodyPartHandle(rapier2nphysics[&joint.1.body1], 0); let b2 = BodyPartHandle(rapier2nphysics[&joint.1.body2], 0); match &joint.1.params { JointParams::FixedJoint(params) => { let c = FixedConstraint::new( b1, b2, params.local_anchor1.translation.vector.into(), params.local_anchor1.rotation, params.local_anchor2.translation.vector.into(), params.local_anchor2.rotation, ); nphysics_joints.insert(c); } #[cfg(feature = "dim3")] JointParams::BallJoint(params) => { let c = BallConstraint::new(b1, b2, params.local_anchor1, params.local_anchor2); nphysics_joints.insert(c); } #[cfg(feature = "dim2")] JointParams::BallJoint(params) => { let c = RevoluteConstraint::new(b1, b2, params.local_anchor1, params.local_anchor2); nphysics_joints.insert(c); } #[cfg(feature = "dim3")] JointParams::RevoluteJoint(params) => { let c = RevoluteConstraint::new( b1, b2, params.local_anchor1, params.local_axis1, params.local_anchor2, params.local_axis2, ); nphysics_joints.insert(c); } JointParams::PrismaticJoint(params) => { let mut c = PrismaticConstraint::new( b1, b2, params.local_anchor1, params.local_axis1(), params.local_anchor2, ); if params.limits_enabled { c.enable_min_offset(params.limits[0]); c.enable_max_offset(params.limits[1]); } nphysics_joints.insert(c); } } } Self { rapier2nphysics, mechanical_world, geometrical_world, bodies: nphysics_bodies, colliders: nphysics_colliders, joints: nphysics_joints, force_generators, } } pub fn step(&mut self, counters: &mut Counters, params: &IntegrationParameters) { self.mechanical_world .integration_parameters .max_position_iterations = params.max_position_iterations; 
self.mechanical_world .integration_parameters .max_velocity_iterations = params.max_velocity_iterations; self.mechanical_world .integration_parameters .set_dt(params.dt()); counters.step_started(); self.mechanical_world.step( &mut self.geometrical_world, &mut self.bodies, &mut self.colliders, &mut self.joints, &mut self.force_generators, ); counters.step_completed(); } pub fn sync(&self, bodies: &mut RigidBodySet, colliders: &mut ColliderSet) { for (rapier_handle, nphysics_handle) in self.rapier2nphysics.iter() { let rb = bodies.get_mut(*rapier_handle).unwrap(); let ra = self.bodies.rigid_body(*nphysics_handle).unwrap(); let pos = *ra.position(); rb.set_position(pos, false); for coll_handle in rb.colliders() { let collider = &mut colliders[*coll_handle]; collider.set_position_debug(pos * collider.position_wrt_parent()); } } } } fn nphysics_collider_from_rapier_collider( collider: &Collider, is_dynamic: bool, ) -> Option<ColliderDesc<f32>> { let margin = ColliderDesc::<f32>::default_margin(); let mut pos = *collider.position_wrt_parent(); let shape = collider.shape(); let shape = if let Some(cuboid) = shape.as_cuboid() { ShapeHandle::new(Cuboid::new(cuboid.half_extents.map(|e| e - margin))) } else if let Some(ball) = shape.as_ball() { ShapeHandle::new(Ball::new(ball.radius - margin)) } else if let Some(capsule) = shape.as_capsule() { pos *= capsule.transform_wrt_y(); ShapeHandle::new(Capsule::new(capsule.half_height(), capsule.radius)) } else if let Some(heightfield) = shape.as_heightfield() { ShapeHandle::new(heightfield.clone()) } else { #[cfg(feature = "dim3")] if let Some(trimesh) = shape.as_trimesh() { ShapeHandle::new(TriMesh::new( trimesh.vertices().to_vec(), trimesh .indices() .iter() .map(|idx| na::convert(*idx)) .collect(), None, )) } else { return None; } #[cfg(feature = "dim2")] { return None; } }; let density = if is_dynamic { collider.density() } else { 0.0 }; Some( ColliderDesc::new(shape) .position(pos) .density(density) 
.sensor(collider.is_sensor()), ) }
use ncollide::shape::{Ball, Capsule, Cuboid, ShapeHandle}; use nphysics::force_generator::DefaultForceGeneratorSet; use nphysics::joint::{ DefaultJointConstraintSet, FixedConstraint, PrismaticConstraint, RevoluteConstraint, }; use nphysics::object::{ BodyPartHandle, ColliderDesc, DefaultBodyHandle, DefaultBodySet, DefaultColliderSet, RigidBodyDesc, }; use nphysics::world::{DefaultGeometricalWorld, DefaultMechanicalWorld}; use rapier::counters::Counters; use rapier::dynamics::{ IntegrationParameters, JointParams, JointSet, RigidBodyHandle, RigidBodySet, }; use rapier::geometry::{Collider, ColliderSet}; use rapier::math::Vector; use std::collections::HashMap; #[cfg(feature = "dim3")] use {ncollide::shape::TriMesh, nphysics::joint::BallConstraint}; pub struct NPhysicsWorld { rapier2nphysics: HashMap<RigidBodyHandle, DefaultBodyHandle>, mechanical_world: DefaultMechanicalWorld<f32>, geometrical_world: DefaultGeometricalWorld<f32>, bodies: DefaultBodySet<f32>, colliders: DefaultColliderSet<f32>, joints: DefaultJointConstraintSet<f32>, force_generators: DefaultForceGeneratorSet<f32>, } impl NPhysicsWorld { pub fn from_rapier( gravity: Vector<f32>, bodies: &RigidBodySet, colliders: &ColliderSet, joints: &JointSet, ) -> Self { let mut rapier2nphysics = HashMap::new(); let mechanical_world = DefaultMechanicalWorld::new(gravity); let geometrical_world = DefaultGeometricalWorld::new(); let mut nphysics_bodies = DefaultBodySet::new(); let mut nphysics_colliders = DefaultColliderSet::new(); let mut nphysics_joints = DefaultJointConstraintSet::new(); let force_generators = DefaultForceGeneratorSet::new(); for (rapier_handle, rb) in bodies.iter() { let nphysics_rb = RigidBodyDesc::new().position(*rb.position()).build(); let nphysics_rb_handle = nphysics_bodies.insert(nphysics_rb); rapier2nphysics.insert(rapier_handle, nphysics_rb_handle); } for (_, collider) in colliders.iter() { let parent = &bodies[collider.parent()]; let nphysics_rb_handle = 
rapier2nphysics[&collider.parent()]; if let Some(collider) = nphysics_collider_from_rapier_collider(&collider, parent.is_dynamic()) { let nphysics_collider = collider.build(BodyPartHandle(nphysics_rb_handle, 0)); nphysics_colliders.insert(nphysics_collider); } else { eprintln!("Creating shape unknown to the nphysics backend.") } } for joint in joints.iter() { let b1 = BodyPartHandle(rapier2nphysics[&joint.1.body1], 0); let b2 = BodyPartHandle(rapier2nphysics[&joint.1.body2], 0); match &joint.1.params { JointParams::FixedJoint(params) => { let c = FixedConstraint::new( b1, b2, params.local_anchor1.translation.vector.into(), params.local_anchor1.rotation, params.local_anchor2.translation.vector.into(), params.local_anchor2.rotation, ); nphysics_joints.insert(c); } #[cfg(feature = "dim3")] JointParams::BallJoint(params) => { let c = BallConstraint::new(b1, b2, params.local_anchor1, params.local_anchor2); nphysics_joints.insert(c); } #[cfg(feature = "dim2")] JointParams::BallJoint(params) => { let c = RevoluteConstraint::new(b1, b2, params.local_anchor1, params.local_anchor2); nphysics_joints.insert(c); } #[cfg(feature = "dim3")] JointParams::RevoluteJoint(params) => { let c = RevoluteConstraint::new( b1, b2, params.local_anchor1, params.local_axis1, params.local_anchor2, params.local_axis2, ); nphysics_joints.insert(c); } JointParams::PrismaticJoint(params) => { let mut c = PrismaticConstraint::new( b1, b2, params.local_anchor1, params.local_axis1(), params.local_anchor2, ); if params.limits_enabled { c.enable_min_offset(params.limits[0]); c.enable_max_offset(params.limits[1]); } nphysics_joints.insert(c); } } } Self { rapier2nphysics, mechanical_world, geometrical_world, bodies: nphysics_bodies, colliders: nphysics_colliders, joints: nphysics_joints, force_generators, } } pub fn step(&mut self, counters: &mut Counters, params: &IntegrationParameters) { self.mechanical_world .integration_parameters .max_position_iterations = params.max_position_iterations; 
self.mechanical_world .integration_parameters .max_velocity_iterations = params.max_velocity_iterations; self.mechanical_world .integration_parameters .set_dt(params.dt()); counters.step_started(); self.mechanical_world.step( &mut self.geometrical_world, &mut self.bodies, &mut self.colliders, &mut self.joints, &mut self.force_generators, ); counters.step_completed(); } pub fn sync(&self, bodies: &mut RigidBodySet, colliders: &mut ColliderSet) { for (rapier_handle, nphysics_handle) in self.rapier2nphysics.iter() { let rb = bodies.get_mut(*rapier_handle).unwrap(); let ra = self.bodies.rigid_body(*nphysics_handle).unwrap(); let pos = *ra.position(); rb.set_position(pos, false); for coll_handle in rb.colliders() { let collider = &mut colliders[*coll_handle]; collider.set_position_debug(pos * collider.position_wrt_parent()); } } } } fn nphysics_collider_from_rapier_collider( collider: &Collider, is_dynamic: bool, ) -> Option<ColliderDesc<f32>> { let margin = ColliderDesc::<f32>::default_margin(); let mut pos = *collider.position_wrt_parent(); let shape = collider.shape(); let shape = if let Some(cuboid) = shape.as_cuboid() { ShapeHandle::new(Cuboid::new(cuboid.half_extents.map(|e| e - margin))) } else if let Some(ball) = shape.as_ball() { ShapeHandle::new(Ball::new(ball.radius - margin)) } else if let Some(capsule) = shape.as_capsule() { pos *= capsule.transform_wrt_y(); ShapeHandle::new(Capsule::new(capsule.half_height(), capsule.radius)) } else if let Some(heightfield) = shape.as_heightfield() { ShapeHandle::new(heightfield.clone()) } else { #[cfg(feature = "dim3")] if let Some(trimesh) = shape.as_trimesh() { ShapeHandle::new(TriMesh::new( trimesh.vertices().to_vec(), trimesh .indices() .iter() .map(|idx| na::convert(*idx)) .collect(), None, )) } else { return None; } #[cfg(feature = "dim2")] { return None; } }; let density = if is_dynamic { collider.density() } else { 0.0 };
}
Some( ColliderDesc::new(shape) .position(pos) .density(density) .sensor(collider.is_sensor()), )
call_expression
[ { "content": "pub fn generate_contacts_trimesh_shape(ctxt: &mut ContactGenerationContext) {\n\n let collider1 = &ctxt.colliders[ctxt.pair.pair.collider1];\n\n let collider2 = &ctxt.colliders[ctxt.pair.pair.collider2];\n\n\n\n if let Some(trimesh1) = collider1.shape().as_trimesh() {\n\n do_generate_contacts(trimesh1, collider1, collider2, ctxt, false)\n\n } else if let Some(trimesh2) = collider2.shape().as_trimesh() {\n\n do_generate_contacts(trimesh2, collider2, collider1, ctxt, true)\n\n }\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/trimesh_shape_contact_generator.rs", "rank": 0, "score": 258024.37910952905 }, { "content": "pub fn generate_contacts_cuboid_capsule(ctxt: &mut PrimitiveContactGenerationContext) {\n\n if let (Some(cube1), Some(capsule2)) = (ctxt.shape1.as_cuboid(), ctxt.shape2.as_capsule()) {\n\n generate_contacts(\n\n ctxt.prediction_distance,\n\n cube1,\n\n ctxt.position1,\n\n capsule2,\n\n ctxt.position2,\n\n ctxt.manifold,\n\n false,\n\n );\n\n ctxt.manifold.update_warmstart_multiplier();\n\n } else if let (Some(capsule1), Some(cube2)) =\n\n (ctxt.shape1.as_capsule(), ctxt.shape2.as_cuboid())\n\n {\n\n generate_contacts(\n\n ctxt.prediction_distance,\n\n cube2,\n\n ctxt.position2,\n\n capsule1,\n\n ctxt.position1,\n\n ctxt.manifold,\n\n true,\n\n );\n\n ctxt.manifold.update_warmstart_multiplier();\n\n }\n\n ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/cuboid_capsule_contact_generator.rs", "rank": 1, "score": 254396.99845662044 }, { "content": "pub fn detect_proximity_trimesh_shape(ctxt: &mut ProximityDetectionContext) -> Proximity {\n\n let collider1 = &ctxt.colliders[ctxt.pair.pair.collider1];\n\n let collider2 = &ctxt.colliders[ctxt.pair.pair.collider2];\n\n\n\n if let Some(trimesh1) = collider1.shape().as_trimesh() {\n\n do_detect_proximity(trimesh1, collider1, collider2, ctxt)\n\n } else if let Some(trimesh2) = collider2.shape().as_trimesh() {\n\n 
do_detect_proximity(trimesh2, collider2, collider1, ctxt)\n\n } else {\n\n panic!(\"Invalid shape types provided.\")\n\n }\n\n}\n\n\n", "file_path": "src/geometry/proximity_detector/trimesh_shape_proximity_detector.rs", "rank": 2, "score": 249634.14122026888 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n /*\n\n * Create the balls\n\n */\n\n // Build the rigid body.\n\n let rad = 0.4;\n\n let numi = 100; // Num vertical nodes.\n\n let numk = 100; // Num horizontal nodes.\n\n let shift = 1.0;\n\n\n\n let mut body_handles = Vec::new();\n\n\n\n for k in 0..numk {\n", "file_path": "benchmarks2d/joint_ball2.rs", "rank": 3, "score": 229908.57896441076 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n /*\n\n * Create the balls\n\n */\n\n // Build the rigid body.\n\n let rad = 0.4;\n\n let num = 30; // Num vertical nodes.\n\n let shift = 1.0;\n\n\n\n let mut body_handles = Vec::new();\n\n\n\n for xx in 0..4 {\n\n let x = xx as f32 * shift * (num as f32 + 2.0);\n", "file_path": "benchmarks2d/joint_fixed2.rs", "rank": 4, "score": 229908.57896441076 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n let rad = 0.4;\n\n let num = 10;\n\n let shift = 2.0;\n\n\n\n for l in 0..4 {\n\n let y = l as f32 * shift * (num as f32) * 3.0;\n\n\n\n for j in 0..50 {\n\n let x = j as f32 * shift * 4.0;\n\n\n\n let ground = RigidBodyBuilder::new_static()\n\n .translation(x, y, 0.0)\n", "file_path": "benchmarks3d/joint_revolute3.rs", "rank": 5, "score": 229908.57896441076 }, { "content": "pub fn 
init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n /*\n\n * Create the balls\n\n */\n\n // Build the rigid body.\n\n let rad = 0.4;\n\n let num = 10;\n\n let shift = 1.0;\n\n\n\n for l in 0..25 {\n\n let y = l as f32 * shift * (num as f32 + 2.0) * 2.0;\n\n\n\n for j in 0..50 {\n", "file_path": "benchmarks2d/joint_prismatic2.rs", "rank": 6, "score": 229908.57896441076 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n let rad = 0.4;\n\n let num = 5;\n\n let shift = 1.0;\n\n\n\n for m in 0..8 {\n\n let z = m as f32 * shift * (num as f32 + 2.0);\n\n\n\n for l in 0..8 {\n\n let y = l as f32 * shift * (num as f32) * 2.0;\n\n\n\n for j in 0..50 {\n\n let x = j as f32 * shift * 4.0;\n", "file_path": "benchmarks3d/joint_prismatic3.rs", "rank": 7, "score": 229908.57896441076 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n let rad = 0.4;\n\n let num = 5;\n\n let shift = 1.0;\n\n\n\n let mut body_handles = Vec::new();\n\n\n\n for m in 0..10 {\n\n let z = m as f32 * shift * (num as f32 + 2.0);\n\n\n\n for l in 0..10 {\n\n let y = l as f32 * shift * 3.0;\n\n\n", "file_path": "benchmarks3d/joint_fixed3.rs", "rank": 8, "score": 229908.57896441076 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n let rad = 0.4;\n\n let num = 100;\n\n let shift = 1.0;\n\n\n\n let mut body_handles = Vec::new();\n\n\n\n for k in 0..num {\n\n for i in 0..num {\n\n 
let fk = k as f32;\n\n let fi = i as f32;\n\n\n\n let status = if i == 0 && (k % 4 == 0 || k == num - 1) {\n", "file_path": "benchmarks3d/joint_ball3.rs", "rank": 9, "score": 229908.57896441076 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n /*\n\n * Ground.\n\n */\n\n let ground_size = 20.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, 0.4)\n\n .friction(0.15)\n\n // .restitution(0.5)\n", "file_path": "examples3d/debug_dynamic_collider_add3.rs", "rank": 11, "score": 222301.49710846067 }, { "content": "pub fn generate_contacts_capsule_capsule(ctxt: &mut PrimitiveContactGenerationContext) {\n\n if let (Some(capsule1), Some(capsule2)) = (ctxt.shape1.as_capsule(), ctxt.shape2.as_capsule()) {\n\n generate_contacts(\n\n ctxt.prediction_distance,\n\n capsule1,\n\n ctxt.position1,\n\n capsule2,\n\n ctxt.position2,\n\n ctxt.manifold,\n\n );\n\n }\n\n\n\n ctxt.manifold.update_warmstart_multiplier();\n\n ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/capsule_capsule_contact_generator.rs", "rank": 12, "score": 220429.28585299873 }, { "content": "pub fn generate_contacts_cuboid_cuboid(ctxt: &mut PrimitiveContactGenerationContext) {\n\n if let (Some(cube1), Some(cube2)) = (ctxt.shape1.as_cuboid(), ctxt.shape2.as_cuboid()) {\n\n generate_contacts(\n\n ctxt.prediction_distance,\n\n cube1,\n\n ctxt.position1,\n\n cube2,\n\n ctxt.position2,\n\n ctxt.manifold,\n\n );\n\n } else {\n\n unreachable!()\n\n }\n\n\n\n ctxt.manifold.update_warmstart_multiplier();\n\n ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n}\n\n\n", "file_path": 
"src/geometry/contact_generator/cuboid_cuboid_contact_generator.rs", "rank": 13, "score": 220218.6296395923 }, { "content": "/// Conditionally swaps each lanes of `a` with those of `b`.\n\n///\n\n/// For each `i in [0..SIMD_WIDTH[`, if `do_swap.extract(i)` is `true` then\n\n/// `a.extract(i)` is swapped with `b.extract(i)`.\n\npub fn simd_swap(do_swap: SimdBool, a: &mut SimdFloat, b: &mut SimdFloat) {\n\n let _a = *a;\n\n *a = b.select(do_swap, *a);\n\n *b = _a.select(do_swap, *b);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 14, "score": 214428.85549716736 }, { "content": "pub fn detect_proximity_cuboid_cuboid(ctxt: &mut PrimitiveProximityDetectionContext) -> Proximity {\n\n if let (Some(cube1), Some(cube2)) = (ctxt.shape1.as_cuboid(), ctxt.shape2.as_cuboid()) {\n\n detect_proximity(\n\n ctxt.prediction_distance,\n\n cube1,\n\n ctxt.position1,\n\n cube2,\n\n ctxt.position2,\n\n )\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/geometry/proximity_detector/cuboid_cuboid_proximity_detector.rs", "rank": 15, "score": 212768.10419457578 }, { "content": "pub fn generate_contacts_heightfield_shape(ctxt: &mut ContactGenerationContext) {\n\n let collider1 = &ctxt.colliders[ctxt.pair.pair.collider1];\n\n let collider2 = &ctxt.colliders[ctxt.pair.pair.collider2];\n\n\n\n if let Some(heightfield1) = collider1.shape().as_heightfield() {\n\n do_generate_contacts(heightfield1, collider1, collider2, ctxt, false)\n\n } else if let Some(heightfield2) = collider2.shape().as_heightfield() {\n\n do_generate_contacts(heightfield2, collider2, collider1, ctxt, true)\n\n }\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/heightfield_shape_contact_generator.rs", "rank": 16, "score": 211701.2115620944 }, { "content": "pub fn generate_contacts_cuboid_triangle(ctxt: &mut PrimitiveContactGenerationContext) {\n\n if let (Some(cube1), Some(triangle2)) = (ctxt.shape1.as_cuboid(), ctxt.shape2.as_triangle()) {\n\n generate_contacts(\n\n 
ctxt.prediction_distance,\n\n cube1,\n\n ctxt.position1,\n\n triangle2,\n\n ctxt.position2,\n\n ctxt.manifold,\n\n false,\n\n );\n\n ctxt.manifold.update_warmstart_multiplier();\n\n } else if let (Some(triangle1), Some(cube2)) =\n\n (ctxt.shape1.as_triangle(), ctxt.shape2.as_cuboid())\n\n {\n\n generate_contacts(\n\n ctxt.prediction_distance,\n\n cube2,\n\n ctxt.position2,\n\n triangle1,\n\n ctxt.position1,\n\n ctxt.manifold,\n\n true,\n\n );\n\n ctxt.manifold.update_warmstart_multiplier();\n\n }\n\n ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/cuboid_triangle_contact_generator.rs", "rank": 17, "score": 208786.0375499788 }, { "content": "pub fn generate_contacts<'a>(\n\n prediction_distance: f32,\n\n cube1: &'a Cuboid,\n\n mut pos1: &'a Isometry<f32>,\n\n capsule2: &'a Capsule,\n\n mut pos2: &'a Isometry<f32>,\n\n manifold: &mut ContactManifold,\n\n swapped: bool,\n\n) {\n\n let mut pos12 = pos1.inverse() * pos2;\n\n let mut pos21 = pos12.inverse();\n\n\n\n if (!swapped && manifold.try_update_contacts(&pos12))\n\n || (swapped && manifold.try_update_contacts(&pos21))\n\n {\n\n return;\n\n }\n\n\n\n let segment2 = capsule2.segment;\n\n\n", "file_path": "src/geometry/contact_generator/cuboid_capsule_contact_generator.rs", "rank": 18, "score": 207211.595348188 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 50.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "benchmarks3d/keva3.rs", "rank": 19, 
"score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = Vector3::new(100.0, 1.0, 100.0);\n\n let nsubdivs = 20;\n\n\n\n let heights = DMatrix::from_fn(nsubdivs + 1, nsubdivs + 1, |i, j| {\n\n if i == 0 || i == nsubdivs || j == 0 || j == nsubdivs {\n\n 10.0\n\n } else {\n\n let x = i as f32 * ground_size.x / (nsubdivs as f32);\n\n let z = j as f32 * ground_size.z / (nsubdivs as f32);\n", "file_path": "examples3d/heightfield3.rs", "rank": 20, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/domino3.rs", "rank": 21, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = Vector3::new(200.0, 1.0, 200.0);\n\n let nsubdivs = 20;\n\n\n\n let heights = DMatrix::from_fn(nsubdivs + 1, nsubdivs + 1, |i, j| {\n\n if i == 0 || i == nsubdivs || j == 0 || j == nsubdivs {\n\n 10.0\n\n } else {\n\n let x = i as f32 * ground_size.x / (nsubdivs as f32);\n\n let z = j as f32 * ground_size.z / (nsubdivs as f32);\n", "file_path": 
"benchmarks3d/heightfield3.rs", "rank": 22, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "benchmarks3d/stacks3.rs", "rank": 23, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 20.;\n\n let ground_height = 1.0;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, 2.0)\n\n .restitution(1.0)\n", "file_path": "examples3d/restitution3.rs", "rank": 24, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", 
"file_path": "benchmarks3d/boxes3.rs", "rank": 25, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "benchmarks3d/compound3.rs", "rank": 26, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Create the balls\n\n */\n\n let num = 10;\n\n let rad = 0.2;\n\n\n\n let subdiv = 1.0 / (num as f32);\n\n\n\n for i in 0usize..num {\n\n let (x, y) = (i as f32 * subdiv * std::f32::consts::PI * 2.0).sin_cos();\n\n\n\n // Build the rigid body.\n", "file_path": "examples2d/damping2.rs", "rank": 27, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 100.0f32;\n\n let ground_height = 1.0;\n\n let nsubdivs = 20;\n\n\n\n let quad = rapier3d::ncollide::procedural::quad(ground_size, ground_size, nsubdivs, nsubdivs);\n\n let indices = quad\n\n .flat_indices()\n\n .chunks(3)\n\n .map(|is| Point3::new(is[0], is[2], is[1]))\n", "file_path": "examples3d/trimesh3.rs", "rank": 28, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * 
World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Create the balls\n\n */\n\n let num = 20;\n\n let rad = 1.0;\n\n\n\n let shift = rad * 2.0 + 1.0;\n\n let centerx = shift * (num as f32) / 2.0;\n\n let centery = shift / 2.0;\n\n let centerz = shift * (num as f32) / 2.0;\n\n\n\n for i in 0..num {\n", "file_path": "benchmarks3d/balls3.rs", "rank": 29, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 25.0;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static().build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, 1.2).build();\n\n colliders.insert(collider, handle, &mut bodies);\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .rotation(std::f32::consts::FRAC_PI_2)\n", "file_path": "benchmarks2d/capsules2.rs", "rank": 30, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 25.0;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static().build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, 1.2).build();\n\n colliders.insert(collider, handle, &mut bodies);\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .rotation(std::f32::consts::FRAC_PI_2)\n", "file_path": "benchmarks2d/boxes2.rs", "rank": 31, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = 
ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 10.0;\n\n let ground_thickness = 1.0;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static().build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_thickness).build();\n\n colliders.insert(collider, ground_handle, &mut bodies);\n\n\n\n /*\n", "file_path": "examples2d/pyramid2.rs", "rank": 32, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground.\n\n */\n\n let ground_size = 10.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/platform3.rs", "rank": 33, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 50.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/keva3.rs", "rank": 34, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut 
colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 100.1;\n\n let ground_height = 2.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/primitives3.rs", "rank": 35, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n let rad = 0.5;\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 100.1;\n\n let ground_height = 2.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n\n let mut k = 0;\n\n\n", "file_path": "examples3d/fountain3.rs", "rank": 36, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = Vector2::new(50.0, 1.0);\n\n let nsubdivs = 2000;\n\n\n\n let heights = DVector::from_fn(nsubdivs + 1, |i, _| {\n\n if i == 0 || i == nsubdivs {\n\n 8.0\n\n } else {\n\n (i as f32 * ground_size.x / (nsubdivs as f32)).cos() * 2.0\n\n }\n", "file_path": "examples2d/heightfield2.rs", "rank": 37, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n 
let joints = JointSet::new();\n\n\n\n /*\n\n * Ground.\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, ground_handle, &mut bodies);\n", "file_path": "examples3d/sensor3.rs", "rank": 38, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground.\n\n */\n\n let ground_size = 10.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples2d/platform2.rs", "rank": 39, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground.\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height)\n\n .build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height).build();\n\n colliders.insert(collider, ground_handle, &mut bodies);\n", "file_path": "examples2d/sensor2.rs", "rank": 40, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut 
colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 20.;\n\n let ground_height = 1.0;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height)\n\n .restitution(1.0)\n", "file_path": "examples2d/restitution2.rs", "rank": 41, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = Vector2::new(50.0, 1.0);\n\n let nsubdivs = 2000;\n\n\n\n let heights = DVector::from_fn(nsubdivs + 1, |i, _| {\n\n if i == 0 || i == nsubdivs {\n\n 80.0\n\n } else {\n\n (i as f32 * ground_size.x / (nsubdivs as f32)).cos() * 2.0\n\n }\n", "file_path": "benchmarks2d/heightfield2.rs", "rank": 42, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n create_prismatic_joints(\n\n &mut bodies,\n\n &mut colliders,\n\n &mut joints,\n\n Point3::new(20.0, 10.0, 0.0),\n\n 5,\n\n );\n\n create_revolute_joints(\n\n &mut bodies,\n\n &mut colliders,\n\n &mut joints,\n\n Point3::new(20.0, 0.0, 0.0),\n", "file_path": "examples3d/joints3.rs", "rank": 43, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 
0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/stacks3.rs", "rank": 44, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.0f32;\n\n let ground_height = 1.0;\n\n let nsubdivs = 20;\n\n\n\n let quad = rapier3d::ncollide::procedural::quad(ground_size, ground_size, nsubdivs, nsubdivs);\n\n let indices = quad\n\n .flat_indices()\n\n .chunks(3)\n\n .map(|is| Point3::new(is[0], is[2], is[1]))\n", "file_path": "benchmarks3d/trimesh3.rs", "rank": 45, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 50.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/compound3.rs", "rank": 46, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n 
let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "benchmarks3d/capsules3.rs", "rank": 47, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Create the cubes\n\n */\n\n let num = 10;\n\n let rad = 0.2;\n\n\n\n let subdiv = 1.0 / (num as f32);\n\n\n\n for i in 0usize..num {\n\n let (x, y) = (i as f32 * subdiv * std::f32::consts::PI * 2.0).sin_cos();\n\n\n\n // Build the rigid body.\n", "file_path": "examples3d/damping3.rs", "rank": 48, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let mut joints = JointSet::new();\n\n\n\n /*\n\n * Create the balls\n\n */\n\n // Build the rigid body.\n\n // NOTE: a smaller radius (e.g. 
0.1) breaks Box2D so\n\n // in order to be able to compare rapier with Box2D,\n\n // we set it to 0.4.\n\n let rad = 0.4;\n\n let numi = 10; // Num vertical nodes.\n\n let numk = 10; // Num horizontal nodes.\n\n let shift = 1.0;\n\n\n", "file_path": "examples2d/joints2.rs", "rank": 49, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 100.0;\n\n let ground_thickness = 1.0;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static().build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_thickness).build();\n\n colliders.insert(collider, ground_handle, &mut bodies);\n\n\n\n /*\n", "file_path": "benchmarks2d/pyramid2.rs", "rank": 50, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 50.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, ground_handle, &mut bodies);\n", "file_path": "benchmarks3d/pyramid3.rs", "rank": 51, "score": 192833.25889087917 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n /*\n\n * Ground\n\n */\n\n let _ground_size = 25.0;\n\n\n\n /*\n\n let ground_shape = 
ShapeHandle::new(Cuboid::new(Vector2::new(ground_size, 1.0)));\n\n\n\n let ground_handle = bodies.insert(Ground::new());\n\n let co = ColliderDesc::new(ground_shape)\n\n .translation(-Vector2::y())\n\n .build(BodyPartHandle(ground_handle, 0));\n\n colliders.insert(co);\n", "file_path": "benchmarks2d/balls2.rs", "rank": 52, "score": 192833.25889087917 }, { "content": "// This shows a bug when a cylinder is in contact with a very large\n\n// but very thin cuboid. In this case the EPA returns an incorrect\n\n// contact normal, resulting in the cylinder falling through the floor.\n\npub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * The ground\n\n */\n\n let ground_size = 5.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples2d/locked_rotations2.rs", "rank": 53, "score": 189440.2940956253 }, { "content": "// This shows a bug when a cylinder is in contact with a very large\n\n// but very thin cuboid. 
In this case the EPA returns an incorrect\n\n// contact normal, resulting in the cylinder falling through the floor.\n\npub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * The ground\n\n */\n\n let ground_size = 5.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/locked_rotations3.rs", "rank": 54, "score": 189440.2940956253 }, { "content": "// This shows a bug when a cylinder is in contact with a very large\n\n// but very thin cuboid. In this case the EPA returns an incorrect\n\n// contact normal, resulting in the cylinder falling through the floor.\n\npub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 100.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "examples3d/debug_cylinder3.rs", "rank": 55, "score": 189440.2940956253 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 5.0;\n\n let ground_height = 0.1;\n\n\n\n 
let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height)\n\n .build();\n\n let floor_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height).build();\n\n colliders.insert(collider, floor_handle, &mut bodies);\n", "file_path": "examples2d/collision_groups2.rs", "rank": 56, "score": 189430.41088714387 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 5.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let floor_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, floor_handle, &mut bodies);\n", "file_path": "examples3d/collision_groups3.rs", "rank": 57, "score": 189430.41088714387 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n // Triangle ground.\n\n let width = 0.5;\n\n let vtx = vec![\n\n Point3::new(-width, 0.0, -width),\n\n Point3::new(width, 0.0, -width),\n\n Point3::new(width, 0.0, width),\n\n Point3::new(-width, 0.0, width),\n\n Point3::new(-width, -width, -width),\n\n Point3::new(width, -width, -width),\n\n Point3::new(width, -width, width),\n\n Point3::new(-width, -width, width),\n\n ];\n", "file_path": "examples3d/debug_trimesh3.rs", "rank": 58, "score": 189430.41088714387 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n /*\n\n * Ground.\n\n */\n\n 
let ground_size = 20.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, 0.4)\n\n .friction(0.15)\n\n // .restitution(0.5)\n", "file_path": "examples3d/debug_rollback3.rs", "rank": 59, "score": 189430.41088714387 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n let bodies = RigidBodySet::new();\n\n let colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n let rad = 0.5;\n\n\n\n // Callback that will be executed on the main loop to handle proximities.\n\n testbed.add_callback(move |mut window, mut graphics, physics, _, _| {\n\n let rigid_body = RigidBodyBuilder::new_dynamic()\n\n .translation(0.0, 10.0)\n\n .build();\n\n let handle = physics.bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(rad, rad).build();\n\n physics\n\n .colliders\n\n .insert(collider, handle, &mut physics.bodies);\n\n\n\n if let (Some(graphics), Some(window)) = (&mut graphics, &mut window) {\n\n graphics.add(*window, handle, &physics.bodies, &physics.colliders);\n\n }\n", "file_path": "examples2d/add_remove2.rs", "rank": 60, "score": 189430.41088714387 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 100.1;\n\n let ground_height = 0.1;\n\n\n\n for _ in 0..6 {\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n", "file_path": "examples3d/debug_boxes3.rs", "rank": 61, "score": 189430.41088714387 }, { "content": "pub fn 
init_world(harness: &mut Harness) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 200.1;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, ground_size).build();\n\n colliders.insert(collider, handle, &mut bodies);\n", "file_path": "benchmarks3d/harness_capsules3.rs", "rank": 62, "score": 189430.41088714387 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n // Triangle ground.\n\n let vtx = [\n\n Point3::new(-10.0, 0.0, -10.0),\n\n Point3::new(10.0, 0.0, -10.0),\n\n Point3::new(0.0, 0.0, 10.0),\n\n ];\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, 0.0, 0.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::triangle(vtx[0], vtx[1], vtx[2]).build();\n", "file_path": "examples3d/debug_triangle3.rs", "rank": 63, "score": 189430.41088714387 }, { "content": "fn edges_walker_mut<E>(\n\n edges: &mut [Edge<E>],\n\n next: EdgeIndex,\n\n dir: Direction,\n\n) -> EdgesWalkerMut<E> {\n\n EdgesWalkerMut { edges, next, dir }\n\n}\n\n\n\nimpl<'a, E> EdgesWalkerMut<'a, E> {\n\n fn next_edge(&mut self) -> Option<&mut Edge<E>> {\n\n self.next().map(|t| t.1)\n\n }\n\n\n\n fn next(&mut self) -> Option<(EdgeIndex, &mut Edge<E>)> {\n\n let this_index = self.next;\n\n let k = self.dir as usize;\n\n match self.edges.get_mut(self.next.index()) {\n\n None => None,\n\n Some(edge) => {\n\n self.next = edge.next[k];\n", "file_path": "src/data/graph.rs", "rank": 64, "score": 189126.96543706645 }, { "content": 
"pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let rad = 1.0;\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -rad)\n\n .rotation(std::f32::consts::PI / 4.0)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(rad, rad).build();\n\n colliders.insert(collider, handle, &mut bodies);\n\n\n", "file_path": "examples2d/debug_box_ball2.rs", "rank": 65, "score": 186206.03044923238 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n\n\n let rad = 1.0;\n\n // Build the dynamic box rigid body.\n\n let rigid_body = RigidBodyBuilder::new_dynamic()\n\n .translation(0.0, 3.0 * rad, 0.0)\n\n .can_sleep(false)\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::ball(rad).build();\n\n colliders.insert(collider, handle, &mut bodies);\n\n\n\n /*\n\n * Set up the testbed.\n\n */\n\n testbed.set_world(bodies, colliders, joints);\n\n}\n\n\n", "file_path": "examples3d/debug_infinite_fall3.rs", "rank": 66, "score": 186206.03044923238 }, { "content": "#[cfg(feature = \"dim3\")]\n\npub fn cuboid_cuboid_compute_separation_wrt_local_line(\n\n cube1: &Cuboid,\n\n cube2: &Cuboid,\n\n pos12: &Isometry<f32>,\n\n pos21: &Isometry<f32>,\n\n axis1: &Vector<f32>,\n\n) -> (f32, Vector<f32>) {\n\n let signum = pos12.translation.vector.dot(axis1).copy_sign_to(1.0);\n\n let axis1 = axis1 * signum;\n\n let local_pt1 = cuboid::local_support_point(cube1, axis1);\n\n let local_pt2 = cuboid::local_support_point(cube2, pos21 * -axis1);\n\n let pt2 = pos12 * local_pt2;\n\n let separation = (pt2 - local_pt1).dot(&axis1);\n\n (separation, axis1)\n\n}\n\n\n", 
"file_path": "src/geometry/sat.rs", "rank": 67, "score": 185970.0191112067 }, { "content": "#[cfg(feature = \"dim3\")]\n\npub fn cuboid_cuboid_find_local_separating_edge_twoway(\n\n cube1: &Cuboid,\n\n cube2: &Cuboid,\n\n pos12: &Isometry<f32>,\n\n pos21: &Isometry<f32>,\n\n) -> (f32, Vector<f32>) {\n\n use approx::AbsDiffEq;\n\n let mut best_separation = -std::f32::MAX;\n\n let mut best_dir = Vector::zeros();\n\n\n\n let x2 = pos12 * Vector::x();\n\n let y2 = pos12 * Vector::y();\n\n let z2 = pos12 * Vector::z();\n\n\n\n // We have 3 * 3 = 9 axes to test.\n\n let axes = [\n\n // Vector::{x, y ,z}().cross(y2)\n\n Vector::new(0.0, -x2.z, x2.y),\n\n Vector::new(x2.z, 0.0, -x2.x),\n\n Vector::new(-x2.y, x2.x, 0.0),\n", "file_path": "src/geometry/sat.rs", "rank": 68, "score": 185970.0191112067 }, { "content": "pub fn cuboid_cuboid_find_local_separating_normal_oneway(\n\n cube1: &Cuboid,\n\n cube2: &Cuboid,\n\n pos12: &Isometry<f32>,\n\n pos21: &Isometry<f32>,\n\n) -> (f32, Vector<f32>) {\n\n let mut best_separation = -std::f32::MAX;\n\n let mut best_dir = Vector::zeros();\n\n\n\n for i in 0..DIM {\n\n let sign = pos12.translation.vector[i].copy_sign_to(1.0);\n\n let axis1 = Vector::ith(i, sign);\n\n let local_pt2 = cuboid::local_support_point(cube2, pos21 * -axis1);\n\n let pt2 = pos12 * local_pt2;\n\n let separation = pt2[i] * sign - cube1.half_extents[i];\n\n\n\n if separation > best_separation {\n\n best_separation = separation;\n\n best_dir = axis1;\n\n }\n", "file_path": "src/geometry/sat.rs", "rank": 69, "score": 185964.5035550416 }, { "content": "struct EdgesWalkerMut<'a, E: 'a> {\n\n edges: &'a mut [Edge<E>],\n\n next: EdgeIndex,\n\n dir: Direction,\n\n}\n\n\n", "file_path": "src/data/graph.rs", "rank": 70, "score": 183197.76666114252 }, { "content": "pub fn init_world(testbed: &mut Testbed) {\n\n /*\n\n * World\n\n */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n let joints = JointSet::new();\n\n /*\n\n * Ground.\n\n 
*/\n\n let ground_size = 3.0;\n\n let ground_height = 0.1;\n\n\n\n let rigid_body = RigidBodyBuilder::new_static()\n\n .translation(0.0, -ground_height, 0.0)\n\n .build();\n\n let ground_handle = bodies.insert(rigid_body);\n\n let collider = ColliderBuilder::cuboid(ground_size, ground_height, 0.4).build();\n\n let mut ground_collider_handle = colliders.insert(collider, ground_handle, &mut bodies);\n\n\n", "file_path": "examples3d/debug_add_remove_collider3.rs", "rank": 71, "score": 183146.4363374156 }, { "content": "#[cfg(feature = \"dim2\")]\n\npub fn generate_contacts<'a>(\n\n prediction_distance: f32,\n\n capsule1: &'a Capsule,\n\n pos1: &'a Isometry<f32>,\n\n capsule2: &'a Capsule,\n\n pos2: &'a Isometry<f32>,\n\n manifold: &mut ContactManifold,\n\n) {\n\n // FIXME: the contact kinematics is not correctly set here.\n\n // We use the common \"Point-Plane\" kinematics with zero radius everytime.\n\n // Instead we should select point/point ore point-plane (with non-zero\n\n // radius for the point) depending on the features involved in the contact.\n\n let pos12 = pos1.inverse() * pos2;\n\n let pos21 = pos12.inverse();\n\n\n\n let seg1 = capsule1.segment;\n\n let seg2_1 = capsule2.segment.transformed(&pos12);\n\n let (loc1, loc2) = ncollide::query::closest_points_segment_segment_with_locations_nD(\n\n (&seg1.a, &seg1.b),\n\n (&seg2_1.a, &seg2_1.b),\n", "file_path": "src/geometry/contact_generator/capsule_capsule_contact_generator.rs", "rank": 72, "score": 180767.16052699872 }, { "content": "pub fn generate_contacts<'a>(\n\n prediction_distance: f32,\n\n mut cube1: &'a Cuboid<f32>,\n\n mut pos1: &'a Isometry<f32>,\n\n mut cube2: &'a Cuboid<f32>,\n\n mut pos2: &'a Isometry<f32>,\n\n manifold: &mut ContactManifold,\n\n) {\n\n let mut pos12 = pos1.inverse() * pos2;\n\n let mut pos21 = pos12.inverse();\n\n\n\n if manifold.try_update_contacts(&pos12) {\n\n return;\n\n }\n\n\n\n /*\n\n *\n\n * Point-Face\n\n *\n\n */\n", "file_path": 
"src/geometry/contact_generator/cuboid_cuboid_contact_generator.rs", "rank": 73, "score": 180570.56985403344 }, { "content": "pub fn detect_proximity<'a>(\n\n prediction_distance: f32,\n\n cube1: &'a Cuboid<f32>,\n\n pos1: &'a Isometry<f32>,\n\n cube2: &'a Cuboid<f32>,\n\n pos2: &'a Isometry<f32>,\n\n) -> Proximity {\n\n let pos12 = pos1.inverse() * pos2;\n\n let pos21 = pos12.inverse();\n\n\n\n /*\n\n *\n\n * Point-Face\n\n *\n\n */\n\n let sep1 =\n\n sat::cuboid_cuboid_find_local_separating_normal_oneway(cube1, cube2, &pos12, &pos21).0;\n\n if sep1 > prediction_distance {\n\n return Proximity::Disjoint;\n\n }\n", "file_path": "src/geometry/proximity_detector/cuboid_cuboid_proximity_detector.rs", "rank": 74, "score": 180570.56985403344 }, { "content": "pub fn detect_proximity_cuboid_triangle(\n\n ctxt: &mut PrimitiveProximityDetectionContext,\n\n) -> Proximity {\n\n if let (Some(cube1), Some(triangle2)) = (ctxt.shape1.as_cuboid(), ctxt.shape2.as_triangle()) {\n\n detect_proximity(\n\n ctxt.prediction_distance,\n\n cube1,\n\n ctxt.position1,\n\n triangle2,\n\n ctxt.position2,\n\n )\n\n } else if let (Some(triangle1), Some(cube2)) =\n\n (ctxt.shape1.as_triangle(), ctxt.shape2.as_cuboid())\n\n {\n\n detect_proximity(\n\n ctxt.prediction_distance,\n\n cube2,\n\n ctxt.position2,\n\n triangle1,\n\n ctxt.position1,\n\n )\n\n } else {\n\n panic!(\"Invalid shape types\")\n\n }\n\n}\n\n\n", "file_path": "src/geometry/proximity_detector/cuboid_triangle_proximity_detector.rs", "rank": 75, "score": 180507.09688123944 }, { "content": "#[cfg(feature = \"dim2\")]\n\npub fn support_face(cube: &Cuboid, local_dir: Vector<f32>) -> CuboidFeatureFace {\n\n let he = cube.half_extents;\n\n let i = local_dir.iamin();\n\n let j = (i + 1) % 2;\n\n let mut a = Point::origin();\n\n a[i] = he[i];\n\n a[j] = local_dir[j].copy_sign_to(he[j]);\n\n\n\n let mut b = a;\n\n b[i] = -he[i];\n\n\n\n let vid1 = vertex_feature_id(a);\n\n let vid2 = vertex_feature_id(b);\n\n let fid = (vid1.max(vid2) << 2) 
| vid1.min(vid2) | 0b11_00_00;\n\n\n\n CuboidFeatureFace {\n\n vertices: [a, b],\n\n vids: [vid1, vid2],\n\n fid,\n\n }\n", "file_path": "src/geometry/cuboid.rs", "rank": 76, "score": 179670.45754909766 }, { "content": "fn draw_contacts(window: &mut Window, nf: &NarrowPhase, colliders: &ColliderSet) {\n\n for pair in nf.contact_pairs() {\n\n for manifold in &pair.manifolds {\n\n for pt in manifold.all_contacts() {\n\n let color = if pt.dist > 0.0 {\n\n Point3::new(0.0, 0.0, 1.0)\n\n } else {\n\n Point3::new(1.0, 0.0, 0.0)\n\n };\n\n let pos1 = colliders[manifold.pair.collider1].position();\n\n let pos2 = colliders[manifold.pair.collider2].position();\n\n let start = pos1 * pt.local_p1;\n\n let end = pos2 * pt.local_p2;\n\n let n = pos1 * manifold.local_n1;\n\n\n\n use crate::engine::GraphicsWindow;\n\n window.draw_graphics_line(&start, &(start + n * 0.4), &Point3::new(0.5, 1.0, 0.5));\n\n window.draw_graphics_line(&start, &end, &color);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src_testbed/testbed.rs", "rank": 77, "score": 176314.03028761904 }, { "content": "#[cfg(feature = \"dim2\")]\n\npub fn segment_cuboid_find_local_separating_normal_oneway(\n\n segment1: &Segment,\n\n shape2: &Cuboid,\n\n pos12: &Isometry<f32>,\n\n) -> (f32, Vector<f32>) {\n\n point_cuboid_find_local_separating_normal_oneway(segment1.a, segment1.normal(), shape2, pos12)\n\n}\n\n\n\n/*\n\n * Capsules\n\n */\n", "file_path": "src/geometry/sat.rs", "rank": 78, "score": 173428.42454793467 }, { "content": "// NOTE: this only works with cuboid on the rhs because it has its symmetry origin at zero\n\n// (therefore we can check only one normal direction).\n\npub fn point_cuboid_find_local_separating_normal_oneway(\n\n point1: Point<f32>,\n\n normal1: Option<Unit<Vector<f32>>>,\n\n shape2: &Cuboid,\n\n pos12: &Isometry<f32>,\n\n) -> (f32, Vector<f32>) {\n\n let mut best_separation = -std::f32::MAX;\n\n let mut best_dir = Vector::zeros();\n\n\n\n if let Some(normal1) = normal1 {\n\n let axis1 = if 
(pos12.translation.vector - point1.coords).dot(&normal1) >= 0.0 {\n\n normal1\n\n } else {\n\n -normal1\n\n };\n\n\n\n let pt2 = shape2.support_point_toward(&pos12, &-axis1);\n\n let separation = (pt2 - point1).dot(&axis1);\n\n\n\n if separation > best_separation {\n\n best_separation = separation;\n\n best_dir = *axis1;\n\n }\n\n }\n\n\n\n (best_separation, best_dir)\n\n}\n\n\n", "file_path": "src/geometry/sat.rs", "rank": 79, "score": 173427.96932191483 }, { "content": "pub fn triangle_cuboid_find_local_separating_normal_oneway(\n\n triangle1: &Triangle,\n\n shape2: &Cuboid,\n\n pos12: &Isometry<f32>,\n\n) -> (f32, Vector<f32>) {\n\n point_cuboid_find_local_separating_normal_oneway(triangle1.a, triangle1.normal(), shape2, pos12)\n\n}\n\n\n", "file_path": "src/geometry/sat.rs", "rank": 80, "score": 173422.8762671612 }, { "content": "#[cfg(feature = \"dim3\")]\n\npub fn polyhedron_support_face(cube: &Cuboid, local_dir: Vector<f32>) -> PolyhedronFace {\n\n support_face(cube, local_dir).into()\n\n}\n\n\n\n#[cfg(feature = \"dim2\")]\n\npub(crate) fn support_feature(cube: &Cuboid, local_dir: Vector<f32>) -> CuboidFeature {\n\n // In 2D, it is best for stability to always return a face.\n\n // It won't have any notable impact on performances anyway.\n\n CuboidFeature::Face(support_face(cube, local_dir))\n\n\n\n /*\n\n let amax = local_dir.amax();\n\n\n\n const MAX_DOT_THRESHOLD: f32 = 0.98480775301; // 10 degrees.\n\n\n\n if amax > MAX_DOT_THRESHOLD {\n\n // Support face.\n\n CuboidFeature::Face(cube.support_face(local_dir))\n\n } else {\n\n // Support vertex\n", "file_path": "src/geometry/cuboid.rs", "rank": 81, "score": 172994.2610107434 }, { "content": "pub fn local_support_point(cube: &Cuboid, local_dir: Vector<f32>) -> Point<f32> {\n\n local_dir.copy_sign_to(cube.half_extents).into()\n\n}\n\n\n\n// #[cfg(feature = \"dim2\")]\n\n// pub fn polygon_ref(\n\n// cuboid: Cuboid,\n\n// out_vertices: &mut [Point<f32>; 4],\n\n// out_normals: &mut [Vector<f32>; 4],\n\n// ) -> 
PolygonRef {\n\n// *out_vertices = [\n\n// Point::new(cuboid.half_extents.x, -cuboid.half_extents.y),\n\n// Point::new(cuboid.half_extents.x, cuboid.half_extents.y),\n\n// Point::new(-cuboid.half_extents.x, cuboid.half_extents.y),\n\n// Point::new(-cuboid.half_extents.x, -cuboid.half_extents.y),\n\n// ];\n\n// *out_normals = [Vector::x(), Vector::y(), -Vector::x(), -Vector::y()];\n\n//\n\n// PolygonRef {\n\n// vertices: &out_vertices[..],\n\n// normals: &out_normals[..],\n\n// }\n\n// }\n\n\n", "file_path": "src/geometry/cuboid.rs", "rank": 82, "score": 169705.1876152027 }, { "content": "fn physx_collider_from_rapier_collider(\n\n collider: &Collider,\n\n) -> Option<(ColliderDesc, Isometry3<f32>)> {\n\n let mut local_pose = *collider.position_wrt_parent();\n\n let shape = collider.shape();\n\n\n\n let desc = if let Some(cuboid) = shape.as_cuboid() {\n\n ColliderDesc::Box(\n\n cuboid.half_extents.x,\n\n cuboid.half_extents.y,\n\n cuboid.half_extents.z,\n\n )\n\n } else if let Some(ball) = shape.as_ball() {\n\n ColliderDesc::Sphere(ball.radius)\n\n } else if let Some(capsule) = shape.as_capsule() {\n\n let center = capsule.center();\n\n let mut dir = capsule.segment.b - capsule.segment.a;\n\n\n\n if dir.x < 0.0 {\n\n dir = -dir;\n", "file_path": "src_testbed/physx_backend.rs", "rank": 83, "score": 169051.54921743713 }, { "content": "fn b2_transform_to_na_isometry(v: b2::Transform) -> Isometry2<f32> {\n\n Isometry2::new(b2_vec_to_na_vec(v.pos), v.rot.angle())\n\n}\n\n\n\npub struct Box2dWorld {\n\n world: b2::World<NoUserData>,\n\n rapier2box2d: HashMap<RigidBodyHandle, b2::BodyHandle>,\n\n}\n\n\n\nimpl Box2dWorld {\n\n pub fn from_rapier(\n\n gravity: Vector2<f32>,\n\n bodies: &RigidBodySet,\n\n colliders: &ColliderSet,\n\n joints: &JointSet,\n\n ) -> Self {\n\n let mut world = b2::World::new(&na_vec_to_b2_vec(gravity));\n\n world.set_continuous_physics(false);\n\n\n\n let mut res = Box2dWorld {\n", "file_path": "src_testbed/box2d_backend.rs", "rank": 84, "score": 
168276.84492839803 }, { "content": "fn na_vec_to_b2_vec(v: Vector2<f32>) -> b2::Vec2 {\n\n b2::Vec2 { x: v.x, y: v.y }\n\n}\n\n\n", "file_path": "src_testbed/box2d_backend.rs", "rank": 85, "score": 168276.84492839803 }, { "content": "fn b2_vec_to_na_vec(v: b2::Vec2) -> Vector2<f32> {\n\n Vector2::new(v.x, v.y)\n\n}\n\n\n", "file_path": "src_testbed/box2d_backend.rs", "rank": 86, "score": 168276.84492839803 }, { "content": "pub fn detect_proximity<'a>(\n\n prediction_distance: f32,\n\n cube1: &'a Cuboid,\n\n pos1: &'a Isometry<f32>,\n\n triangle2: &'a Triangle,\n\n pos2: &'a Isometry<f32>,\n\n) -> Proximity {\n\n let pos12 = pos1.inverse() * pos2;\n\n let pos21 = pos12.inverse();\n\n\n\n /*\n\n *\n\n * Point-Face cases.\n\n *\n\n */\n\n let sep1 =\n\n sat::cube_support_map_find_local_separating_normal_oneway(cube1, triangle2, &pos12).0;\n\n if sep1 > prediction_distance {\n\n return Proximity::Disjoint;\n\n }\n", "file_path": "src/geometry/proximity_detector/cuboid_triangle_proximity_detector.rs", "rank": 87, "score": 168028.94256615304 }, { "content": "pub fn generate_contacts<'a>(\n\n prediction_distance: f32,\n\n cube1: &'a Cuboid,\n\n mut pos1: &'a Isometry<f32>,\n\n triangle2: &'a Triangle,\n\n mut pos2: &'a Isometry<f32>,\n\n manifold: &mut ContactManifold,\n\n swapped: bool,\n\n) {\n\n let mut pos12 = pos1.inverse() * pos2;\n\n let mut pos21 = pos12.inverse();\n\n\n\n if (!swapped && manifold.try_update_contacts(&pos12))\n\n || (swapped && manifold.try_update_contacts(&pos21))\n\n {\n\n return;\n\n }\n\n\n\n /*\n\n *\n", "file_path": "src/geometry/contact_generator/cuboid_triangle_contact_generator.rs", "rank": 88, "score": 168028.94256615304 }, { "content": "#[cfg_attr(feature = \"serde-serialize\", derive(Serialize, Deserialize))]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nstruct ColliderGraphIndices {\n\n contact_graph_index: ColliderGraphIndex,\n\n proximity_graph_index: ColliderGraphIndex,\n\n}\n\n\n\nimpl ColliderGraphIndices {\n\n fn invalid() 
-> Self {\n\n Self {\n\n contact_graph_index: InteractionGraph::<ContactPair>::invalid_graph_index(),\n\n proximity_graph_index: InteractionGraph::<ProximityPair>::invalid_graph_index(),\n\n }\n\n }\n\n}\n\n\n\n/// The narrow-phase responsible for computing precise contact information between colliders.\n\n#[cfg_attr(feature = \"serde-serialize\", derive(Serialize, Deserialize))]\n\n#[derive(Clone)]\n\npub struct NarrowPhase {\n\n contact_graph: InteractionGraph<ContactPair>,\n\n proximity_graph: InteractionGraph<ProximityPair>,\n", "file_path": "src/geometry/narrow_phase.rs", "rank": 89, "score": 166267.34095254412 }, { "content": "pub fn generate_contacts_ball_convex(ctxt: &mut PrimitiveContactGenerationContext) {\n\n if let Some(ball1) = ctxt.shape1.as_ball() {\n\n ctxt.manifold.swap_identifiers();\n\n do_generate_contacts(ctxt.shape2, ball1, ctxt, true);\n\n } else if let Some(ball2) = ctxt.shape2.as_ball() {\n\n do_generate_contacts(ctxt.shape1, ball2, ctxt, false);\n\n }\n\n\n\n ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/ball_convex_contact_generator.rs", "rank": 90, "score": 163343.38848140603 }, { "content": "pub fn generate_contacts_polygon_polygon(_ctxt: &mut PrimitiveContactGenerationContext) {\n\n unimplemented!()\n\n // if let (Shape::Polygon(polygon1), Shape::Polygon(polygon2)) = (ctxt.shape1, ctxt.shape2) {\n\n // generate_contacts(\n\n // polygon1,\n\n // &ctxt.position1,\n\n // polygon2,\n\n // &ctxt.position2,\n\n // ctxt.manifold,\n\n // );\n\n // ctxt.manifold.update_warmstart_multiplier();\n\n // } else {\n\n // unreachable!()\n\n // }\n\n //\n\n // ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/polygon_polygon_contact_generator.rs", "rank": 91, "score": 163343.38848140603 }, { "content": "pub fn generate_contacts_ball_ball(ctxt: &mut PrimitiveContactGenerationContext) {\n\n let pos_ba = 
ctxt.position2.inverse() * ctxt.position1;\n\n let radius_a = ctxt.shape1.as_ball().unwrap().radius;\n\n let radius_b = ctxt.shape2.as_ball().unwrap().radius;\n\n\n\n let dcenter = pos_ba.inverse_transform_point(&Point::origin()).coords;\n\n let center_dist = dcenter.magnitude();\n\n let dist = center_dist - radius_a - radius_b;\n\n\n\n if dist < ctxt.prediction_distance {\n\n let local_n1 = if center_dist != 0.0 {\n\n dcenter / center_dist\n\n } else {\n\n Vector::y()\n\n };\n\n\n\n let local_n2 = pos_ba.inverse_transform_vector(&-local_n1);\n\n let local_p1 = local_n1 * radius_a;\n\n let local_p2 = local_n2 * radius_b;\n\n let contact = Contact::new(local_p1.into(), local_p2.into(), 0, 0, dist);\n", "file_path": "src/geometry/contact_generator/ball_ball_contact_generator.rs", "rank": 92, "score": 163343.38848140603 }, { "content": "pub fn generate_contacts_pfm_pfm(ctxt: &mut PrimitiveContactGenerationContext) {\n\n if let (Some((pfm1, border_radius1)), Some((pfm2, border_radius2))) = (\n\n ctxt.shape1.as_polygonal_feature_map(),\n\n ctxt.shape2.as_polygonal_feature_map(),\n\n ) {\n\n do_generate_contacts(pfm1, border_radius1, pfm2, border_radius2, ctxt);\n\n ctxt.manifold.update_warmstart_multiplier();\n\n ctxt.manifold.sort_contacts(ctxt.prediction_distance);\n\n }\n\n}\n\n\n", "file_path": "src/geometry/contact_generator/pfm_pfm_contact_generator.rs", "rank": 93, "score": 163343.38848140603 }, { "content": "#[cfg(feature = \"simd-is-enabled\")]\n\npub fn generate_contacts_ball_ball_simd(ctxt: &mut PrimitiveContactGenerationContextSimd) {\n\n let pos_ba = ctxt.positions2.inverse() * ctxt.positions1;\n\n let radii_a =\n\n SimdFloat::from(array![|ii| ctxt.shapes1[ii].as_ball().unwrap().radius; SIMD_WIDTH]);\n\n let radii_b =\n\n SimdFloat::from(array![|ii| ctxt.shapes2[ii].as_ball().unwrap().radius; SIMD_WIDTH]);\n\n\n\n let wball_a = WBall::new(Point::origin(), radii_a);\n\n let wball_b = WBall::new(pos_ba.inverse_transform_point(&Point::origin()), radii_b);\n\n 
let contacts = generate_contacts_simd(&wball_a, &wball_b, &pos_ba);\n\n\n\n for (i, manifold) in ctxt.manifolds.iter_mut().enumerate() {\n\n // FIXME: compare the dist before extracting the contact.\n\n let (contact, local_n1, local_n2) = contacts.extract(i);\n\n if contact.dist <= ctxt.prediction_distance {\n\n if manifold.points.len() != 0 {\n\n manifold.points[0].copy_geometry_from(contact);\n\n } else {\n\n manifold.points.push(contact);\n\n }\n", "file_path": "src/geometry/contact_generator/ball_ball_contact_generator.rs", "rank": 94, "score": 159400.22902140583 }, { "content": "#[cfg(feature = \"dim2\")]\n\npub fn vertex_feature_id(vertex: Point<f32>) -> u8 {\n\n ((vertex.x.to_bits() >> 31) & 0b001 | (vertex.y.to_bits() >> 30) & 0b010) as u8\n\n}\n\n\n\n// #[cfg(feature = \"dim3\")]\n\n// pub fn vertex_feature_id(vertex: Point<f32>) -> u8 {\n\n// ((vertex.x.to_bits() >> 31) & 0b001\n\n// | (vertex.y.to_bits() >> 30) & 0b010\n\n// | (vertex.z.to_bits() >> 29) & 0b100) as u8\n\n// }\n\n\n", "file_path": "src/geometry/cuboid.rs", "rank": 95, "score": 156637.259648911 }, { "content": "pub fn detect_proximity_ball_ball(ctxt: &mut PrimitiveProximityDetectionContext) -> Proximity {\n\n let pos_ba = ctxt.position2.inverse() * ctxt.position1;\n\n let radius_a = ctxt.shape1.as_ball().unwrap().radius;\n\n let radius_b = ctxt.shape2.as_ball().unwrap().radius;\n\n\n\n let center_a = Point::origin();\n\n let center_b = pos_ba.inverse_transform_point(&Point::origin());\n\n\n\n let dcenter = center_b - center_a;\n\n let center_dist = dcenter.magnitude();\n\n let dist = center_dist - radius_a - radius_b;\n\n\n\n if dist > ctxt.prediction_distance {\n\n Proximity::Disjoint\n\n } else if dist > 0.0 {\n\n Proximity::WithinMargin\n\n } else {\n\n Proximity::Intersecting\n\n }\n\n}\n", "file_path": "src/geometry/proximity_detector/ball_ball_proximity_detector.rs", "rank": 96, "score": 156417.3319783783 }, { "content": "pub fn detect_proximity_ball_convex(ctxt: &mut 
PrimitiveProximityDetectionContext) -> Proximity {\n\n if let Some(ball1) = ctxt.shape1.as_ball() {\n\n do_detect_proximity(ctxt.shape2, ball1, &ctxt)\n\n } else if let Some(ball2) = ctxt.shape2.as_ball() {\n\n do_detect_proximity(ctxt.shape1, ball2, &ctxt)\n\n } else {\n\n panic!(\"Invalid shape types provide.\")\n\n }\n\n}\n\n\n", "file_path": "src/geometry/proximity_detector/ball_convex_proximity_detector.rs", "rank": 97, "score": 156417.3319783783 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen(start))]\n\npub fn main() {\n\n let demo = demo_name_from_command_line()\n\n .or_else(|| demo_name_from_url())\n\n .unwrap_or(String::new())\n\n .to_camel_case();\n\n\n\n let mut builders: Vec<(_, fn(&mut Testbed))> = vec![\n\n (\"Balls\", balls2::init_world),\n\n (\"Boxes\", boxes2::init_world),\n\n (\"Capsules\", capsules2::init_world),\n\n (\"Heightfield\", heightfield2::init_world),\n\n (\"Pyramid\", pyramid2::init_world),\n\n (\"(Stress test) joint ball\", joint_ball2::init_world),\n\n (\"(Stress test) joint fixed\", joint_fixed2::init_world),\n\n (\n\n \"(Stress test) joint prismatic\",\n\n joint_prismatic2::init_world,\n\n ),\n\n ];\n\n\n", "file_path": "benchmarks2d/all_benchmarks2.rs", "rank": 98, "score": 151288.05874241635 } ]
Rust
tests/cli-v1.rs
Boddlnagg/rustup.rs
c4e7616c565630d0ad93767ec8676c8a570995c5
extern crate rustup_dist; extern crate rustup_utils; extern crate rustup_mock; extern crate tempdir; use std::fs; use tempdir::TempDir; use rustup_mock::clitools::{self, Config, Scenario, expect_ok, expect_stdout_ok, expect_err, expect_stderr_ok, set_current_dist_date, this_host_triple}; macro_rules! for_host { ($s: expr) => (&format!($s, this_host_triple())) } pub fn setup(f: &Fn(&mut Config)) { clitools::setup(Scenario::SimpleV1, f); } #[test] fn rustc_no_default_toolchain() { setup(&|config| { expect_err(config, &["rustc"], "no default toolchain configured"); }); } #[test] fn expected_bins_exist() { setup(&|config| { expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "1.3.0"); }); } #[test] fn install_toolchain_from_channel() { setup(&|config| { expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); expect_ok(config, &["rustup", "default", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); expect_ok(config, &["rustup", "default", "stable"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn install_toolchain_from_archive() { clitools::setup(Scenario::ArchivesV1, &|config| { expect_ok(config, &["rustup", "default" , "nightly-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); expect_ok(config, &["rustup", "default" , "beta-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-1"); expect_ok(config, &["rustup", "default" , "stable-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-1"); }); } #[test] fn install_toolchain_from_version() { setup(&|config| { expect_ok(config, &["rustup", "default" , "1.1.0"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn default_existing_toolchain() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_stderr_ok(config, &["rustup", "default", 
"nightly"], for_host!("using existing install for 'nightly-{0}'")); }); } #[test] fn update_channel() { clitools::setup(Scenario::ArchivesV1, &|config| { set_current_dist_date(config, "2015-01-01"); expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); set_current_dist_date(config, "2015-01-02"); expect_ok(config, &["rustup", "update", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); } #[test] fn list_toolchains() { clitools::setup(Scenario::ArchivesV1, &|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_ok(config, &["rustup", "update", "beta-2015-01-01"]); expect_stdout_ok(config, &["rustup", "toolchain", "list"], "nightly"); expect_stdout_ok(config, &["rustup", "toolchain", "list"], "beta-2015-01-01"); }); } #[test] fn list_toolchains_with_none() { setup(&|config| { expect_stdout_ok(config, &["rustup", "toolchain", "list"], "no installed toolchains"); }); } #[test] fn remove_toolchain() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_ok(config, &["rustup", "toolchain", "remove", "nightly"]); expect_ok(config, &["rustup", "toolchain", "list"]); expect_stdout_ok(config, &["rustup", "toolchain", "list"], "no installed toolchains"); }); } #[test] fn remove_default_toolchain_err_handling() { setup(&|config| { expect_ok(config, &["rustup", "default", "nightly"]); expect_ok(config, &["rustup", "toolchain", "remove", "nightly"]); expect_err(config, &["rustc"], for_host!("toolchain 'nightly-{0}' is not installed")); }); } #[test] fn remove_override_toolchain_err_handling() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "default", "nightly"]); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_ok(config, &["rustup", "toolchain", "remove", "beta"]); expect_err(config, &["rustc"], for_host!("toolchain 'beta-{0}' is not 
installed")); }); }); } #[test] fn bad_sha_on_manifest() { setup(&|config| { let sha_file = config.distdir.join("dist/channel-rust-nightly.sha256"); let sha_str = rustup_utils::raw::read_file(&sha_file).unwrap(); let mut sha_bytes = sha_str.into_bytes(); sha_bytes[..10].clone_from_slice(b"aaaaaaaaaa"); let sha_str = String::from_utf8(sha_bytes).unwrap(); rustup_utils::raw::write_file(&sha_file, &sha_str).unwrap(); expect_err(config, &["rustup", "default", "nightly"], "checksum failed"); }); } #[test] fn bad_sha_on_installer() { setup(&|config| { let dir = config.distdir.join("dist"); for file in fs::read_dir(&dir).unwrap() { let file = file.unwrap(); let path = file.path(); let filename = path.to_string_lossy(); if filename.ends_with(".tar.gz") || filename.ends_with(".tar.xz") { rustup_utils::raw::write_file(&path, "xxx").unwrap(); } } expect_err(config, &["rustup", "default", "nightly"], "checksum failed"); }); } #[test] fn install_override_toolchain_from_channel() { setup(&|config| { expect_ok(config, &["rustup", "override", "add", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); expect_ok(config, &["rustup", "override", "add", "stable"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn install_override_toolchain_from_archive() { clitools::setup(Scenario::ArchivesV1, &|config| { expect_ok(config, &["rustup", "override", "add", "nightly-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); expect_ok(config, &["rustup", "override", "add", "beta-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-1"); expect_ok(config, &["rustup", "override", "add", "stable-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-1"); }); } #[test] fn install_override_toolchain_from_version() { setup(&|config| { expect_ok(config, &["rustup", 
"override", "add", "1.1.0"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn override_overrides_default() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); expect_ok(config, &["rustup", "default" , "nightly"]); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "override" , "add", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); }); }); } #[test] fn multiple_overrides() { setup(&|config| { let tempdir1 = TempDir::new("rustup").unwrap(); let tempdir2 = TempDir::new("rustup").unwrap(); expect_ok(config, &["rustup", "default", "nightly"]); config.change_dir(tempdir1.path(), &|| { expect_ok(config, &["rustup", "override", "add", "beta"]); }); config.change_dir(tempdir2.path(), &|| { expect_ok(config, &["rustup", "override", "add", "stable"]); }); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); config.change_dir(tempdir1.path(), &|| { expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); }); config.change_dir(tempdir2.path(), &|| { expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); }); } #[test] fn change_override() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "override", "add", "nightly"]); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); }); }); } #[test] fn remove_override_no_default() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "override", "add", "nightly"]); expect_ok(config, &["rustup", "override", "remove"]); expect_err(config, &["rustc"], "no default toolchain configured"); }); }); } #[test] fn remove_override_with_default() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "default", 
"nightly"]); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_ok(config, &["rustup", "override", "remove"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); }); } #[test] fn remove_override_with_multiple_overrides() { setup(&|config| { let tempdir1 = TempDir::new("rustup").unwrap(); let tempdir2 = TempDir::new("rustup").unwrap(); expect_ok(config, &["rustup", "default", "nightly"]); config.change_dir(tempdir1.path(), &|| { expect_ok(config, &["rustup", "override", "add", "beta"]); }); config.change_dir(tempdir2.path(), &|| { expect_ok(config, &["rustup", "override", "add", "stable"]); }); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); config.change_dir(tempdir1.path(), &|| { expect_ok(config, &["rustup", "override", "remove"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); config.change_dir(tempdir2.path(), &|| { expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); }); } #[test] fn no_update_on_channel_when_date_has_not_changed() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_stdout_ok(config, &["rustup", "update", "nightly"], "unchanged"); }); } #[test] fn update_on_channel_when_date_has_changed() { clitools::setup(Scenario::ArchivesV1, &|config| { set_current_dist_date(config, "2015-01-01"); expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); set_current_dist_date(config, "2015-01-02"); expect_ok(config, &["rustup", "update", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); } #[test] fn run_command() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_ok(config, &["rustup", "default", "beta"]); expect_stdout_ok(config, &["rustup", "run", "nightly", "rustc" , "--version"], "hash-n-2"); }); } #[test] fn remove_toolchain_then_add_again() { setup(&|config| { expect_ok(config, &["rustup", "default", "beta"]); expect_ok(config, 
&["rustup", "toolchain", "remove", "beta"]); expect_ok(config, &["rustup", "update", "beta"]); expect_ok(config, &["rustc", "--version"]); }); }
extern crate rustup_dist; extern crate rustup_utils; extern crate rustup_mock; extern crate tempdir; use std::fs; use tempdir::TempDir; use rustup_mock::clitools::{self, Config, Scenario, expect_ok, expect_stdout_ok, expect_err, expect_stderr_ok, set_current_dist_date, this_host_triple}; macro_rules! for_host { ($s: expr) => (&format!($s, this_host_triple())) } pub fn setup(f: &Fn(&mut Config)) { clitools::setup(Scenario::SimpleV1, f); } #[test] fn rustc_no_default_toolchain() { setup(&|config| { expect_err(config, &["rustc"], "no default toolchain configured"); }); } #[test] fn expected_bins_exist() { setup(&|config| { expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "1.3.0"); }); } #[test] fn install_toolchain_from_channel() { setup(&|config| { expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); expect_ok(config, &["rustup", "default", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); expect_ok(config, &["rustup", "default", "stable"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn install_toolchain_from_archive() { clitools::setup(Scenario::ArchivesV1, &|config| { expect_ok(config, &["rustup", "default" , "nightly-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); expect_ok(config, &["rustup", "default" , "beta-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-1"); expect_ok(config, &["rustup", "default" , "stable-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-1"); }); } #[test]
#[test] fn default_existing_toolchain() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_stderr_ok(config, &["rustup", "default", "nightly"], for_host!("using existing install for 'nightly-{0}'")); }); } #[test] fn update_channel() { clitools::setup(Scenario::ArchivesV1, &|config| { set_current_dist_date(config, "2015-01-01"); expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); set_current_dist_date(config, "2015-01-02"); expect_ok(config, &["rustup", "update", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); } #[test] fn list_toolchains() { clitools::setup(Scenario::ArchivesV1, &|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_ok(config, &["rustup", "update", "beta-2015-01-01"]); expect_stdout_ok(config, &["rustup", "toolchain", "list"], "nightly"); expect_stdout_ok(config, &["rustup", "toolchain", "list"], "beta-2015-01-01"); }); } #[test] fn list_toolchains_with_none() { setup(&|config| { expect_stdout_ok(config, &["rustup", "toolchain", "list"], "no installed toolchains"); }); } #[test] fn remove_toolchain() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_ok(config, &["rustup", "toolchain", "remove", "nightly"]); expect_ok(config, &["rustup", "toolchain", "list"]); expect_stdout_ok(config, &["rustup", "toolchain", "list"], "no installed toolchains"); }); } #[test] fn remove_default_toolchain_err_handling() { setup(&|config| { expect_ok(config, &["rustup", "default", "nightly"]); expect_ok(config, &["rustup", "toolchain", "remove", "nightly"]); expect_err(config, &["rustc"], for_host!("toolchain 'nightly-{0}' is not installed")); }); } #[test] fn remove_override_toolchain_err_handling() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "default", "nightly"]); expect_ok(config, &["rustup", 
"override", "add", "beta"]); expect_ok(config, &["rustup", "toolchain", "remove", "beta"]); expect_err(config, &["rustc"], for_host!("toolchain 'beta-{0}' is not installed")); }); }); } #[test] fn bad_sha_on_manifest() { setup(&|config| { let sha_file = config.distdir.join("dist/channel-rust-nightly.sha256"); let sha_str = rustup_utils::raw::read_file(&sha_file).unwrap(); let mut sha_bytes = sha_str.into_bytes(); sha_bytes[..10].clone_from_slice(b"aaaaaaaaaa"); let sha_str = String::from_utf8(sha_bytes).unwrap(); rustup_utils::raw::write_file(&sha_file, &sha_str).unwrap(); expect_err(config, &["rustup", "default", "nightly"], "checksum failed"); }); } #[test] fn bad_sha_on_installer() { setup(&|config| { let dir = config.distdir.join("dist"); for file in fs::read_dir(&dir).unwrap() { let file = file.unwrap(); let path = file.path(); let filename = path.to_string_lossy(); if filename.ends_with(".tar.gz") || filename.ends_with(".tar.xz") { rustup_utils::raw::write_file(&path, "xxx").unwrap(); } } expect_err(config, &["rustup", "default", "nightly"], "checksum failed"); }); } #[test] fn install_override_toolchain_from_channel() { setup(&|config| { expect_ok(config, &["rustup", "override", "add", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); expect_ok(config, &["rustup", "override", "add", "stable"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn install_override_toolchain_from_archive() { clitools::setup(Scenario::ArchivesV1, &|config| { expect_ok(config, &["rustup", "override", "add", "nightly-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); expect_ok(config, &["rustup", "override", "add", "beta-2015-01-01"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-1"); expect_ok(config, &["rustup", "override", "add", "stable-2015-01-01"]); 
expect_stdout_ok(config, &["rustc", "--version"], "hash-s-1"); }); } #[test] fn install_override_toolchain_from_version() { setup(&|config| { expect_ok(config, &["rustup", "override", "add", "1.1.0"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); } #[test] fn override_overrides_default() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); expect_ok(config, &["rustup", "default" , "nightly"]); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "override" , "add", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); }); }); } #[test] fn multiple_overrides() { setup(&|config| { let tempdir1 = TempDir::new("rustup").unwrap(); let tempdir2 = TempDir::new("rustup").unwrap(); expect_ok(config, &["rustup", "default", "nightly"]); config.change_dir(tempdir1.path(), &|| { expect_ok(config, &["rustup", "override", "add", "beta"]); }); config.change_dir(tempdir2.path(), &|| { expect_ok(config, &["rustup", "override", "add", "stable"]); }); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); config.change_dir(tempdir1.path(), &|| { expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); }); config.change_dir(tempdir2.path(), &|| { expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); }); } #[test] fn change_override() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "override", "add", "nightly"]); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-b-2"); }); }); } #[test] fn remove_override_no_default() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "override", "add", "nightly"]); expect_ok(config, &["rustup", "override", "remove"]); expect_err(config, &["rustc"], "no default toolchain configured"); }); }); } #[test] fn 
remove_override_with_default() { setup(&|config| { let tempdir = TempDir::new("rustup").unwrap(); config.change_dir(tempdir.path(), &|| { expect_ok(config, &["rustup", "default", "nightly"]); expect_ok(config, &["rustup", "override", "add", "beta"]); expect_ok(config, &["rustup", "override", "remove"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); }); } #[test] fn remove_override_with_multiple_overrides() { setup(&|config| { let tempdir1 = TempDir::new("rustup").unwrap(); let tempdir2 = TempDir::new("rustup").unwrap(); expect_ok(config, &["rustup", "default", "nightly"]); config.change_dir(tempdir1.path(), &|| { expect_ok(config, &["rustup", "override", "add", "beta"]); }); config.change_dir(tempdir2.path(), &|| { expect_ok(config, &["rustup", "override", "add", "stable"]); }); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); config.change_dir(tempdir1.path(), &|| { expect_ok(config, &["rustup", "override", "remove"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); config.change_dir(tempdir2.path(), &|| { expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); }); } #[test] fn no_update_on_channel_when_date_has_not_changed() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_stdout_ok(config, &["rustup", "update", "nightly"], "unchanged"); }); } #[test] fn update_on_channel_when_date_has_changed() { clitools::setup(Scenario::ArchivesV1, &|config| { set_current_dist_date(config, "2015-01-01"); expect_ok(config, &["rustup", "default", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-1"); set_current_dist_date(config, "2015-01-02"); expect_ok(config, &["rustup", "update", "nightly"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-n-2"); }); } #[test] fn run_command() { setup(&|config| { expect_ok(config, &["rustup", "update", "nightly"]); expect_ok(config, &["rustup", "default", "beta"]); expect_stdout_ok(config, &["rustup", "run", "nightly", 
"rustc" , "--version"], "hash-n-2"); }); } #[test] fn remove_toolchain_then_add_again() { setup(&|config| { expect_ok(config, &["rustup", "default", "beta"]); expect_ok(config, &["rustup", "toolchain", "remove", "beta"]); expect_ok(config, &["rustup", "update", "beta"]); expect_ok(config, &["rustc", "--version"]); }); }
fn install_toolchain_from_version() { setup(&|config| { expect_ok(config, &["rustup", "default" , "1.1.0"]); expect_stdout_ok(config, &["rustc", "--version"], "hash-s-2"); }); }
function_block-full_function
[ { "content": "pub fn rustc_version(toolchain: &Toolchain) -> String {\n\n if toolchain.exists() {\n\n let rustc_path = toolchain.binary_file(\"rustc\");\n\n if utils::is_file(&rustc_path) {\n\n let mut cmd = Command::new(&rustc_path);\n\n cmd.arg(\"--version\");\n\n cmd.stdin(Stdio::null());\n\n cmd.stdout(Stdio::piped());\n\n cmd.stderr(Stdio::piped());\n\n toolchain.set_ldpath(&mut cmd);\n\n\n\n // some toolchains are faulty with some combinations of platforms and\n\n // may fail to launch but also to timely terminate.\n\n // (known cases include Rust 1.3.0 through 1.10.0 in recent macOS Sierra.)\n\n // we guard against such cases by enforcing a reasonable timeout to read.\n\n let mut line1 = None;\n\n if let Ok(mut child) = cmd.spawn() {\n\n let timeout = Duration::new(3, 0);\n\n match child.wait_timeout(timeout) {\n\n Ok(Some(status)) if status.success() => {\n", "file_path": "src/rustup-cli/common.rs", "rank": 0, "score": 316452.52119958797 }, { "content": "pub fn setup(f: &Fn(&Config)) {\n\n clitools::setup(Scenario::ArchivesV2, &|config| {\n\n f(config);\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 1, "score": 314291.5435607604 }, { "content": "/// Run this to create the test environment containing rustup, and\n\n/// a mock dist server.\n\npub fn setup(s: Scenario, f: &Fn(&mut Config)) {\n\n // Unset env variables that will break our testing\n\n env::remove_var(\"RUSTUP_TOOLCHAIN\");\n\n env::remove_var(\"SHELL\");\n\n env::remove_var(\"ZDOTDIR\");\n\n\n\n let current_exe_path = env::current_exe().map(PathBuf::from).unwrap();\n\n let mut exe_dir = current_exe_path.parent().unwrap();\n\n if exe_dir.ends_with(\"deps\") {\n\n exe_dir = exe_dir.parent().unwrap();\n\n }\n\n let test_dir = exe_dir.parent().unwrap().join(\"tests\");\n\n fs::create_dir_all(&test_dir).unwrap();\n\n\n\n let exedir = TempDir::new_in(&test_dir, \"rustup-exe\").unwrap();\n\n let distdir = TempDir::new_in(&test_dir, \"rustup-dist\").unwrap();\n\n let rustupdir = 
TempDir::new_in(&test_dir, \"rustup\").unwrap();\n\n let customdir = TempDir::new_in(&test_dir, \"rustup-custom\").unwrap();\n\n let cargodir = TempDir::new_in(&test_dir, \"rustup-cargo\").unwrap();\n\n let homedir = TempDir::new_in(&test_dir, \"rustup-home\").unwrap();\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 2, "score": 307639.30260304984 }, { "content": "pub fn setup(f: &Fn(&Config)) {\n\n clitools::setup(Scenario::SimpleV2, &|config| {\n\n // Lock protects environment variables\n\n lazy_static! {\n\n static ref LOCK: Mutex<()> = Mutex::new(());\n\n }\n\n let _g = LOCK.lock();\n\n\n\n // An windows these tests mess with the user's PATH. Save\n\n // and restore them here to keep from trashing things.\n\n let saved_path = get_path();\n\n let _g = scopeguard::guard(saved_path, |p| restore_path(p));\n\n\n\n f(config);\n\n });\n\n}\n\n\n", "file_path": "tests/cli-inst-interactive.rs", "rank": 3, "score": 277968.2263507049 }, { "content": "pub fn setup(f: &Fn(&Config)) {\n\n clitools::setup(Scenario::SimpleV2, &|config| {\n\n // Lock protects environment variables\n\n lazy_static! {\n\n static ref LOCK: Mutex<()> = Mutex::new(());\n\n }\n\n let _g = LOCK.lock();\n\n\n\n // An windows these tests mess with the user's PATH. 
Save\n\n // and restore them here to keep from trashing things.\n\n let saved_path = get_path();\n\n let _g = scopeguard::guard(saved_path, |p| restore_path(p));\n\n\n\n f(config);\n\n });\n\n}\n\n\n", "file_path": "tests/cli-self-upd.rs", "rank": 4, "score": 277968.2263507049 }, { "content": "pub fn setup(f: &Fn(&mut Config)) {\n\n clitools::setup(Scenario::SimpleV2, f);\n\n}\n\n\n", "file_path": "tests/cli-misc.rs", "rank": 5, "score": 271611.83877611125 }, { "content": "pub fn setup(f: &Fn(&mut Config)) {\n\n clitools::setup(Scenario::SimpleV2, f);\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 6, "score": 271611.83877611125 }, { "content": "#[test]\n\nfn rustc_no_default_toolchain() {\n\n setup(&|config| {\n\n expect_err(config, &[\"rustc\"],\n\n \"no default toolchain configured\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 8, "score": 269513.335808416 }, { "content": "pub fn update_setup(f: &Fn(&Config, &Path)) {\n\n setup(&|config| {\n\n\n\n // Create a mock self-update server\n\n let ref self_dist_tmp = TempDir::new(\"self_dist\").unwrap();\n\n let ref self_dist = self_dist_tmp.path();\n\n\n\n let ref trip = this_host_triple();\n\n let ref dist_dir = self_dist.join(&format!(\"archive/{}/{}\", TEST_VERSION, trip));\n\n let ref dist_exe = dist_dir.join(&format!(\"rustup-init{}\", EXE_SUFFIX));\n\n let ref rustup_bin = config.exedir.join(&format!(\"rustup-init{}\", EXE_SUFFIX));\n\n\n\n fs::create_dir_all(dist_dir).unwrap();\n\n output_release_file(self_dist, \"1\", TEST_VERSION);\n\n fs::copy(rustup_bin, dist_exe).unwrap();\n\n // Modify the exe so it hashes different\n\n raw::append_file(dist_exe, \"\").unwrap();\n\n\n\n let ref root_url = format!(\"file://{}\", self_dist.display());\n\n env::set_var(\"RUSTUP_UPDATE_ROOT\", root_url);\n\n\n\n f(config, self_dist);\n\n });\n\n}\n\n\n", "file_path": "tests/cli-self-upd.rs", "rank": 10, "score": 264296.4792373568 }, { "content": "pub fn expect_ok(config: &Config, args: &[&str]) {\n\n 
let out = run(config, args[0], &args[1..], &[]);\n\n if !out.ok {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", true);\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 11, "score": 258066.7656147881 }, { "content": "pub fn expect_err(config: &Config, args: &[&str], expected: &str) {\n\n let out = run(config, args[0], &args[1..], &[]);\n\n if out.ok || !out.stderr.contains(expected) {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", false);\n\n print_indented(\"expected.stderr.contains\", expected);\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 12, "score": 241484.81004502956 }, { "content": "pub fn list_components(toolchain: &Toolchain) -> Result<()> {\n\n let mut t = term2::stdout();\n\n for component in try!(toolchain.list_components()) {\n\n let name = component.component.name();\n\n if component.required {\n\n let _ = t.attr(term2::Attr::Bold);\n\n let _ = writeln!(t, \"{} (default)\", name);\n\n let _ = t.reset();\n\n } else if component.installed {\n\n let _ = t.attr(term2::Attr::Bold);\n\n let _ = writeln!(t, \"{} (installed)\", name);\n\n let _ = t.reset();\n\n } else if component.available {\n\n let _ = writeln!(t, \"{}\", name);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rustup-cli/common.rs", "rank": 13, "score": 239392.63217282988 }, { "content": "pub fn list_targets(toolchain: &Toolchain) -> Result<()> {\n\n let mut t = term2::stdout();\n\n for component in try!(toolchain.list_components()) {\n\n if component.component.pkg == \"rust-std\" {\n\n let target = component.component.target.as_ref().expect(\"rust-std should have a target\");\n\n if component.required {\n\n let _ = t.attr(term2::Attr::Bold);\n\n let _ = writeln!(t, \"{} (default)\", target);\n\n let _ = t.reset();\n\n } else if component.installed {\n\n let _ = t.attr(term2::Attr::Bold);\n\n let _ = writeln!(t, \"{} (installed)\", target);\n\n let _ = 
t.reset();\n\n } else if component.available {\n\n let _ = writeln!(t, \"{}\", target);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rustup-cli/common.rs", "rank": 14, "score": 239392.63217282988 }, { "content": "pub fn expect_stdout_ok(config: &Config, args: &[&str], expected: &str) {\n\n let out = run(config, args[0], &args[1..], &[]);\n\n if !out.ok || !out.stdout.contains(expected) {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", true);\n\n print_indented(\"expected.stdout.contains\", expected);\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 15, "score": 238745.66149492862 }, { "content": "pub fn expect_stderr_ok(config: &Config, args: &[&str], expected: &str) {\n\n let out = run(config, args[0], &args[1..], &[]);\n\n if !out.ok || !out.stderr.contains(expected) {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", true);\n\n print_indented(\"expected.stderr.contains\", expected);\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 16, "score": 238745.66149492862 }, { "content": "pub fn version() -> &'static str {\n\n concat!(env!(\"CARGO_PKG_VERSION\"), include_str!(concat!(env!(\"OUT_DIR\"), \"/commit-info.txt\")))\n\n}\n\n\n\n\n", "file_path": "src/rustup-cli/common.rs", "rank": 17, "score": 230251.00709860842 }, { "content": "#[test]\n\nfn default() {\n\n setup(&|config| {\n\n expect_ok_ex(config, &[\"rustup\", \"default\", \"nightly\"],\n\nfor_host!(r\"\n\n nightly-{0} installed - 1.3.0 (hash-n-2)\n\n\n\n\"),\n\nfor_host!(r\"info: syncing channel updates for 'nightly-{0}'\n\ninfo: latest update on 2015-01-02, rust version 1.3.0\n\ninfo: downloading component 'rust-std'\n\ninfo: downloading component 'rustc'\n\ninfo: downloading component 'cargo'\n\ninfo: downloading component 'rust-docs'\n\ninfo: installing component 'rust-std'\n\ninfo: installing component 'rustc'\n\ninfo: installing component 'cargo'\n\ninfo: installing 
component 'rust-docs'\n\ninfo: default toolchain set to 'nightly-{0}'\n\n\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 18, "score": 226812.81369469728 }, { "content": "pub fn filter_file<F: FnMut(&str) -> bool>(src: &Path,\n\n dest: &Path,\n\n mut filter: F)\n\n -> io::Result<usize> {\n\n let src_file = try!(fs::File::open(src));\n\n let dest_file = try!(fs::File::create(dest));\n\n\n\n let mut reader = io::BufReader::new(src_file);\n\n let mut writer = io::BufWriter::new(dest_file);\n\n let mut removed = 0;\n\n\n\n for result in io::BufRead::lines(&mut reader) {\n\n let line = try!(result);\n\n if filter(&line) {\n\n try!(writeln!(&mut writer, \"{}\", &line));\n\n } else {\n\n removed += 1;\n\n }\n\n }\n\n\n\n try!(writer.flush());\n\n\n\n Ok(removed)\n\n}\n\n\n", "file_path": "src/rustup-utils/src/raw.rs", "rank": 19, "score": 226762.09633703146 }, { "content": "pub fn expect_err_ex(config: &Config, args: &[&str],\n\n stdout: &str, stderr: &str) {\n\n let out = run(config, args[0], &args[1..], &[]);\n\n if out.ok || out.stdout != stdout || out.stderr != stderr {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", false);\n\n print_indented(\"expected.stdout\", stdout);\n\n print_indented(\"expected.stderr\", stderr);\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 20, "score": 225984.9308113456 }, { "content": "pub fn expect_ok_ex(config: &Config, args: &[&str],\n\n stdout: &str, stderr: &str) {\n\n let out = run(config, args[0], &args[1..], &[]);\n\n if !out.ok || out.stdout != stdout || out.stderr != stderr {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", true);\n\n print_indented(\"expected.stdout\", stdout);\n\n print_indented(\"expected.stderr\", stderr);\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 21, "score": 225984.9308113456 }, { "content": "pub fn env(config: &Config, cmd: &mut Command) {\n\n // Ensure PATH is 
prefixed with the rustup-exe directory\n\n let prev_path = env::var_os(\"PATH\");\n\n let mut new_path = config.exedir.clone().into_os_string();\n\n if let Some(ref p) = prev_path {\n\n new_path.push(if cfg!(windows) { \";\" } else { \":\" });\n\n new_path.push(p);\n\n }\n\n cmd.env(\"PATH\", new_path);\n\n cmd.env(\"RUSTUP_HOME\", config.rustupdir.to_string_lossy().to_string());\n\n cmd.env(\"RUSTUP_DIST_SERVER\", format!(\"file://{}\", config.distdir.to_string_lossy()));\n\n cmd.env(\"CARGO_HOME\", config.cargodir.to_string_lossy().to_string());\n\n cmd.env(\"RUSTUP_OVERRIDE_HOST_TRIPLE\", this_host_triple());\n\n\n\n // These are used in some installation tests that unset RUSTUP_HOME/CARGO_HOME\n\n cmd.env(\"HOME\", config.homedir.to_string_lossy().to_string());\n\n cmd.env(\"USERPROFILE\", config.homedir.to_string_lossy().to_string());\n\n\n\n // Setting HOME will confuse the sudo check for rustup-init. Override it\n\n cmd.env(\"RUSTUP_INIT_SKIP_SUDO_CHECK\", \"yes\");\n\n\n\n // Skip the MSVC warning check since it's environment dependent\n\n cmd.env(\"RUSTUP_INIT_SKIP_MSVC_CHECK\", \"yes\");\n\n\n\n // The test environment may interfere with checking the PATH for the existence of rustc or\n\n // cargo, so we disable that check globally\n\n cmd.env(\"RUSTUP_INIT_SKIP_PATH_CHECK\", \"yes\");\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 22, "score": 224949.5423031463 }, { "content": "/// Change the current distribution manifest to a particular date\n\npub fn set_current_dist_date(config: &Config, date: &str) {\n\n let ref url = Url::from_file_path(&config.distdir).unwrap();\n\n for channel in &[\"nightly\", \"beta\", \"stable\"] {\n\n change_channel_date(url, channel, date);\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 23, "score": 223130.74177164622 }, { "content": "pub fn filter_file<F: FnMut(&str) -> bool>(name: &'static str,\n\n src: &Path,\n\n dest: &Path,\n\n filter: F)\n\n -> Result<usize> {\n\n 
raw::filter_file(src, dest, filter).chain_err(|| {\n\n ErrorKind::FilteringFile {\n\n name: name,\n\n src: PathBuf::from(src),\n\n dest: PathBuf::from(dest),\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/rustup-utils/src/utils.rs", "rank": 24, "score": 219543.3216793071 }, { "content": "#[test]\n\nfn list_default_toolchain() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n expect_ok_ex(config, &[\"rustup\", \"toolchain\", \"list\"],\n\nfor_host!(r\"nightly-{0} (default)\n\n\"),\n\nr\"\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 25, "score": 218550.83937717177 }, { "content": "#[test]\n\nfn show_toolchain_default() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n expect_ok_ex(config, &[\"rustup\", \"show\"],\n\nfor_host!(r\"Default host: {0}\n\n\n\nnightly-{0} (default)\n\n1.3.0 (hash-n-2)\n\n\"),\n\nr\"\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 26, "score": 218550.83937717177 }, { "content": "pub fn expect_timeout_ok(config: &Config, timeout: Duration, args: &[&str]) {\n\n let mut child = cmd(config, args[0], &args[1..])\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn().unwrap();\n\n\n\n match child.wait_timeout(timeout).unwrap() {\n\n Some(status) => {\n\n assert!(status.success(), \"not ok {:?}\", args);\n\n }\n\n None => {\n\n // child hasn't exited yet\n\n child.kill().unwrap();\n\n panic!(\"command timed out: {:?}\", args);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 27, "score": 212747.07173298235 }, { "content": "pub fn expect_not_stdout_ok(config: &Config, args: &[&str], expected: &str) {\n\n let out = run(config, args[0], &args[1..], &[]);\n\n if !out.ok || out.stdout.contains(expected) {\n\n print_command(args, &out);\n\n println!(\"expected.ok: {}\", true);\n\n print_indented(\"expected.stdout.does_not_contain\", expected);\n\n panic!();\n\n }\n\n}\n\n\n", 
"file_path": "src/rustup-mock/src/clitools.rs", "rank": 28, "score": 212747.07173298235 }, { "content": "pub fn cmd(config: &Config, name: &str, args: &[&str]) -> Command {\n\n let exe_path = config.exedir.join(format!(\"{}{}\", name, EXE_SUFFIX));\n\n let mut cmd = Command::new(exe_path);\n\n cmd.args(args);\n\n cmd.current_dir(&*config.workdir.borrow());\n\n env(config, &mut cmd);\n\n cmd\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 29, "score": 212181.13737281805 }, { "content": "fn setup(f: &Fn(&mut Config)) {\n\n clitools::setup(Scenario::SimpleV2, f);\n\n}\n\n\n", "file_path": "tests/cli-exact.rs", "rank": 30, "score": 211455.0856214904 }, { "content": "pub fn match_file<T, F: FnMut(&str) -> Option<T>>(src: &Path, mut f: F) -> io::Result<Option<T>> {\n\n let src_file = try!(fs::File::open(src));\n\n\n\n let mut reader = io::BufReader::new(src_file);\n\n\n\n for result in io::BufRead::lines(&mut reader) {\n\n let line = try!(result);\n\n if let Some(r) = f(&line) {\n\n return Ok(Some(r));\n\n }\n\n }\n\n\n\n Ok(None)\n\n}\n\n\n", "file_path": "src/rustup-utils/src/raw.rs", "rank": 31, "score": 207703.98833896808 }, { "content": "pub fn match_file<T, F: FnMut(&str) -> Option<T>>(name: &'static str,\n\n src: &Path,\n\n f: F)\n\n -> Result<Option<T>> {\n\n raw::match_file(src, f).chain_err(|| {\n\n ErrorKind::ReadingFile {\n\n name: name,\n\n path: PathBuf::from(src),\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/rustup-utils/src/utils.rs", "rank": 32, "score": 206671.9956744018 }, { "content": "#[test]\n\nfn rustc_with_bad_rustup_toolchain_env_var() {\n\n setup(&|config| {\n\n let out = run(config, \"rustc\", &[], &[(\"RUSTUP_TOOLCHAIN\", \"bogus\")]);\n\n assert!(!out.ok);\n\n assert!(out.stderr.contains(\"toolchain 'bogus' is not installed\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-misc.rs", "rank": 33, "score": 203711.0374905791 }, { "content": "#[test]\n\n#[ignore(windows)] // FIXME Windows shows UNC paths\n\nfn 
show_toolchain_version_nested_file_override() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"stable\"]);\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"install\", \"nightly\"]);\n\n\n\n let cwd = config.current_dir();\n\n let toolchain_file = cwd.join(\"rust-toolchain\");\n\n\n\n raw::write_file(&toolchain_file, \"nightly\").unwrap();\n\n\n\n let subdir = cwd.join(\"foo\");\n\n\n\n fs::create_dir_all(&subdir).unwrap();\n\n config.change_dir(&subdir, &|| {\n\n expect_ok_ex(config, &[\"rustup\", \"show\"],\n\n &format!(r\"Default host: {0}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 34, "score": 203692.44646352125 }, { "content": "pub fn confirm(question: &str, default: bool) -> Result<bool> {\n\n print!(\"{} \", question);\n\n let _ = std::io::stdout().flush();\n\n let input = try!(read_line());\n\n\n\n let r = match &*input {\n\n \"y\" | \"Y\" => true,\n\n \"n\" | \"N\" => false,\n\n \"\" => default,\n\n _ => false,\n\n };\n\n\n\n println!(\"\");\n\n\n\n Ok(r)\n\n}\n\n\n\npub enum Confirm {\n\n Yes, No, Advanced\n\n}\n\n\n", "file_path": "src/rustup-cli/common.rs", "rank": 35, "score": 198791.09545078114 }, { "content": "pub fn question_bool(question: &str, default: bool) -> Result<bool> {\n\n println!(\"{}\", question);\n\n\n\n let _ = std::io::stdout().flush();\n\n let input = try!(read_line());\n\n\n\n println!(\"\");\n\n\n\n if input.is_empty() {\n\n Ok(default)\n\n } else {\n\n match &*input {\n\n \"y\" | \"Y\" | \"yes\" => Ok(true),\n\n \"n\" | \"N\" | \"no\" => Ok(false),\n\n _ => Ok(default)\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "src/rustup-cli/common.rs", "rank": 36, "score": 195708.46764704282 }, { "content": "pub fn question_str(question: &str, default: &str) -> Result<String> {\n\n println!(\"{}\", question);\n\n let _ = std::io::stdout().flush();\n\n let input = try!(read_line());\n\n\n\n println!(\"\");\n\n\n\n if input.is_empty() {\n\n Ok(default.to_string())\n\n } else {\n\n Ok(input)\n\n }\n\n}\n\n\n", 
"file_path": "src/rustup-cli/common.rs", "rank": 37, "score": 195708.46764704282 }, { "content": "pub fn run(config: &Config, name: &str, args: &[&str], env: &[(&str, &str)]) -> SanitizedOutput {\n\n let mut cmd = cmd(config, name, args);\n\n for env in env {\n\n cmd.env(env.0, env.1);\n\n }\n\n let out = cmd.output().expect(\"failed to run test command\");\n\n\n\n SanitizedOutput {\n\n ok: out.status.success(),\n\n stdout: String::from_utf8(out.stdout).unwrap(),\n\n stderr: String::from_utf8(out.stderr).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "src/rustup-mock/src/clitools.rs", "rank": 38, "score": 193499.34632887435 }, { "content": "pub fn compute_rustc_percentiles(values: &[u64]) -> RustcStatistics {\n\n RustcStatistics {\n\n rustc_execution_count: (values.len() as u32),\n\n compile_time_ms_total: values.iter().fold(0, |sum, val| sum + val),\n\n compile_time_ms_mean: mean(values),\n\n compile_time_ms_ntile_75: ntile(75, values),\n\n compile_time_ms_ntile_90: ntile(90, values),\n\n compile_time_ms_ntile_95: ntile(95, values),\n\n compile_time_ms_ntile_99: ntile(99, values),\n\n compile_time_ms_stdev: stdev(values),\n\n exit_codes_with_count: HashMap::new(),\n\n error_codes_with_counts: HashMap::new()\n\n }\n\n}\n\n\n", "file_path": "src/rustup/telemetry_analysis.rs", "rank": 39, "score": 191935.5515560305 }, { "content": "#[test]\n\nfn default() {\n\n setup(&|config| {\n\n expect_ok_ex(config, &[\"rustup\", \"default\", \"nightly\"],\n\nfor_host!(r\"\n\n nightly-{0} installed - 1.3.0 (hash-n-2)\n\n\n\n\"),\n\nfor_host!(r\"info: syncing channel updates for 'nightly-{0}'\n\ninfo: latest update on 2015-01-02, rust version 1.3.0\n\ninfo: downloading component 'rust-std'\n\ninfo: downloading component 'rustc'\n\ninfo: downloading component 'cargo'\n\ninfo: downloading component 'rust-docs'\n\ninfo: installing component 'rust-std'\n\ninfo: installing component 'rustc'\n\ninfo: installing component 'cargo'\n\ninfo: installing component 'rust-docs'\n\ninfo: default 
toolchain set to 'nightly-{0}'\n\n\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-exact.rs", "rank": 40, "score": 191607.25044317497 }, { "content": "pub fn ensure_dir_exists<P: AsRef<Path>, F: FnOnce(&Path)>(path: P,\n\n callback: F)\n\n -> io::Result<bool> {\n\n if !is_directory(path.as_ref()) {\n\n callback(path.as_ref());\n\n fs::create_dir_all(path.as_ref()).map(|()| true)\n\n } else {\n\n Ok(false)\n\n }\n\n}\n\n\n", "file_path": "src/rustup-utils/src/raw.rs", "rank": 41, "score": 189395.09143857914 }, { "content": "#[test]\n\nfn rustup_stable() {\n\n setup(&|config| {\n\n set_current_dist_date(config, \"2015-01-01\");\n\n expect_ok(config, &[\"rustup\", \"update\", \"stable\"]);\n\n set_current_dist_date(config, \"2015-01-02\");\n\n expect_ok_ex(config, &[\"rustup\", \"update\", \"--no-self-update\"],\n\nfor_host!(r\"\n\n stable-{0} updated - 1.1.0 (hash-s-2)\n\n\n\n\"),\n\nfor_host!(r\"info: syncing channel updates for 'stable-{0}'\n\ninfo: latest update on 2015-01-02, rust version 1.1.0\n\ninfo: downloading component 'rust-std'\n\ninfo: downloading component 'rustc'\n\ninfo: downloading component 'cargo'\n\ninfo: downloading component 'rust-docs'\n\ninfo: removing component 'rust-std'\n\ninfo: removing component 'rustc'\n\ninfo: removing component 'cargo'\n\ninfo: removing component 'rust-docs'\n\ninfo: installing component 'rust-std'\n\ninfo: installing component 'rustc'\n\ninfo: installing component 'cargo'\n\ninfo: installing component 'rust-docs'\n\n\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 42, "score": 188964.97307631816 }, { "content": "pub fn list_toolchains(cfg: &Cfg) -> Result<()> {\n\n let toolchains = try!(cfg.list_toolchains());\n\n\n\n if toolchains.is_empty() {\n\n println!(\"no installed toolchains\");\n\n } else {\n\n if let Ok(Some(def_toolchain)) = cfg.find_default() {\n\n for toolchain in toolchains {\n\n let if_default = if def_toolchain.name() == &*toolchain {\n\n \" (default)\"\n\n } else {\n\n \"\"\n\n 
};\n\n println!(\"{}{}\", &toolchain, if_default);\n\n }\n\n\n\n } else {\n\n for toolchain in toolchains {\n\n println!(\"{}\", &toolchain);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rustup-cli/common.rs", "rank": 43, "score": 186297.67034711223 }, { "content": "#[test]\n\nfn install_toolchain_from_version() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\" , \"1.1.0\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"], \"hash-s-2\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 45, "score": 185334.02979565837 }, { "content": "// Run without setting RUSTUP_HOME, with setting HOME and USERPROFILE\n\nfn run_no_home(config: &Config, args: &[&str], env: &[(&str, &str)]) -> process::Output {\n\n let home_dir_str = &format!(\"{}\", config.homedir.display());\n\n let mut cmd = clitools::cmd(config, args[0], &args[1..]);\n\n clitools::env(config, &mut cmd);\n\n cmd.env_remove(\"RUSTUP_HOME\");\n\n cmd.env(\"HOME\", home_dir_str);\n\n cmd.env(\"USERPROFILE\", home_dir_str);\n\n for &(name, val) in env {\n\n cmd.env(name, val);\n\n }\n\n let out = cmd.output().unwrap();\n\n assert!(out.status.success());\n\n\n\n out\n\n}\n\n\n\n// Rename ~/.multirust to ~/.rustup\n", "file_path": "tests/cli-rustup.rs", "rank": 46, "score": 185307.28222611162 }, { "content": "#[test]\n\nfn default_existing_toolchain() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"update\", \"nightly\"]);\n\n expect_stderr_ok(config, &[\"rustup\", \"default\", \"nightly\"],\n\n for_host!(\"using existing install for 'nightly-{0}'\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 47, "score": 185269.79958908964 }, { "content": "#[test]\n\nfn default_invalid_toolchain() {\n\n setup(&|config| {\n\n expect_err_ex(config, &[\"rustup\", \"default\", \"nightly-2016-03-1\"],\n\nr\"\",\n\nr\"info: syncing channel updates for 'nightly-2016-03-1'\n\ninfo: latest update on 2015-01-02, rust version 1.3.0\n\nerror: target not 
found: '2016-03-1'\n\n\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-exact.rs", "rank": 48, "score": 185269.79958908964 }, { "content": "#[test]\n\nfn rustup_stable_no_change() {\n\n setup(&|config| {\n\n set_current_dist_date(config, \"2015-01-01\");\n\n expect_ok(config, &[\"rustup\", \"update\", \"stable\"]);\n\n expect_ok_ex(config, &[\"rustup\", \"update\", \"--no-self-update\"],\n\nfor_host!(r\"\n\n stable-{0} unchanged - 1.0.0 (hash-s-1)\n\n\n\n\"),\n\nfor_host!(r\"info: syncing channel updates for 'stable-{0}'\n\n\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 50, "score": 184827.45755569168 }, { "content": "#[test]\n\nfn install_toolchain_from_channel() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"], \"hash-n-2\");\n\n expect_ok(config, &[\"rustup\", \"default\", \"beta\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"], \"hash-b-2\");\n\n expect_ok(config, &[\"rustup\", \"default\", \"stable\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"], \"hash-s-2\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 52, "score": 184092.66964137313 }, { "content": "// Creates a mock dist server populated with some test data\n\npub fn create_mock_dist_server(path: &Path,\n\n edit: Option<&Fn(&str, &mut MockPackage)>) -> MockDistServer {\n\n MockDistServer {\n\n path: path.to_owned(),\n\n channels: vec![\n\n create_mock_channel(\"nightly\", \"2016-02-01\", edit),\n\n create_mock_channel(\"nightly\", \"2016-02-02\", edit),\n\n ]\n\n }\n\n}\n\n\n", "file_path": "src/rustup-dist/tests/dist.rs", "rank": 53, "score": 181189.4811797401 }, { "content": "#[test]\n\nfn set_nightly_toolchain() {\n\n setup(&|config| {\n\n let out = run_input(config, &[\"rustup-init\"],\n\n \"2\\n\\nnightly\\n\\n\\n\\n\");\n\n assert!(out.ok);\n\n\n\n expect_stdout_ok(config, &[\"rustup\", \"show\"], \"nightly\");\n\n });\n\n}\n\n\n", 
"file_path": "tests/cli-inst-interactive.rs", "rank": 54, "score": 181057.54436567187 }, { "content": "#[test]\n\nfn install_override_toolchain_from_version() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"override\", \"add\", \"1.1.0\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"],\n\n \"hash-s-2\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 56, "score": 180980.74983873867 }, { "content": "#[test]\n\nfn with_non_default_toolchain() {\n\n setup(&|config| {\n\n let out = run_input(config, &[\"rustup-init\", \"--default-toolchain=nightly\"], \"\\n\\n\");\n\n assert!(out.ok);\n\n\n\n expect_stdout_ok(config, &[\"rustup\", \"show\"], \"nightly\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-inst-interactive.rs", "rank": 57, "score": 180918.2285065585 }, { "content": "pub fn format_path_for_display(path: &str) -> String {\n\n let unc_present = path.find(r\"\\\\?\\\");\n\n\n\n match unc_present {\n\n None => path.to_owned(),\n\n Some(_) => path[4..].to_owned(),\n\n }\n\n}\n\n\n\n/// Encodes a utf-8 string as a null-terminated UCS-2 string in bytes\n", "file_path": "src/rustup-utils/src/utils.rs", "rank": 58, "score": 180271.54138523055 }, { "content": "#[test]\n\n#[ignore(windows)] // FIXME Windows shows UNC paths\n\nfn show_toolchain_toolchain_file_override() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"stable\"]);\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"install\", \"nightly\"]);\n\n\n\n let cwd = config.current_dir();\n\n let toolchain_file = cwd.join(\"rust-toolchain\");\n\n\n\n raw::write_file(&toolchain_file, \"nightly\").unwrap();\n\n\n\n expect_ok_ex(config, &[\"rustup\", \"show\"],\n\n&format!(r\"Default host: {0}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 59, "score": 178839.5499039976 }, { "content": "#[test]\n\nfn set_nightly_toolchain_and_unset() {\n\n setup(&|config| {\n\n let out = run_input(config, &[\"rustup-init\"],\n\n 
\"2\\n\\nnightly\\n\\n2\\n\\nbeta\\n\\n\\n\\n\");\n\n assert!(out.ok);\n\n\n\n expect_stdout_ok(config, &[\"rustup\", \"show\"], \"beta\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-inst-interactive.rs", "rank": 60, "score": 176927.9130376738 }, { "content": "#[test]\n\nfn remove_default_toolchain_err_handling() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"remove\", \"nightly\"]);\n\n expect_err(config, &[\"rustc\"],\n\n for_host!(\"toolchain 'nightly-{0}' is not installed\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 61, "score": 176792.2076823784 }, { "content": "#[test]\n\nfn set_default_host() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"set\", \"default-host\", &this_host_triple()]);\n\n expect_stdout_ok(config, &[\"rustup\", \"show\"],\n\n for_host!(\"Default host: {0}\"));\n\n });\n\n}\n\n\n\n// #846\n", "file_path": "tests/cli-rustup.rs", "rank": 63, "score": 176405.91799978615 }, { "content": "#[test]\n\nfn show_toolchain_none() {\n\n setup(&|config| {\n\n expect_ok_ex(config, &[\"rustup\", \"show\"],\n\nfor_host!(r\"Default host: {0}\n\n\n\nno active toolchain\n\n\"),\n\nr\"\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 64, "score": 175882.44416182526 }, { "content": "#[test]\n\nfn proxy_toolchain_shorthand() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"stable\"]);\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"update\" , \"nightly\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"], \"hash-s-2\");\n\n expect_stdout_ok(config, &[\"rustc\", \"+stable\", \"--version\"], \"hash-s-2\");\n\n expect_stdout_ok(config, &[\"rustc\", \"+nightly\", \"--version\"], \"hash-n-2\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 65, "score": 175882.44416182526 }, { "content": "#[test]\n\nfn show_multiple_toolchains() {\n\n setup(&|config| {\n\n 
expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n expect_ok(config, &[\"rustup\", \"update\", \"stable\"]);\n\n expect_ok_ex(config, &[\"rustup\", \"show\"],\n\nfor_host!(r\"Default host: {0}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 66, "score": 175882.44416182526 }, { "content": "#[test]\n\nfn show_toolchain_env() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n let mut cmd = clitools::cmd(config, \"rustup\", &[\"show\"]);\n\n clitools::env(config, &mut cmd);\n\n cmd.env(\"RUSTUP_TOOLCHAIN\", \"nightly\");\n\n let out = cmd.output().unwrap();\n\n assert!(out.status.success());\n\n let stdout = String::from_utf8(out.stdout).unwrap();\n\n assert_eq!(&stdout, for_host!(r\"Default host: {0}\n\n\n\nnightly-{0} (environment override by RUSTUP_TOOLCHAIN)\n\n1.3.0 (hash-n-2)\n\n\"));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 67, "score": 175882.44416182526 }, { "content": "#[test]\n\n#[ignore(windows)] // FIXME Windows shows UNC paths\n\nfn show_toolchain_override() {\n\n setup(&|config| {\n\n let cwd = config.current_dir();\n\n expect_ok(config, &[\"rustup\", \"override\", \"add\", \"nightly\"]);\n\n expect_ok_ex(config, &[\"rustup\", \"show\"],\n\n&format!(r\"Default host: {0}\n\n\n\nnightly-{0} (directory override for '{1}')\n\n1.3.0 (hash-n-2)\n\n\", this_host_triple(), cwd.display()),\n\nr\"\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 68, "score": 175882.00985047338 }, { "content": "#[test]\n\n#[ignore(windows)] // FIXME Windows shows UNC paths\n\nfn show_toolchain_toolchain_file_override_not_installed() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"default\", \"stable\"]);\n\n\n\n let cwd = config.current_dir();\n\n let toolchain_file = cwd.join(\"rust-toolchain\");\n\n\n\n raw::write_file(&toolchain_file, \"nightly\").unwrap();\n\n\n\n // I'm not sure this should really be erroring when the toolchain\n\n // is not installed; just 
capturing the behavior.\n\n let mut cmd = clitools::cmd(config, \"rustup\", &[\"show\"]);\n\n clitools::env(config, &mut cmd);\n\n let out = cmd.output().unwrap();\n\n assert!(!out.status.success());\n\n let stderr = String::from_utf8(out.stderr).unwrap();\n\n assert!(stderr.starts_with(\n\n \"error: override toolchain 'nightly' is not installed\"));\n\n assert!(stderr.contains(\n\n &format!(\"the toolchain file at '{}' specifies an uninstalled toolchain\",\n\n toolchain_file.display())));\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 69, "score": 175329.513028831 }, { "content": "fn make_manifest_url(dist_server: &Url, toolchain: &ToolchainDesc) -> Result<Url> {\n\n let url = format!(\"{}/dist/channel-rust-{}.toml\", dist_server, toolchain.channel);\n\n\n\n Ok(Url::parse(&url).unwrap())\n\n}\n\n\n", "file_path": "src/rustup-dist/tests/dist.rs", "rank": 70, "score": 174686.3866807508 }, { "content": "fn uninstall(toolchain: &ToolchainDesc, prefix: &InstallPrefix, temp_cfg: &temp::Cfg,\n\n notify_handler: &Fn(Notification)) -> Result<()> {\n\n let trip = toolchain.target.clone();\n\n let manifestation = try!(Manifestation::open(prefix.clone(), trip));\n\n\n\n try!(manifestation.uninstall(temp_cfg, notify_handler.clone()));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rustup-dist/tests/dist.rs", "rank": 71, "score": 172459.08623096958 }, { "content": "pub fn create_mock_channel(channel: &str, date: &str,\n\n edit: Option<&Fn(&str, &mut MockPackage)>) -> MockChannel {\n\n // Put the date in the files so they can be differentiated\n\n let contents = Arc::new(date.as_bytes().to_vec());\n\n\n\n let rust_pkg = MockPackage {\n\n name: \"rust\",\n\n version: \"1.0.0\",\n\n targets: vec![\n\n MockTargetedPackage {\n\n target: \"x86_64-apple-darwin\".to_string(),\n\n available: true,\n\n components: vec![\n\n MockComponent {\n\n name: \"rustc\".to_string(),\n\n target: \"x86_64-apple-darwin\".to_string(),\n\n },\n\n MockComponent {\n\n name: 
\"rust-std\".to_string(),\n\n target: \"x86_64-apple-darwin\".to_string(),\n", "file_path": "src/rustup-dist/tests/dist.rs", "rank": 72, "score": 172307.68065600545 }, { "content": "#[test]\n\nfn toolchain_update_is_like_update() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"update\" , \"nightly\"]);\n\n expect_stdout_ok(config, &[\"rustup\", \"run\", \"nightly\", \"rustc\", \"--version\"],\n\n \"hash-n-2\");\n\n });\n\n}\n\n\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 73, "score": 171780.6280173888 }, { "content": "#[test]\n\nfn show_toolchain_override_not_installed() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"override\", \"add\", \"nightly\"]);\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"remove\", \"nightly\"]);\n\n // I'm not sure this should really be erroring when the toolchain\n\n // is not installed; just capturing the behavior.\n\n let mut cmd = clitools::cmd(config, \"rustup\", &[\"show\"]);\n\n clitools::env(config, &mut cmd);\n\n let out = cmd.output().unwrap();\n\n assert!(out.status.success());\n\n let stdout = String::from_utf8(out.stdout).unwrap();\n\n assert!(!stdout.contains(\"not a directory\"));\n\n assert!(Regex::new(r\"error: override toolchain 'nightly.*' is not installed, the directory override for '.*' specifies an uninstalled toolchain\").unwrap().is_match(&stdout))\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 74, "score": 171780.6280173888 }, { "content": "#[test]\n\nfn show_multiple_toolchains_and_targets() {\n\n if cfg!(target_os = \"linux\") && cfg!(target_arch = \"x86\") { return }\n\n\n\n clitools::setup(Scenario::MultiHost, &|config| {\n\n expect_ok(config, &[\"rustup\", \"default\",\n\n &format!(\"nightly-{}\", clitools::MULTI_ARCH1)]);\n\n expect_ok(config, &[\"rustup\", \"target\", \"add\", clitools::CROSS_ARCH2]);\n\n expect_ok(config, &[\"rustup\", \"update\",\n\n &format!(\"stable-{}\", clitools::MULTI_ARCH1)]);\n\n expect_ok_ex(config, 
&[\"rustup\", \"show\"],\n\n&format!(r\"Default host: {2}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 75, "score": 171780.6280173888 }, { "content": "#[test]\n\nfn show_toolchain_env_not_installed() {\n\n setup(&|config| {\n\n let mut cmd = clitools::cmd(config, \"rustup\", &[\"show\"]);\n\n clitools::env(config, &mut cmd);\n\n cmd.env(\"RUSTUP_TOOLCHAIN\", \"nightly\");\n\n let out = cmd.output().unwrap();\n\n // I'm not sure this should really be erroring when the toolchain\n\n // is not installed; just capturing the behavior.\n\n assert!(out.status.success());\n\n let stdout = String::from_utf8(out.stdout).unwrap();\n\n assert!(stdout.contains(\"override toolchain 'nightly' is not installed, the RUSTUP_TOOLCHAIN environment variable specifies an uninstalled toolchain\"));\n\n });\n\n}\n\n\n\n// #846\n", "file_path": "tests/cli-rustup.rs", "rank": 76, "score": 171780.6280173888 }, { "content": "#[test]\n\nfn toolchain_uninstall_is_like_uninstall() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"uninstall\", \"nightly\"]);\n\n let mut cmd = clitools::cmd(config, \"rustup\", &[\"show\"]);\n\n clitools::env(config, &mut cmd);\n\n let out = cmd.output().unwrap();\n\n assert!(out.status.success());\n\n let stdout = String::from_utf8(out.stdout).unwrap();\n\n assert!(!stdout.contains(\n\n for_host!(\"'nightly-2015-01-01-{}'\")));\n\n\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 77, "score": 171780.6280173888 }, { "content": "#[test]\n\nfn toolchain_install_is_like_update() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"install\" , \"nightly\"]);\n\n expect_stdout_ok(config, &[\"rustup\", \"run\", \"nightly\", \"rustc\", \"--version\"],\n\n \"hash-n-2\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 78, "score": 171780.6280173888 }, { "content": "#[test]\n\nfn install_sets_up_stable_unless_there_is_already_a_default() {\n\n setup(&|config| {\n\n expect_ok(config, 
&[\"rustup-init\", \"-y\"]);\n\n expect_ok(config, &[\"rustup\", \"default\", \"nightly\"]);\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"remove\", \"stable\"]);\n\n expect_ok(config, &[\"rustup-init\", \"-y\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"],\n\n \"hash-n-2\");\n\n expect_err(config, &[\"rustup\", \"run\", \"stable\", \"rustc\", \"--version\"],\n\n for_host!(\"toolchain 'stable-{0}' is not installed\"));\n\n });\n\n}\n\n\n\n// Installation used to be to ~/.multirust/bin instead of\n\n// ~/.cargo/bin. If those bins exist during installation they\n\n// should be deleted to avoid confusion.\n", "file_path": "tests/cli-self-upd.rs", "rank": 79, "score": 169737.43039847878 }, { "content": "fn parse_new_rustup_version(version: String) -> String {\n\n let re = Regex::new(r\"\\d+.\\d+.\\d+[0-9a-zA-Z-]*\").unwrap();\n\n let capture = re.captures(&version);\n\n let matched_version = match capture {\n\n Some(cap) => cap.get(0).unwrap().as_str(),\n\n None => \"(unknown)\"\n\n };\n\n String::from(matched_version)\n\n}\n\n\n", "file_path": "src/rustup-cli/self_update.rs", "rank": 80, "score": 169492.14909906947 }, { "content": "#[test]\n\nfn with_non_release_channel_non_default_toolchain() {\n\n setup(&|config| {\n\n let out = run_input(config, &[\"rustup-init\", \"--default-toolchain=nightly-2015-01-02\"],\n\n \"\\n\\n\");\n\n assert!(out.ok);\n\n\n\n expect_stdout_ok(config, &[\"rustup\", \"show\"], \"nightly\");\n\n expect_stdout_ok(config, &[\"rustup\", \"show\"], \"2015-01-02\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-inst-interactive.rs", "rank": 81, "score": 169150.1291485267 }, { "content": "pub fn main() -> Result<()> {\n\n try!(::self_update::cleanup_self_updater());\n\n\n\n let ref matches = cli().get_matches();\n\n let verbose = matches.is_present(\"verbose\");\n\n let ref cfg = try!(common::set_globals(verbose));\n\n\n\n if try!(maybe_upgrade_data(cfg, matches)) {\n\n return Ok(())\n\n }\n\n\n\n 
try!(cfg.check_metadata_version());\n\n\n\n match matches.subcommand() {\n\n (\"show\", Some(_)) => try!(show(cfg)),\n\n (\"install\", Some(m)) => try!(update(cfg, m)),\n\n (\"update\", Some(m)) => try!(update(cfg, m)),\n\n (\"uninstall\", Some(m)) => try!(toolchain_remove(cfg, m)),\n\n (\"default\", Some(m)) => try!(default_(cfg, m)),\n\n (\"toolchain\", Some(c)) => {\n", "file_path": "src/rustup-cli/rustup_mode.rs", "rank": 82, "score": 168931.57757001594 }, { "content": "#[test]\n\nfn fallback_cargo_calls_correct_rustc() {\n\n setup(&|config| {\n\n // Hm, this is the _only_ test that assumes that toolchain proxies\n\n // exist in CARGO_HOME. Adding that proxy here.\n\n let ref rustup_path = config.exedir.join(format!(\"rustup{}\", EXE_SUFFIX));\n\n let ref cargo_bin_path = config.cargodir.join(\"bin\");\n\n fs::create_dir_all(cargo_bin_path).unwrap();\n\n let ref rustc_path = cargo_bin_path.join(format!(\"rustc{}\", EXE_SUFFIX));\n\n fs::hard_link(rustup_path, rustc_path).unwrap();\n\n\n\n // Install a custom toolchain and a nightly toolchain for the cargo fallback\n\n let path = config.customdir.join(\"custom-1\");\n\n let path = path.to_string_lossy();\n\n expect_ok(config, &[\"rustup\", \"toolchain\", \"link\", \"custom\",\n\n &path]);\n\n expect_ok(config, &[\"rustup\", \"default\", \"custom\"]);\n\n expect_ok(config, &[\"rustup\", \"update\", \"nightly\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"],\n\n \"hash-c-1\");\n\n expect_stdout_ok(config, &[\"cargo\", \"--version\"],\n", "file_path": "tests/cli-rustup.rs", "rank": 83, "score": 168467.28633819162 }, { "content": "#[test]\n\nfn component_bad_version() {\n\n let pkgdir = TempDir::new(\"rustup\").unwrap();\n\n\n\n let mock = MockInstallerBuilder {\n\n components: vec![\n\n MockComponentBuilder {\n\n name: \"mycomponent\".to_string(),\n\n files: vec![MockFile::new(\"bin/foo\", b\"foo\")],\n\n },\n\n ],\n\n };\n\n\n\n mock.build(pkgdir.path());\n\n\n\n let instdir = 
TempDir::new(\"rustup\").unwrap();\n\n let prefix = InstallPrefix::from(instdir.path().to_owned());\n\n\n\n let tmpdir = TempDir::new(\"rustup\").unwrap();\n\n let tmpcfg = temp::Cfg::new(tmpdir.path().to_owned(), DEFAULT_DIST_SERVER, Box::new(|_| ()));\n\n let notify = |_: Notification| ();\n", "file_path": "src/rustup-dist/tests/install.rs", "rank": 84, "score": 168448.6590738217 }, { "content": "#[test]\n\nfn package_bad_version() {\n\n let tempdir = TempDir::new(\"rustup\").unwrap();\n\n\n\n let mock = MockInstallerBuilder {\n\n components: vec![\n\n MockComponentBuilder {\n\n name: \"mycomponent\".to_string(),\n\n files: vec![MockFile::new(\"bin/foo\", b\"foo\")],\n\n },\n\n ],\n\n };\n\n\n\n mock.build(tempdir.path());\n\n\n\n let mut ver = File::create(tempdir.path().join(\"rust-installer-version\")).unwrap();\n\n writeln!(ver, \"100\").unwrap();\n\n\n\n assert!(DirectoryPackage::new(tempdir.path().to_owned()).is_err());\n\n}\n\n\n", "file_path": "src/rustup-dist/tests/install.rs", "rank": 85, "score": 168448.6590738217 }, { "content": "#[test]\n\nfn set_default_host_invalid_triple() {\n\n setup(&|config| {\n\n expect_err(config, &[\"rustup\", \"set\", \"default-host\", \"foo\"],\n\n \"Invalid host triple\");\n\n });\n\n}\n\n\n\n// #422\n", "file_path": "tests/cli-rustup.rs", "rank": 86, "score": 168387.75804179956 }, { "content": "fn build_mock_rustc_installer(target: &str, version: &str, version_hash_: &str) -> MockInstallerBuilder {\n\n // For cross-host rustc's modify the version_hash so they can be identified from\n\n // test cases.\n\n let this_host = this_host_triple();\n\n let version_hash;\n\n if this_host != target {\n\n version_hash = format!(\"xxxx-{}\", &version_hash_[5..]);\n\n } else {\n\n version_hash = version_hash_.to_string();\n\n }\n\n\n\n MockInstallerBuilder {\n\n components: vec![MockComponentBuilder {\n\n name: \"rustc\".to_string(),\n\n files: mock_bin(\"rustc\", version, &version_hash),\n\n }],\n\n }\n\n}\n\n\n", "file_path": 
"src/rustup-mock/src/clitools.rs", "rank": 87, "score": 167741.00657555 }, { "content": "fn make_component_unavailable(config: &Config, name: &str, target: &TargetTriple) {\n\n use rustup_dist::manifest::Manifest;\n\n use rustup_mock::dist::create_hash;\n\n\n\n let ref manifest_path = config.distdir.join(\"dist/channel-rust-nightly.toml\");\n\n let ref manifest_str = rustup_utils::raw::read_file(manifest_path).unwrap();\n\n let mut manifest = Manifest::parse(manifest_str).unwrap();\n\n {\n\n let mut std_pkg = manifest.packages.get_mut(name).unwrap();\n\n let mut target_pkg = std_pkg.targets.get_mut(target).unwrap();\n\n target_pkg.bins = None;\n\n }\n\n let ref manifest_str = manifest.stringify();\n\n rustup_utils::raw::write_file(manifest_path, manifest_str).unwrap();\n\n\n\n // Have to update the hash too\n\n let ref hash_path = manifest_path.with_extension(\"toml.sha256\");\n\n println!(\"{}\", hash_path.display());\n\n create_hash(manifest_path, hash_path);\n\n}\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 88, "score": 166348.23390708072 }, { "content": "#[test]\n\nfn install_sets_up_stable_unless_a_different_default_is_requested() {\n\n setup(&|config| {\n\n expect_ok(config, &[\"rustup-init\", \"-y\", \"--default-toolchain\", \"nightly\"]);\n\n expect_stdout_ok(config, &[\"rustc\", \"--version\"],\n\n \"hash-n-2\");\n\n });\n\n}\n\n\n", "file_path": "tests/cli-self-upd.rs", "rank": 89, "score": 166177.91537291725 }, { "content": "#[test]\n\nfn uninstall_removes_config_file() {\n\n setup(None, false, &|url, toolchain, prefix, download_cfg, temp_cfg| {\n\n update_from_dist(url, toolchain, prefix, &[], &[], download_cfg, temp_cfg).unwrap();\n\n assert!(utils::path_exists(&prefix.manifest_file(\"multirust-config.toml\")));\n\n uninstall(toolchain, prefix, temp_cfg, &|_| ()).unwrap();\n\n assert!(!utils::path_exists(&prefix.manifest_file(\"multirust-config.toml\")));\n\n });\n\n}\n\n\n", "file_path": "src/rustup-dist/tests/dist.rs", "rank": 90, "score": 
164571.27831450332 }, { "content": "pub fn toolchain_sort<T: AsRef<str>>(v: &mut Vec<T>) {\n\n use semver::{Version, Identifier};\n\n\n\n fn special_version(ord: u64, s: &str) -> Version {\n\n Version {\n\n major: 0,\n\n minor: 0,\n\n patch: 0,\n\n pre: vec![Identifier::Numeric(ord), Identifier::AlphaNumeric(s.into())],\n\n build: vec![],\n\n }\n\n }\n\n\n\n fn toolchain_sort_key(s: &str) -> Version {\n\n if s.starts_with(\"stable\") {\n\n special_version(0, s)\n\n } else if s.starts_with(\"beta\") {\n\n special_version(1, s)\n\n } else if s.starts_with(\"nightly\") {\n\n special_version(2, s)\n", "file_path": "src/rustup-utils/src/utils.rs", "rank": 91, "score": 164413.1245050617 }, { "content": "// Convert the ~/.multirust folder to ~/.rustup while dealing with rustup.sh\n\n// metadata, which used to also live in ~/.rustup, but now lives in ~/rustup.sh.\n\npub fn do_rustup_home_upgrade() -> bool {\n\n\n\n fn rustup_home_is_set() -> bool {\n\n env::var_os(\"RUSTUP_HOME\").is_some()\n\n }\n\n\n\n fn rustup_dir() -> Option<PathBuf> {\n\n dot_dir(\".rustup\")\n\n }\n\n\n\n fn rustup_sh_dir() -> Option<PathBuf> {\n\n dot_dir(\".rustup.sh\")\n\n }\n\n\n\n fn multirust_dir() -> Option<PathBuf> {\n\n dot_dir(\".multirust\")\n\n }\n\n\n\n fn rustup_dir_exists() -> bool {\n\n rustup_dir().map(|p| p.exists()).unwrap_or(false)\n", "file_path": "src/rustup-utils/src/utils.rs", "rank": 92, "score": 163898.0333000131 }, { "content": "// Creates a ~/.rustup folder and a ~/.multirust symlink\n\npub fn create_rustup_home() -> Result<()> {\n\n // If there's an existing install, then try to upgrade\n\n do_rustup_home_upgrade();\n\n\n\n // If RUSTUP_HOME is set then don't make any assumptions about where it's\n\n // ok to put ~/.multirust\n\n if env::var_os(\"RUSTUP_HOME\").is_some() { return Ok(()) }\n\n\n\n let home = rustup_home_in_user_dir()?;\n\n fs::create_dir_all(&home)\n\n .chain_err(|| \"unable to create ~/.rustup\")?;\n\n\n\n // This is a temporary compatibility symlink\n\n 
create_legacy_multirust_symlink()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rustup-utils/src/utils.rs", "rank": 93, "score": 163892.79809327214 }, { "content": "active toolchain\n\n----------------\n\n\n\nnightly-{0} (overridden by '{1}')\n\n1.3.0 (hash-n-2)\n\n\n\n\", this_host_triple(), toolchain_file.display()),\n\n r\"\");\n\n });\n\n });\n\n}\n\n\n", "file_path": "tests/cli-rustup.rs", "rank": 94, "score": 163738.0439254858 }, { "content": "fn run_input(config: &Config, args: &[&str], input: &str) -> SanitizedOutput {\n\n let mut cmd = clitools::cmd(config, args[0], &args[1..]);\n\n clitools::env(config, &mut cmd);\n\n\n\n cmd.stdin(Stdio::piped());\n\n cmd.stdout(Stdio::piped());\n\n cmd.stderr(Stdio::piped());\n\n let mut child = cmd.spawn().unwrap();\n\n\n\n child.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();\n\n let out = child.wait_with_output().unwrap();\n\n\n\n SanitizedOutput {\n\n ok: out.status.success(),\n\n stdout: String::from_utf8(out.stdout).unwrap(),\n\n stderr: String::from_utf8(out.stderr).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "tests/cli-inst-interactive.rs", "rank": 95, "score": 161921.78282805055 }, { "content": "//! Yet more cli test cases. These are testing that the output\n\n//! is exactly as expected.\n\n\n\nextern crate rustup_dist;\n\nextern crate rustup_mock;\n\nextern crate tempdir;\n\nextern crate rustup_utils;\n\n\n\nuse rustup_mock::clitools::{self, Config, Scenario,\n\n expect_ok, expect_ok_ex,\n\n expect_err_ex,\n\n this_host_triple};\n\n\n\nmacro_rules! 
for_host { ($s: expr) => (&format!($s, this_host_triple())) }\n\n\n", "file_path": "tests/cli-exact.rs", "rank": 97, "score": 35.92864452406291 }, { "content": "#![recursion_limit = \"1024\"]\n\n\n\nextern crate rustup_dist;\n\nextern crate rustup_utils;\n\n#[macro_use]\n\nextern crate error_chain;\n\nextern crate url;\n\nextern crate regex;\n\nextern crate itertools;\n\nextern crate rustc_serialize;\n\nextern crate tempfile;\n\nextern crate time;\n\nextern crate toml;\n\n#[cfg(unix)]\n\nextern crate libc;\n\n\n\npub use errors::*;\n\npub use notifications::*;\n\npub use config::*;\n\npub use toolchain::*;\n", "file_path": "src/rustup/lib.rs", "rank": 98, "score": 35.53924043703416 }, { "content": "//! Test cases of the rustup command, using v2 manifests, mostly\n\n//! derived from multirust/test-v2.sh\n\n\n\nextern crate rustup_dist;\n\nextern crate rustup_utils;\n\nextern crate rustup_mock;\n\nextern crate tempdir;\n\n\n\nuse std::fs;\n\nuse tempdir::TempDir;\n\nuse rustup_mock::clitools::{self, Config, Scenario,\n\n expect_ok, expect_stdout_ok, expect_err,\n\n expect_stderr_ok, expect_not_stdout_ok,\n\n set_current_dist_date,\n\n this_host_triple};\n\n\n\nuse rustup_dist::dist::TargetTriple;\n\n\n\nmacro_rules! for_host { ($s: expr) => (&format!($s, this_host_triple())) }\n\n\n", "file_path": "tests/cli-v2.rs", "rank": 99, "score": 35.1711612415364 } ]
Rust
src/bin/vessel.rs
ByronBecker/vessel
a2c2d3c2ffe39b3802a82be512f812e75ee32ea2
use anyhow::Result; use fern::colors::ColoredLevelConfig; use fern::Output; use log::LevelFilter; use std::io::Write; use std::path::PathBuf; use structopt::StructOpt; #[derive(Debug, StructOpt)] #[structopt(about = "Simple package management for Motoko")] struct Opts { #[structopt(long, parse(from_os_str), default_value = "package-set.dhall")] package_set: PathBuf, #[structopt(subcommand)] command: Command, } #[derive(Debug, StructOpt)] enum Command { Init, Install, UpgradeSet { tag: Option<String>, }, Sources, Bin, Verify { #[structopt(long)] version: Option<String>, #[structopt(long, parse(from_os_str))] moc: Option<PathBuf>, #[structopt(long)] moc_args: Option<String>, #[structopt()] package: Option<String>, }, } fn setup_logger(opts: &Opts) -> Result<(), fern::InitError> { let (log_level, out_channel): (LevelFilter, Output) = match opts.command { Command::Sources | Command::Bin => (log::LevelFilter::Info, std::io::stderr().into()), _ => (log::LevelFilter::Info, std::io::stdout().into()), }; let colors = ColoredLevelConfig::new(); fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "[{}] {}", colors.color(record.level()), message )) }) .level(log_level) .chain(out_channel) .apply()?; Ok(()) } fn main() -> Result<()> { let opts = Opts::from_args(); setup_logger(&opts)?; match opts.command { Command::Init => vessel::init(), Command::Install => { let vessel = vessel::Vessel::new(&opts.package_set)?; let _ = vessel.install_packages()?; Ok(()) } Command::UpgradeSet { tag } => { let (url, hash) = match tag { None => vessel::fetch_latest_package_set()?, Some(tag) => vessel::fetch_package_set(&tag)?, }; println!("let upstream =\n {} {}", url, hash); Ok(()) } Command::Bin => { let vessel = vessel::Vessel::new(&opts.package_set)?; let path = vessel.install_compiler()?; print!("{}", path.display().to_string()); std::io::stdout().flush()?; Ok(()) } Command::Sources => { let vessel = vessel::Vessel::new(&opts.package_set)?; let sources = 
vessel .install_packages()? .into_iter() .map(|(name, path)| format!("--package {} {}", name, path.display().to_string())) .collect::<Vec<_>>() .join(" "); print!("{}", sources); std::io::stdout().flush()?; Ok(()) } Command::Verify { moc, moc_args, version, package, } => { let vessel = vessel::Vessel::new_without_manifest(&opts.package_set)?; let moc = match (moc, version) { (None, None) => PathBuf::from("moc"), (Some(moc), None) => moc, (None, Some(version)) => { let bin_path = vessel::download_compiler(&version)?; bin_path.join("moc") } (Some(_), Some(_)) => { return Err(anyhow::anyhow!( "The --version and --moc flags are mutually exclusive." )) } }; match package { None => vessel.verify_all(&moc, &moc_args), Some(package) => vessel.verify_package(&moc, &moc_args, &package), } } } }
use anyhow::Result; use fern::colors::ColoredLevelConfig; use fern::Output; use log::LevelFilter; use std::io::Write; use std::path::PathBuf; use structopt::StructOpt; #[derive(Debug, StructOpt)] #[structopt(about = "Simple package management for Motoko")] struct Opts { #[structopt(long, parse(from_os_str), default_value = "package-set.dhall")] package_set: PathBuf, #[structopt(subcommand)] command: Command, } #[derive(Debug, StructOpt)] enum Command { Init, Install, UpgradeSet { tag: Option<String>, }, Sources, Bin, Verify { #[structopt(long)] version: Option<String>, #[structopt(long, parse(from_os_str))] moc: Option<PathBuf>, #[structopt(long)] moc_args: Option<String>, #[structopt()] package: Option<String>, }, } fn setup_logger(opts: &Opts) -> Result<(), fern::InitError> { let (log_level, out_channel): (LevelFilter, Output) = match opts.command { Command::Sources | Command::Bin => (log::LevelFilter::Info, std::io::stderr().into()), _ => (log::LevelFilter::Info, std::io::stdout().into()), }; let colors = ColoredLevelConfig::new(); fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "[{}] {}", colors.color(record.level()), message )) }) .level(log_level) .chain(out_channel) .apply()?; Ok(()) } fn main() -> Result<()> { let opts = Opts::from_args(); setup_logger(&opts)?; match opts.command { Command::Init => vessel::init(), Command::Install => { let vessel = vessel::Vessel::new(&opts.package_set)?; let _ = vessel.install_packages()?; Ok(()) } Command::UpgradeSet { tag } => { let (url, hash) = match tag { None => vessel::fetch_latest_package_set()?, Some(tag) => vessel::fetch_package_set(&tag)?, }; println!("let upstream =\n {} {}", url, hash); Ok(()) } Command::Bin => { let vessel = vessel::
bin_path.join("moc") } (Some(_), Some(_)) => { return Err(anyhow::anyhow!( "The --version and --moc flags are mutually exclusive." )) } }; match package { None => vessel.verify_all(&moc, &moc_args), Some(package) => vessel.verify_package(&moc, &moc_args, &package), } } } }
Vessel::new(&opts.package_set)?; let path = vessel.install_compiler()?; print!("{}", path.display().to_string()); std::io::stdout().flush()?; Ok(()) } Command::Sources => { let vessel = vessel::Vessel::new(&opts.package_set)?; let sources = vessel .install_packages()? .into_iter() .map(|(name, path)| format!("--package {} {}", name, path.display().to_string())) .collect::<Vec<_>>() .join(" "); print!("{}", sources); std::io::stdout().flush()?; Ok(()) } Command::Verify { moc, moc_args, version, package, } => { let vessel = vessel::Vessel::new_without_manifest(&opts.package_set)?; let moc = match (moc, version) { (None, None) => PathBuf::from("moc"), (Some(moc), None) => moc, (None, Some(version)) => { let bin_path = vessel::download_compiler(&version)?;
function_block-random_span
[ { "content": "/// Like `fetch_latest_package_set`, but lets you specify the tag\n\npub fn fetch_package_set(tag: &str) -> Result<(Url, Hash)> {\n\n let client = reqwest::blocking::Client::new();\n\n fetch_package_set_impl(&client, tag)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 107145.56514279639 }, { "content": "/// Fetches the latest release of dfinity/vessel-package-set and computes its\n\n/// Dhall hash. This way it can be used to initialize the package-set file.\n\npub fn fetch_latest_package_set() -> Result<(Url, Hash)> {\n\n let client = reqwest::blocking::Client::new();\n\n let response = client\n\n .get(\"https://api.github.com/repos/dfinity/vessel-package-set/releases\")\n\n .header(reqwest::header::ACCEPT, \"application/vnd.github.v3+json\")\n\n .header(reqwest::header::USER_AGENT, \"vessel\")\n\n .send()?;\n\n if !response.status().is_success() {\n\n return Err(anyhow::anyhow!(\n\n \"Failed to read Github releases: {:#?}\",\n\n response\n\n ));\n\n }\n\n let releases: Vec<GhRelease> = response.json()?;\n\n let release = &releases[0].tag_name;\n\n fetch_package_set_impl(&client, release)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 92438.41481923501 }, { "content": "fn fetch_package_set_impl(client: &reqwest::blocking::Client, tag: &str) -> Result<(Url, Hash)> {\n\n let package_set_url = format!(\n\n \"https://github.com/dfinity/vessel-package-set/releases/download/{}/package-set.dhall\",\n\n tag\n\n );\n\n let package_set = client\n\n .get(&package_set_url)\n\n .send()\n\n .context(\"When downloading the package set release\")?\n\n .text()\n\n .context(\"When decoding the package set release\")?;\n\n let hash = hash_dhall_expression(&package_set).context(\"When hashing the package set\")?;\n\n Ok((package_set_url, hash))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 90870.2595467835 }, { "content": "/// Initializes a new vessel project by creating a `vessel.dhall` file with no\n\n/// dependencies and 
adding a small package set referencing vessel-package-set\n\npub fn init() -> Result<()> {\n\n let package_set_path: PathBuf = PathBuf::from(\"package-set.dhall\");\n\n let manifest_path: PathBuf = PathBuf::from(\"vessel.dhall\");\n\n let (package_set_url, hash) = match fetch_latest_package_set() {\n\n Ok(r) => r,\n\n Err(e) => {\n\n warn!(\"Failed to fetch latest package-set. Initializing with an older fallback version.\\n\\nDetails: {}\", e);\n\n (\"https://github.com/dfinity/vessel-package-set/releases/download/mo-0.4.3-20200916/package-set.dhall\".to_string(),\n\n \"sha256:3e1d8d20e35550bc711ae94f94da8b0091e3a3094f91874ff62686c070478dd7\".to_string())\n\n }\n\n };\n\n if package_set_path.exists() {\n\n return Err(anyhow::anyhow!(\n\n \"Failed to initialize, there is an existing package-set.dhall file here\"\n\n ));\n\n }\n\n if manifest_path.exists() {\n\n return Err(anyhow::anyhow!(\n\n \"Failed to initialize, there is an existing vessel.dhall file here\"\n\n ));\n", "file_path": "src/lib.rs", "rank": 3, "score": 74131.32855507392 }, { "content": "pub fn download_compiler(version: &str) -> Result<PathBuf> {\n\n let bin = Path::new(\".vessel\").join(\".bin\");\n\n let dest = bin.join(&version);\n\n if dest.exists() {\n\n return Ok(dest);\n\n }\n\n\n\n let tmp = Path::new(\".vessel\").join(\".tmp\");\n\n if !tmp.exists() {\n\n fs::create_dir_all(&tmp)?\n\n }\n\n\n\n let os = if cfg!(target_os = \"linux\") {\n\n (\"x86_64-linux\", \"linux64\")\n\n } else if cfg!(target_os = \"macos\") {\n\n (\"x86_64-darwin\", \"macos\")\n\n } else {\n\n return Err(anyhow::anyhow!(\n\n \"Installing the compiler is only supported on Linux or MacOS for now\"\n\n ));\n", "file_path": "src/lib.rs", "rank": 4, "score": 67180.56091936698 }, { "content": "/// Downloads a package either as a tar-ball from Github or clones it as a repo\n\npub fn download_package(package: &Package) -> Result<PathBuf> {\n\n let package_dir = Path::new(\".vessel\").join(package.name.clone());\n\n if 
!package_dir.exists() {\n\n fs::create_dir_all(&package_dir).context(format!(\n\n \"Failed to create the package directory at {}\",\n\n package_dir.display()\n\n ))?;\n\n }\n\n let repo_dir = package_dir.join(&package.version);\n\n if !repo_dir.exists() {\n\n let tmp = Path::new(\".vessel\").join(\".tmp\");\n\n if !tmp.exists() {\n\n fs::create_dir_all(&tmp)?\n\n }\n\n if package.repo.starts_with(\"https://github.com\") {\n\n info!(\"Downloading tar-ball: \\\"{}\\\"\", package.name);\n\n download_tar_ball(&tmp, &repo_dir, &package.repo, &package.version).or_else(|_| {\n\n warn!(\n\n \"Downloading tar-ball failed, cloning as git repo instead: \\\"{}\\\"\",\n\n package.name\n", "file_path": "src/lib.rs", "rank": 5, "score": 66614.03867651457 }, { "content": "/// Clones `repo` into `dest` and checks out `version`\n\nfn clone_package(tmp: &Path, dest: &Path, repo: &str, version: &str) -> Result<()> {\n\n let tmp_dir: TempDir = tempfile::tempdir_in(tmp)?;\n\n let clone_result = Command::new(\"git\")\n\n .args(&[\"clone\", repo, \"repo\"])\n\n .current_dir(tmp_dir.path())\n\n .output()\n\n .context(format!(\"Failed to clone the repo at {}\", repo))?;\n\n if !clone_result.status.success() {\n\n return Err(anyhow::anyhow!(\n\n \"Failed to clone the repo at: {}\\nwith:\\n{}\",\n\n repo,\n\n std::str::from_utf8(&clone_result.stderr).unwrap()\n\n ));\n\n }\n\n\n\n let repo_dir = tmp_dir.path().join(\"repo\");\n\n let checkout_result = Command::new(\"git\")\n\n .args(&[\"-c\", \"advice.detachedHead=false\", \"checkout\", version])\n\n .current_dir(&repo_dir)\n\n .output()\n", "file_path": "src/lib.rs", "rank": 6, "score": 58741.89082304097 }, { "content": "/// Computes the sha256 hash for a given Dhall expression\n\n/// Computes the sha256 hash for a given Dhall expression\n\nfn hash_dhall_expression(expr: &str) -> Result<String> {\n\n let dhall_expr = dhall::syntax::text::parser::parse_expr(expr)\n\n .context(format!(\"Failed to parse a dhall expression: {}\", expr))?;\n\n 
let hash = dhall_expr\n\n .sha256_hash()\n\n .context(format!(\"Failed to hash the expression: {:?}\", dhall_expr))?;\n\n let formatted_hash = format!(\"{}\", dhall::syntax::Hash::SHA256(hash));\n\n Ok(formatted_hash)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 49498.47511968664 }, { "content": "/// Downloads and unpacks a tar-ball from Github into the `dest` path\n\nfn download_tar_ball(tmp: &Path, dest: &Path, repo: &str, version: &str) -> Result<()> {\n\n let target = format!(\n\n \"{}/archive/{}/.tar.gz\",\n\n repo.trim_end_matches(\".git\"),\n\n version\n\n );\n\n let response = reqwest::blocking::get(&target)?;\n\n\n\n if !response.status().is_success() {\n\n return Err(anyhow::anyhow!(\n\n \"Failed to download tarball for repo \\\"{}\\\" at version \\\"{}\\\", with \\\"{}\\\"\\n\\nDetails: {}\",\n\n repo,\n\n version,\n\n response.status(),\n\n response.text().unwrap_or_else(|_| \"No more details\".to_string())\n\n ));\n\n }\n\n\n\n // We unpack into a temporary directory and rename it in one go once\n\n // the full unpacking was successful\n", "file_path": "src/lib.rs", "rank": 8, "score": 46995.84963633558 }, { "content": "#[derive(Deserialize)]\n\nstruct GhRelease {\n\n tag_name: String,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 32853.68696580792 }, { "content": "type Hash = String;\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 29220.21421518672 }, { "content": "# vessel\n\n\n\nA simple package manager for the Motoko programming language.\n\n\n\n## Getting started\n\n\n\n1. Download a copy of the `vessel` binary [from the release page](https://github.com/dfinity/vessel/releases) or build one yourself\n\n2. Run `vessel init` in your project root.\n\n3. Edit `vessel.dhall` to include your dependencies (potentially also edit\n\n `package-set.dhall` to include additional package sources)\n\n4. 
In a dfx project: Edit `dfx.json` under defaults->build->packtool to say `\"vessel sources\"` like so:\n\n ```\n\n ...\n\n \"defaults\": {\n\n \"build\": {\n\n \"packtool\": \"vessel sources\"\n\n }\n\n }\n\n ...\n\n ```\n\n Then run `dfx build`\n\n4. In a non-dfx project: Run `$(vessel bin)/moc $(vessel sources)\n\n -wasi-system-api main.mo` to compile the `main.mo` file with the installed\n\n packages in scope and using the `wasi` API to let you run the generated WASM\n\n with tools like [wasmtime](https://wasmtime.dev).\n\n\n\n## How it works\n\n\n\n`vessel` is inspired by the [spago](https://github.com/purescript/spago) package\n\nmanager for PureScript. Any git repository with a `src/` directory is a valid\n\npackage to `vessel`, which is a flexible and lightweight approach to package\n\nmanagement, that is easily extended with more guarantees and features as our\n\ncommunity grows. The two concepts you need to understand to work with `vessel`\n\nare _package sets_ and the _manifest_ file.\n\n\n\n### Package sets\n\n\n\n`vessel` uses the idea of a _package set_ to manage where it pulls dependencies\n\nfrom. A package set is a collection of packages at certain versions that are\n\nknown to compile together. The package set also specifies the dependencies\n\nbetween these packages, so that `vessel` can find all the transitively needed\n\npackages to build your project. There will be a community maintained package set of\n\npublicly available, open source packages. You can then base your projects\n\npackage set on the public one and extend it with your private and local\n\npackages. The package set your project uses is stored in the `package-set.dhall`\n\nfile by default.\n\n\n", "file_path": "README.md", "rank": 11, "score": 10801.361994689853 }, { "content": "### Manifest file\n\n\n\nYour `vessel.dhall` file contains the list of packages you need for your project\n\nto build. 
`vessel` will look at this file, and figure out all the transitive\n\npackages you need using the package set file. Optionally it also contains a\n\ncompiler version that `vessel` uses to download the compiler binaries for you.\n\nAny change to this file requires a reload of the language service so your\n\npackages can be picked up by your editor for now.\n\n\n\nAfter `vessel` has installed all required packages through cloning or\n\ndownloading tarballs, it puts them in a project local location (the `.vessel`\n\ndirectory).\n\n\n\n## How Tos\n\n\n\n### How do I reset all caches?\n\n\n\nRemove the `.vessel` directory in your project\n\n\n\n### How do I depend on a git branch of a package?\n\n\n\nThe `\"version\"` field in the package set format refers to any git ref so you can\n\nput a branch name, a commit hash or a tag in there.\n\n\n\n**CAREFUL:** `vessel` has no way of invalidating \"moving\" references like a\n\nbranch name. If you push a new commit to the branch you'll need to manually\n\nreset your caches and re-install.\n\n\n\n### How do I add a local package to my package set?\n\n\n\nMake sure your local package is a git repository, then add an entry like so to\n\nyour `additions` in the `package-set.dhall` file:\n\n\n\n```dhall\n\nlet additions = [\n\n { name = \"mypackage\"\n\n , repo = \"file:///home/path/to/mypackage\"\n\n , version = \"v1.0.0\"\n\n , dependencies = [\"base\"]\n\n }\n\n]\n\n```\n\n\n\nNow you can depend on this package by adding `mypackage` to your `vessel.dhall` file.\n\n\n\n### How do I integrate `vessel` into my custom build?\n\n\n\nRunning `vessel sources` will return flags in a format you can pass directly to\n\nthe various compiler tools. Running `vessel bin` returns the path containing the\n\ncompiler binaries. 
Use like so: `$(vessel bin)/mo-doc`.\n\n\n\n## License\n\nvessel is distributed under the terms of the Apache License (Version 2.0).\n\n\n\nSee LICENSE for details.\n", "file_path": "README.md", "rank": 12, "score": 10798.654734558771 }, { "content": " pub fn install_compiler(&self) -> Result<PathBuf> {\n\n let version =\n\n self.manifest.compiler.as_ref().ok_or_else(|| {\n\n anyhow::anyhow!(\"No compiler version was specified in vessel.dhall\")\n\n })?;\n\n download_compiler(version).map(|path| self.nested_path(path))\n\n }\n\n\n\n /// Verifies that every source file inside the given package compiles in the current package set\n\n pub fn verify_package(&self, moc: &Path, moc_args: &Option<String>, name: &str) -> Result<()> {\n\n match self.package_set.find(name) {\n\n None => Err(anyhow::anyhow!(\n\n \"The package \\\"{}\\\" does not exist in the package set\",\n\n name\n\n )),\n\n Some(package) => {\n\n let mut cmd = Command::new(moc);\n\n cmd.arg(\"--check\");\n\n if let Some(args) = moc_args {\n\n cmd.args(args.split(' '));\n", "file_path": "src/lib.rs", "rank": 13, "score": 14.173509385461463 }, { "content": " .join(self.version.clone())\n\n .join(\"src\")\n\n }\n\n\n\n /// Returns all Motoko sources found inside this package's installation directory\n\n pub fn sources(&self) -> impl Iterator<Item = PathBuf> {\n\n WalkDir::new(self.install_path())\n\n .into_iter()\n\n .filter_map(|e| match e {\n\n Err(_) => None,\n\n Ok(entry) => {\n\n let file_name = entry.path();\n\n if let Some(ext) = file_name.extension() {\n\n if ext == \"mo\" {\n\n return Some(file_name.to_owned());\n\n }\n\n }\n\n None\n\n }\n\n })\n", "file_path": "src/lib.rs", "rank": 14, "score": 10.48117577345489 }, { "content": " }\n\n Ok(())\n\n } else {\n\n Err(anyhow::anyhow!(\n\n \"Failed to verify \\\"{}\\\" with:\\n{}\",\n\n package.name,\n\n String::from_utf8(output.stderr)?\n\n ))\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn verify_all(&self, moc: &Path, moc_args: &Option<String>) -> Result<()> 
{\n\n let mut errors: Vec<(Name, anyhow::Error)> = vec![];\n\n for package in &self.package_set.topo_sorted() {\n\n if errors\n\n .iter()\n\n .find(|(n, _)| package.dependencies.contains(n))\n\n .is_none()\n", "file_path": "src/lib.rs", "rank": 15, "score": 9.738105044727124 }, { "content": " }\n\n Ok(None)\n\n }\n\n\n\n pub fn new(package_set_file: &Path) -> Result<Vessel> {\n\n let mut new_vessel = match Vessel::find_dominating_manifest()? {\n\n None => {\n\n return Err(anyhow::anyhow!(\n\n \"Could not find a 'vessel.dhall' file in this directory or a parent one.\"\n\n ))\n\n }\n\n Some(nested) => Vessel {\n\n nested,\n\n ..Default::default()\n\n },\n\n };\n\n new_vessel.read_package_set(package_set_file)?;\n\n new_vessel.read_manifest_file()?;\n\n Ok(new_vessel)\n\n }\n", "file_path": "src/lib.rs", "rank": 16, "score": 9.504115067726605 }, { "content": "}\n\n\n\npub type Url = String;\n\n\n\npub type Tag = String;\n\n\n\npub type Name = String;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, serde_dhall::StaticType)]\n\npub struct Package {\n\n pub name: Name,\n\n pub repo: Url,\n\n pub version: Tag,\n\n pub dependencies: Vec<Name>,\n\n}\n\n\n\nimpl Package {\n\n pub fn install_path(&self) -> PathBuf {\n\n Path::new(\".vessel\")\n\n .join(self.name.clone())\n", "file_path": "src/lib.rs", "rank": 17, "score": 8.7038473571757 }, { "content": " }\n\n let mut manifest = fs::File::create(\"vessel.dhall\")?;\n\n manifest.write_all(\n\n br#\"{\n\n dependencies = [ \"base\", \"matchers\" ],\n\n compiler = None Text\n\n}\n\n\"#,\n\n )?;\n\n let mut manifest = fs::File::create(\"package-set.dhall\")?;\n\n write!(&mut manifest, \"let upstream = {} {}\", package_set_url, hash)?;\n\n manifest.write_all(\n\n br#\"\n\nlet Package =\n\n { name : Text, version : Text, repo : Text, dependencies : List Text }\n\n\n\nlet\n\n -- This is where you can add your own packages to the package-set\n\n additions =\n\n [] : List Package\n", "file_path": "src/lib.rs", "rank": 
18, "score": 8.683223994624715 }, { "content": "use anyhow::{self, Context, Result};\n\nuse flate2::read::GzDecoder;\n\nuse log::{debug, info, warn};\n\nuse semver::Version;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::cfg;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::env;\n\nuse std::fs;\n\nuse std::io::Write;\n\nuse std::iter::Iterator;\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::Command;\n\nuse tar::Archive;\n\nuse tempfile::TempDir;\n\nuse topological_sort::TopologicalSort;\n\nuse walkdir::WalkDir;\n\n\n\n#[derive(Debug, Default)]\n\npub struct Vessel {\n", "file_path": "src/lib.rs", "rank": 19, "score": 8.12388213777784 }, { "content": " };\n\n let target = match Version::parse(version) {\n\n Ok(semver) if semver > Version::new(0, 6, 2) => format!(\n\n \"https://github.com/dfinity/motoko/releases/download/{}/motoko-{}-{}.tar.gz\",\n\n version, os.1, version\n\n ),\n\n _ => format!(\n\n \"https://download.dfinity.systems/motoko/{}/{}/motoko-{}.tar.gz\",\n\n version, os.0, version\n\n ),\n\n };\n\n\n\n let client = reqwest::blocking::Client::new();\n\n let response = client\n\n .get(&target)\n\n .header(reqwest::header::USER_AGENT, \"vessel\")\n\n .send()?;\n\n\n\n if !response.status().is_success() {\n\n return Err(anyhow::anyhow!(\n", "file_path": "src/lib.rs", "rank": 20, "score": 7.887263172691267 }, { "content": "\n\nlet\n\n {- This is where you can override existing packages in the package-set\n\n\n\n For example, if you wanted to use version `v2.0.0` of the foo library:\n\n let overrides = [\n\n { name = \"foo\"\n\n , version = \"v2.0.0\"\n\n , repo = \"https://github.com/bar/foo\"\n\n , dependencies = [] : List Text\n\n }\n\n ]\n\n -}\n\n overrides =\n\n [] : List Package\n\n\n\nin upstream # additions # overrides\n\n\"#,\n\n )?;\n\n Ok(())\n", "file_path": "src/lib.rs", "rank": 21, "score": 7.4733079618157605 }, { "content": " let install_plan = self\n\n .package_set\n\n 
.transitive_deps(self.manifest.dependencies.clone());\n\n\n\n info!(\"Installing {} packages\", install_plan.len());\n\n\n\n let paths = install_plan\n\n .iter()\n\n .map(|package| {\n\n download_package(package).map(|path| (package.name.clone(), self.nested_path(path)))\n\n })\n\n .collect::<Result<Vec<(String, PathBuf)>>>()?;\n\n\n\n info!(\"Installation complete.\");\n\n\n\n Ok(paths)\n\n }\n\n\n\n /// Downloads the compiler binaries at the version specified in the manifest\n\n /// and returns the path to them.\n", "file_path": "src/lib.rs", "rank": 22, "score": 7.081392507203697 }, { "content": " }\n\n download_package(&package)?;\n\n let dependencies = self\n\n .package_set\n\n .transitive_deps(package.dependencies.clone());\n\n for package in dependencies {\n\n let path = download_package(&package)?;\n\n cmd.arg(\"--package\").arg(&package.name).arg(path);\n\n }\n\n\n\n package.sources().for_each(|entry_point| {\n\n cmd.arg(entry_point);\n\n });\n\n let output = cmd.output().context(format!(\"Failed to run {:?}\", cmd))?;\n\n if output.status.success() {\n\n let warnings = String::from_utf8(output.stderr)?;\n\n if !warnings.is_empty() {\n\n info!(\"Verified \\\"{}\\\" with output:\\n{}\", package.name, warnings);\n\n } else {\n\n info!(\"Verified \\\"{}\\\"\", package.name);\n", "file_path": "src/lib.rs", "rank": 23, "score": 6.996779643035756 }, { "content": "\n\n pub fn new_without_manifest(package_set_file: &Path) -> Result<Vessel> {\n\n let mut new_vessel: Vessel = Default::default();\n\n new_vessel.read_package_set(package_set_file)?;\n\n Ok(new_vessel)\n\n }\n\n\n\n fn read_manifest_file(&mut self) -> Result<()> {\n\n let manifest_file = PathBuf::from(\"vessel.dhall\");\n\n self.manifest = serde_dhall::from_file(manifest_file)\n\n .static_type_annotation()\n\n .parse()\n\n .context(\"Failed to parse the vessel.dhall file\")?;\n\n Ok(())\n\n }\n\n\n\n fn read_package_set(&mut self, package_set_file: &Path) -> Result<()> {\n\n self.package_set = 
PackageSet::new(\n\n serde_dhall::from_file(package_set_file)\n\n .static_type_annotation()\n", "file_path": "src/lib.rs", "rank": 24, "score": 6.930383892526937 }, { "content": " {\n\n if let Err(err) = self.verify_package(moc, moc_args, &package.name) {\n\n errors.push((package.name.clone(), err))\n\n }\n\n }\n\n }\n\n if errors.is_empty() {\n\n Ok(())\n\n } else {\n\n let err = anyhow::anyhow!(\n\n \"Failed to verify: {:?}\",\n\n errors\n\n .iter()\n\n .map(|(n, _)| n.clone())\n\n .collect::<Vec<String>>()\n\n );\n\n for err in errors.iter().rev() {\n\n eprintln!(\"{}\", err.1);\n\n }\n\n Err(err)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 6.50644161718328 }, { "content": " .parse()\n\n .context(\"Failed to parse the package set file\")?,\n\n );\n\n Ok(())\n\n }\n\n\n\n fn nested_path(&self, path: PathBuf) -> PathBuf {\n\n if self.nested == 0 {\n\n return path;\n\n }\n\n\n\n let mut res = PathBuf::new();\n\n for _ in 0..self.nested {\n\n res.push(\"..\");\n\n }\n\n res.join(path)\n\n }\n\n\n\n /// Installs all transitive dependencies and returns a mapping of package name -> installation location\n\n pub fn install_packages(&self) -> Result<Vec<(Name, PathBuf)>> {\n", "file_path": "src/lib.rs", "rank": 26, "score": 5.724211452682552 }, { "content": " \"Failed to download Motoko binaries for version {}, with \\\"{}\\\"\\n\\nDetails: {}\",\n\n version,\n\n response.status(),\n\n response\n\n .text()\n\n .unwrap_or_else(|_| \"No more details\".to_string())\n\n ));\n\n }\n\n\n\n // We unpack into a temporary directory and rename it in one go once\n\n // the full unpacking was successful\n\n let tmp_dir: TempDir = tempfile::tempdir_in(tmp)?;\n\n Archive::new(GzDecoder::new(response)).unpack(tmp_dir.path())?;\n\n\n\n if !bin.exists() {\n\n fs::create_dir_all(&bin)?\n\n }\n\n fs::rename(tmp_dir, &dest)?;\n\n\n\n Ok(dest)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 5.618437725419421 }, { "content": " .context(format!(\n\n 
\"Failed to checkout version {} for the repository {} in {}\",\n\n version,\n\n repo,\n\n repo_dir.display()\n\n ))?;\n\n if !checkout_result.status.success() {\n\n return Err(anyhow::anyhow!(\n\n \"Failed to checkout version {} for the repo at: {}\\nwith:\\n{}\",\n\n version,\n\n repo,\n\n std::str::from_utf8(&checkout_result.stderr).unwrap()\n\n ));\n\n }\n\n\n\n fs::rename(repo_dir, dest)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 5.413494792008965 }, { "content": " );\n\n clone_package(&tmp, &repo_dir, &package.repo, &package.version)\n\n })?\n\n } else {\n\n info!(\"Cloning git repository: \\\"{}\\\"\", package.name);\n\n clone_package(&tmp, &repo_dir, &package.repo, &package.version)?\n\n }\n\n } else {\n\n debug!(\n\n \"{} at version {} has already been downloaded\",\n\n package.name, package.version\n\n )\n\n }\n\n Ok(repo_dir.join(\"src\"))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 5.357238132908865 }, { "content": " PackageSet(package_set)\n\n }\n\n\n\n /// Finds a package by name\n\n fn find(&self, name: &str) -> Option<&Package> {\n\n self.0.get(name)\n\n }\n\n\n\n fn find_unsafe(&self, name: &str) -> &Package {\n\n self.find(name)\n\n .unwrap_or_else(|| panic!(\"Package \\\"{}\\\" wasn't specified in the package set\", name))\n\n }\n\n\n\n /// Finds all transitive dependencies starting from the given package names.\n\n /// Includes the entry points in the resulting vector\n\n fn transitive_deps(&self, entry_points: Vec<Name>) -> Vec<&Package> {\n\n let mut found: HashSet<Name> = HashSet::new();\n\n let mut todo: Vec<Name> = entry_points;\n\n while let Some(next) = todo.pop() {\n\n if !found.contains(&next) {\n", "file_path": "src/lib.rs", "rank": 30, "score": 4.363615843690853 }, { "content": " ts.map(|name| self.find_unsafe(name)).collect()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn mk_package(name: &str, deps: Vec<&str>) -> Package {\n\n Package {\n\n name: 
name.to_string(),\n\n repo: \"\".to_string(),\n\n version: \"\".to_string(),\n\n dependencies: deps.into_iter().map(|x| x.to_string()).collect(),\n\n }\n\n }\n\n\n\n #[test]\n\n fn it_finds_a_transitive_dependency() {\n\n let a = mk_package(\"A\", vec![\"B\"]);\n", "file_path": "src/lib.rs", "rank": 31, "score": 4.35663651883778 }, { "content": " pub package_set: PackageSet,\n\n pub manifest: Manifest,\n\n /// How many parent directories are we nested underneath the project root\n\n pub nested: u32,\n\n}\n\n\n\nimpl Vessel {\n\n fn find_dominating_manifest() -> Result<Option<u32>> {\n\n let cwd = env::current_dir().context(\"Unable to access the current directory\")?;\n\n for (depth, path) in cwd.ancestors().enumerate() {\n\n if path.join(\"vessel.dhall\").exists() {\n\n if depth != 0 {\n\n info!(\"Changing working directory to {}\", path.display());\n\n env::set_current_dir(&path).context(format!(\n\n \"Failed to change current directory to {}\",\n\n path.display()\n\n ))?;\n\n }\n\n return Ok(Some(depth as u32));\n\n }\n", "file_path": "src/lib.rs", "rank": 32, "score": 4.197621250783117 }, { "content": " }\n\n}\n\n\n\n// This isn't normalized, as the package name is duplicated, but it's too handy\n\n// to have a `Package` carry its name along.\n\n#[derive(Debug, Clone, PartialEq, Default)]\n\npub struct PackageSet(pub HashMap<Name, Package>);\n\n\n\n#[derive(Debug, PartialEq, Default, Serialize, Deserialize, serde_dhall::StaticType)]\n\npub struct Manifest {\n\n pub compiler: Option<String>,\n\n pub dependencies: Vec<Name>,\n\n}\n\n\n\nimpl PackageSet {\n\n fn new(packages: Vec<Package>) -> PackageSet {\n\n let mut package_set = HashMap::new();\n\n for package in packages {\n\n package_set.insert(package.name.clone(), package);\n\n }\n", "file_path": "src/lib.rs", "rank": 33, "score": 3.5464834733926645 }, { "content": " let tmp_dir: TempDir = tempfile::tempdir_in(tmp)?;\n\n Archive::new(GzDecoder::new(response)).unpack(tmp_dir.path())?;\n\n\n\n // We expect an 
unpacked repo to contain exactly one directory\n\n let repo_dir = match fs::read_dir(tmp_dir.path())?.next() {\n\n None => return Err(anyhow::anyhow!(\"Unpacked an empty tarball for {}\", repo)),\n\n Some(dir) => dir?,\n\n };\n\n\n\n if !repo_dir.path().is_dir() {\n\n return Err(anyhow::anyhow!(\"Failed to unpack tarball for \\\"{}\\\"\", repo));\n\n }\n\n fs::rename(repo_dir.path(), dest)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 34, "score": 3.1617900095436053 }, { "content": " let b = mk_package(\"B\", vec![]);\n\n let ps = PackageSet::new(vec![a.clone(), b.clone()]);\n\n assert_eq!(vec![&b], ps.transitive_deps(vec![\"B\".to_string()]));\n\n assert_eq!(vec![&a, &b], ps.transitive_deps(vec![\"A\".to_string()]))\n\n }\n\n\n\n #[test]\n\n fn it_finds_transitive_dependencies_with_overlaps() {\n\n let a = mk_package(\"A\", vec![\"B\"]);\n\n let b = mk_package(\"B\", vec![]);\n\n let c = mk_package(\"C\", vec![\"B\"]);\n\n let ps = PackageSet::new(vec![a.clone(), b.clone(), c.clone()]);\n\n assert_eq!(\n\n vec![&a, &b, &c],\n\n ps.transitive_deps(vec![\"A\".to_string(), \"C\".to_string()])\n\n );\n\n\n\n assert_eq!(vec![&b, &c], ps.transitive_deps(vec![\"C\".to_string()]))\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 35, "score": 1.8221950081540972 }, { "content": " todo.append(&mut self.find_unsafe(&next).dependencies.clone());\n\n found.insert(next);\n\n }\n\n }\n\n // Once we have incremental compilation we could return these toposorted to allow\n\n // starting to compile the first packages while others are still being downloaded.\n\n // For now we sort them to get deterministic behaviour for testing.\n\n let mut found: Vec<Name> = found.into_iter().collect();\n\n found.sort();\n\n found.iter().map(|n| self.find_unsafe(n)).collect()\n\n }\n\n\n\n pub fn topo_sorted(&self) -> Vec<&Package> {\n\n let mut ts = TopologicalSort::<&str>::new();\n\n for (name, package) in &self.0 {\n\n ts.insert(name.as_ref());\n\n for dep in 
&package.dependencies {\n\n ts.add_dependency(dep.as_ref(), name.as_ref())\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 36, "score": 0.7838635003078751 } ]
Rust
crates/binding_core_node/src/parse.rs
10088/swc
6bc39005bb851b57f4f49b046688f4acc6e850f3
use std::{ path::{Path, PathBuf}, sync::Arc, }; use anyhow::Context as _; use napi::{ bindgen_prelude::{AbortSignal, AsyncTask, Buffer}, Env, Task, }; use swc::{ config::{ErrorFormat, ParseOptions}, Compiler, }; use swc_common::{comments::Comments, FileName}; use swc_nodejs_common::{deserialize_json, get_deserialized, MapErr}; use crate::{get_compiler, util::try_with}; pub struct ParseTask { pub c: Arc<Compiler>, pub filename: FileName, pub src: String, pub options: String, } pub struct ParseFileTask { pub c: Arc<Compiler>, pub path: PathBuf, pub options: String, } #[napi] impl Task for ParseTask { type JsValue = String; type Output = String; fn compute(&mut self) -> napi::Result<Self::Output> { let options: ParseOptions = deserialize_json(&self.options)?; let fm = self .c .cm .new_source_file(self.filename.clone(), self.src.clone()); let comments = if options.comments { Some(self.c.comments() as &dyn Comments) } else { None }; let program = try_with(self.c.cm.clone(), false, ErrorFormat::Normal, |handler| { self.c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) .convert_err()?; let ast_json = serde_json::to_string(&program)?; Ok(ast_json) } fn resolve(&mut self, _env: Env, result: Self::Output) -> napi::Result<Self::JsValue> { Ok(result) } } #[napi] impl Task for ParseFileTask { type JsValue = String; type Output = String; fn compute(&mut self) -> napi::Result<Self::Output> { let program = try_with(self.c.cm.clone(), false, ErrorFormat::Normal, |handler| { self.c.run(|| { let options: ParseOptions = deserialize_json(&self.options)?; let fm = self .c .cm .load_file(&self.path) .context("failed to read module")?; let c = self.c.comments().clone(); let comments = if options.comments { Some(&c as &dyn Comments) } else { None }; self.c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) }) .convert_err()?; let ast_json = serde_json::to_string(&program)?; Ok(ast_json) } fn resolve(&mut self, 
_env: Env, result: Self::Output) -> napi::Result<Self::JsValue> { Ok(result) } } #[napi] pub fn parse( src: String, options: Buffer, filename: Option<String>, signal: Option<AbortSignal>, ) -> AsyncTask<ParseTask> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let options = String::from_utf8_lossy(options.as_ref()).to_string(); let filename = if let Some(value) = filename { FileName::Real(value.into()) } else { FileName::Anon }; AsyncTask::with_optional_signal( ParseTask { c, filename, src, options, }, signal, ) } #[napi] pub fn parse_sync(src: String, opts: Buffer, filename: Option<String>) -> napi::Result<String> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let options: ParseOptions = get_deserialized(&opts)?; let filename = if let Some(value) = filename { FileName::Real(value.into()) } else { FileName::Anon }; let program = try_with(c.cm.clone(), false, ErrorFormat::Normal, |handler| { c.run(|| { let fm = c.cm.new_source_file(filename, src); let comments = if options.comments { Some(c.comments() as &dyn Comments) } else { None }; c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) }) .convert_err()?; Ok(serde_json::to_string(&program)?) } #[napi] pub fn parse_file_sync(path: String, opts: Buffer) -> napi::Result<String> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let options: ParseOptions = get_deserialized(&opts)?; let program = { try_with(c.cm.clone(), false, ErrorFormat::Normal, |handler| { let fm = c.cm.load_file(Path::new(path.as_str())) .expect("failed to read program file"); let comments = if options.comments { Some(c.comments() as &dyn Comments) } else { None }; c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) } .convert_err()?; Ok(serde_json::to_string(&program)?) 
} #[napi] pub fn parse_file( path: String, options: Buffer, signal: Option<AbortSignal>, ) -> AsyncTask<ParseFileTask> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let path = PathBuf::from(&path); let options = String::from_utf8_lossy(options.as_ref()).to_string(); AsyncTask::with_optional_signal(ParseFileTask { c, path, options }, signal) }
use std::{ path::{Path, PathBuf}, sync::Arc, }; use anyhow::Context as _; use napi::{ bindgen_prelude::{AbortSignal, AsyncTask, Buffer}, Env, Task, }; use swc::{ config::{ErrorFormat, ParseOptions}, Compiler, }; use swc_common::{comments::Comments, FileName}; use swc_nodejs_common::{deserialize_json, get_deserialized, MapErr}; use crate::{get_compiler, util::try_with}; pub struct ParseTask { pub c: Arc<Compiler>, pub filename: FileName, pub src: String, pub options: String, } pub struct ParseFileTask { pub c: Arc<Compiler>, pub path: PathBuf, pub options: String, } #[napi] impl Task for ParseTask { type JsValue = String; type Output = String; fn compute(&mut self) -> napi::Result<Self::Output> { let options: ParseOptions = deserialize_json(&self.options)?; let fm = self .c .cm .new_source_file(self.filename.clone(), self.src.clone()); let comments = if options.comments { Some(self.c.comments() as &dyn Comments) } else { None }; let program = try_with(self.c.cm.clone(), false, ErrorFormat::Normal, |handler| { self.c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) .convert_err()?; let ast_json = serde_json::to_string(&program)?; Ok(ast_json) } fn resolve(&mut self, _env: Env, result: Self::Output) -> napi::Result<Self::JsValue> { Ok(result) } } #[napi] impl Task for ParseFileTask { type JsValue = String; type Output = String; fn compute(&mut self) -> napi::Result<Self::Output> { let program = try_with(self.c.cm.clone(), false, ErrorFormat::Normal, |handler| { self.c.run(|| { let options: ParseOptions = deserialize_json(&self.options)?; let fm = self .c .cm .load_file(&self.path) .context("failed to read module")?; let c = self.c.comments().clone(); let comments = if options.comments { Some(&c as &dyn Comments) } else { None }; self.c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) }) .convert_err()?; let ast_json = serde_json::to_string(&program)?; Ok(ast_json) } fn resolve(&mut self, 
_env: Env, result: Self::Output) -> napi::Result<Self::JsValue> { Ok(result) } } #[napi] pub fn parse( src: String, options: Buffer, filename: Option<String>, signal: Option<AbortSignal>, ) -> AsyncTask<ParseTask> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let options = String::from_utf8_lossy(options.as_ref()).to_string(); let filename = if let Some(value) = filename { FileName::Real(value.into()) } else { FileName::Anon }; AsyncTask::with_optional_signal( ParseTask { c, filename, src, options, }, signal, ) } #[napi] pub fn parse_sync(src: String, opts: Buffer, filename: Option<String>) -> napi::Result<String> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let options: ParseOptions = get_deserialized(&opts)?; let filename = if let Some(value) = filename { FileName::Real(value.into()) } else { FileName::Anon }; let program = try_with(c.cm.clone(), false, ErrorFormat::Normal, |handler| { c.run(|| { let fm = c.cm.new_source_file(filename, src);
c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) }) .convert_err()?; Ok(serde_json::to_string(&program)?) } #[napi] pub fn parse_file_sync(path: String, opts: Buffer) -> napi::Result<String> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let options: ParseOptions = get_deserialized(&opts)?; let program = { try_with(c.cm.clone(), false, ErrorFormat::Normal, |handler| { let fm = c.cm.load_file(Path::new(path.as_str())) .expect("failed to read program file"); let comments = if options.comments { Some(c.comments() as &dyn Comments) } else { None }; c.parse_js( fm, handler, options.target, options.syntax, options.is_module, comments, ) }) } .convert_err()?; Ok(serde_json::to_string(&program)?) } #[napi] pub fn parse_file( path: String, options: Buffer, signal: Option<AbortSignal>, ) -> AsyncTask<ParseFileTask> { swc_nodejs_common::init_default_trace_subscriber(); let c = get_compiler(); let path = PathBuf::from(&path); let options = String::from_utf8_lossy(options.as_ref()).to_string(); AsyncTask::with_optional_signal(ParseFileTask { c, path, options }, signal) }
let comments = if options.comments { Some(c.comments() as &dyn Comments) } else { None };
assignment_statement
[]
Rust
rlp/src/impls.rs
Byeongjee/rlp
6a4c2c39f76eba2b3b68863941cd64fddee3e5cc
use super::stream::RlpStream; use super::traits::{Decodable, Encodable}; use super::{DecoderError, Rlp}; use primitives::{H128, H160, H256, H384, H512, H520, H768, U256}; use std::iter::{empty, once}; use std::{cmp, mem, str}; pub fn decode_usize(bytes: &[u8]) -> Result<usize, DecoderError> { let expected = mem::size_of::<usize>(); match bytes.len() { l if l <= expected => { if bytes[0] == 0 { return Err(DecoderError::RlpInvalidIndirection) } let mut res = 0usize; for (i, byte) in bytes.iter().enumerate() { let shift = (l - 1 - i) * 8; res += (*byte as usize) << shift; } Ok(res) } got => Err(DecoderError::RlpIsTooBig { expected, got, }), } } impl Encodable for bool { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_iter(once(if *self { 1u8 } else { 0 })); } } impl Decodable for bool { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len() { 0 => Ok(false), 1 => Ok(bytes[0] != 0), got => Err(DecoderError::RlpIsTooBig { expected: 1, got, }), }) } } impl<'a> Encodable for &'a [u8] { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self); } } impl Encodable for Vec<u8> { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self); } } impl Decodable for Vec<u8> { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| Ok(bytes.to_vec())) } } impl Encodable for Vec<Vec<u8>> { fn rlp_append(&self, s: &mut RlpStream) { s.begin_list(self.len()); for e in self { s.append(e); } } } impl Decodable for Vec<Vec<u8>> { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.as_list::<Vec<u8>>() } } impl<T> Encodable for Option<T> where T: Encodable, { fn rlp_append(&self, s: &mut RlpStream) { match *self { None => { s.begin_list(0); } Some(ref value) => { s.begin_list(1); s.append(value); } } } } impl<T> Decodable for Option<T> where T: Decodable, { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { let items = rlp.item_count()?; match items { 1 => 
rlp.val_at(0).map(Some), 0 => Ok(None), got => Err(DecoderError::RlpIncorrectListLen { expected: 1, got, }), } } } impl Encodable for u8 { fn rlp_append(&self, s: &mut RlpStream) { if *self != 0 { s.encoder().encode_iter(once(*self)); } else { s.encoder().encode_iter(empty()); } } } impl Decodable for u8 { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len() { 1 if bytes[0] != 0 => Ok(bytes[0]), 0 => Ok(0), 1 => Err(DecoderError::RlpInvalidIndirection), got => Err(DecoderError::RlpIsTooBig { expected: 1, got, }), }) } } macro_rules! impl_encodable_for_u { ($name: ident) => { impl Encodable for $name { fn rlp_append(&self, s: &mut RlpStream) { let leading_empty_bytes = self.leading_zeros() as usize / 8; let buffer = self.to_be_bytes(); s.encoder().encode_value(&buffer[leading_empty_bytes..]); } } }; } macro_rules! impl_decodable_for_u { ($name: ident) => { impl Decodable for $name { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len() { 0 | 1 => u8::decode(rlp).map($name::from), l if l <= mem::size_of::<$name>() => { if bytes[0] == 0 { return Err(DecoderError::RlpInvalidIndirection) } let mut res = 0 as $name; for (i, byte) in bytes.iter().enumerate() { let shift = (l - 1 - i) * 8; res += $name::from(*byte) << shift; } Ok(res) } got => Err(DecoderError::RlpIsTooBig { expected: mem::size_of::<$name>(), got, }), }) } } }; } impl_encodable_for_u!(u16); impl_encodable_for_u!(u32); impl_encodable_for_u!(u64); impl_encodable_for_u!(u128); impl_decodable_for_u!(u16); impl_decodable_for_u!(u32); impl_decodable_for_u!(u64); impl_decodable_for_u!(u128); impl Encodable for usize { fn rlp_append(&self, s: &mut RlpStream) { (*self as u64).rlp_append(s); } } impl Decodable for usize { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { u64::decode(rlp).map(|value| value as usize) } } macro_rules! 
impl_encodable_for_hash { ($name: ident) => { impl Encodable for $name { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self); } } }; } macro_rules! impl_decodable_for_hash { ($name: ident, $size: expr) => { impl Decodable for $name { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len().cmp(&$size) { cmp::Ordering::Less => Err(DecoderError::RlpIsTooShort { expected: $size, got: bytes.len(), }), cmp::Ordering::Greater => Err(DecoderError::RlpIsTooBig { expected: $size, got: bytes.len(), }), cmp::Ordering::Equal => { let mut t = [0u8; $size]; t.copy_from_slice(bytes); Ok($name(t)) } }) } } }; } impl_encodable_for_hash!(H128); impl_encodable_for_hash!(H160); impl_encodable_for_hash!(H256); impl_encodable_for_hash!(H384); impl_encodable_for_hash!(H512); impl_encodable_for_hash!(H520); impl_encodable_for_hash!(H768); impl_decodable_for_hash!(H128, 16); impl_decodable_for_hash!(H160, 20); impl_decodable_for_hash!(H256, 32); impl_decodable_for_hash!(H384, 48); impl_decodable_for_hash!(H512, 64); impl_decodable_for_hash!(H520, 65); impl_decodable_for_hash!(H768, 96); macro_rules! impl_encodable_for_uint { ($name: ident, $size: expr) => { impl Encodable for $name { fn rlp_append(&self, s: &mut RlpStream) { let leading_empty_bytes = $size - (self.bits() + 7) / 8; let mut buffer = [0u8; $size]; self.to_big_endian(&mut buffer); s.encoder().encode_value(&buffer[leading_empty_bytes..]); } } }; } macro_rules! 
impl_decodable_for_uint { ($name: ident, $size: expr) => { impl Decodable for $name { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| { if !bytes.is_empty() && bytes[0] == 0 { Err(DecoderError::RlpInvalidIndirection) } else if bytes.len() <= $size { Ok($name::from(bytes)) } else { Err(DecoderError::RlpIsTooBig { expected: $size, got: bytes.len(), }) } }) } } }; } impl_encodable_for_uint!(U256, 32); impl_decodable_for_uint!(U256, 32); impl<'a> Encodable for &'a str { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self.as_bytes()); } } impl Encodable for String { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self.as_bytes()); } } impl Decodable for String { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| { if bytes.contains(&b'\0') { return Err(DecoderError::RlpNullTerminatedString) } match str::from_utf8(bytes) { Ok(s) => Ok(s.to_owned()), Err(_err) => Err(DecoderError::RlpExpectedToBeData), } }) } } impl<T1: Encodable, T2: Encodable, T3: Encodable> Encodable for (T1, T2, T3) { fn rlp_append(&self, s: &mut RlpStream) { s.begin_list(3).append(&self.0).append(&self.1).append(&self.2); } } impl<T1: Decodable, T2: Decodable, T3: Decodable> Decodable for (T1, T2, T3) { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { let item_count = rlp.item_count()?; if item_count != 3 { return Err(DecoderError::RlpIncorrectListLen { expected: 3, got: item_count, }) } Ok((rlp.val_at(0)?, rlp.val_at(1)?, rlp.val_at(2)?)) } } #[macro_export] macro_rules! 
rlp_encode_and_decode_test { ($origin:expr) => { fn rlp_encode_and_decode_test<T>(origin: T) where T: $crate::Encodable + $crate::Decodable + ::std::fmt::Debug + PartialEq, { let encoded = $crate::encode(&origin); let decoded = $crate::decode::<T>(&encoded); assert_eq!(Ok(origin), decoded); } rlp_encode_and_decode_test($origin); }; } #[cfg(test)] mod tests { use super::*; #[test] fn vec_of_bytes() { let origin: Vec<Vec<u8>> = vec![vec![0, 1, 2, 3, 4], vec![5, 6, 7], vec![], vec![8, 9]]; let encoded = crate::encode(&origin); let expected = { let mut s = RlpStream::new(); s.begin_list(4); s.append::<Vec<u8>>(&origin[0]); s.append::<Vec<u8>>(&origin[1]); s.append::<Vec<u8>>(&origin[2]); s.append::<Vec<u8>>(&origin[3]); s.out() }; assert_eq!(expected, encoded.to_vec()); rlp_encode_and_decode_test!(origin); } #[test] fn rlp_zero_h160() { let h = H160::zero(); let encoded = h.rlp_bytes().to_vec(); assert_eq!(&[0x80 + 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], encoded.as_slice()); } #[test] fn vec_and_hash() { let vec: Vec<u8> = { let mut vec = Vec::with_capacity(32); for i in 0..32 { vec.push(i); } vec }; let hash: H256 = { let mut hash = H256::zero(); assert_eq!(32, hash.iter().len()); for (i, h) in hash.iter_mut().enumerate().take(32) { *h = i as u8; } hash }; assert_eq!(vec.rlp_bytes(), hash.rlp_bytes()); } #[test] fn slice_and_hash() { let array: [u8; 32] = { let mut array = [0 as u8; 32]; assert_eq!(32, array.iter().len()); for (i, a) in array.iter_mut().enumerate().take(32) { *a = i as u8; } array }; let slice: &[u8] = &array; let hash: H256 = { let mut hash = H256::zero(); assert_eq!(32, hash.iter().len()); for (i, h) in hash.iter_mut().enumerate().take(32) { *h = i as u8; } hash }; assert_eq!(slice.rlp_bytes(), hash.rlp_bytes()); } #[test] fn empty_bytes() { let empty_bytes: Vec<u8> = vec![]; assert_eq!(&[0x80], &empty_bytes.rlp_bytes().to_vec().as_slice()); rlp_encode_and_decode_test!(empty_bytes); } #[test] fn empty_slice_of_u8() { let 
empty_slice: &[u8] = &[]; assert_eq!(&[0x80], &empty_slice.rlp_bytes().to_vec().as_slice()); } #[test] fn empty_list() { let mut stream = RlpStream::new(); stream.begin_list(0); assert_eq!(vec![0xC0], stream.out()); } #[test] fn tuple() { let tuple: (u32, u32, u32) = (1, 2, 3); rlp_encode_and_decode_test!(tuple); } }
use super::stream::RlpStream; use super::traits::{Decodable, Encodable}; use super::{DecoderError, Rlp}; use primitives::{H128, H160, H256, H384, H512, H520, H768, U256}; use std::iter::{empty, once}; use std::{cmp, mem, str}; pub fn decode_usize(bytes: &[u8]) -> Result<usize, DecoderError> { let expected = mem::size_of::<usize>(); match bytes.len() { l if l <= expected => { if bytes[0] == 0 { return Err(DecoderError::RlpInvalidIndirection) } let mut res = 0usize; for (i, byte) in bytes.iter().enumerate() { let shift = (l - 1 - i) * 8; res += (*byte as usize) << shift; } Ok(res) } got => Err(DecoderError::RlpIsTooBig { expected, got, }), } } impl Encodable for bool { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_iter(once(if *self { 1u8 } else { 0 })); } } impl Decodable for bool { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len() { 0 => Ok(false), 1 => Ok(bytes[0] != 0), got => Err(DecoderError::RlpIsTooBig { expected: 1, got, }), }) } } impl<'a> Encodable for &'a [u8] { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self); } } impl Encodable for Vec<u8> { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self); } } impl Decodable for Vec<u8> { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| Ok(bytes.to_vec())) } } impl Encodable for Vec<Vec<u8>> { fn rlp_append(&self, s: &mut RlpStream) { s.begin_list(self.len()); for e in self { s.append(e); } } } impl Decodable for Vec<Vec<u8>> { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.as_list::<Vec<u8>>() } } impl<T> Encodable for Option<T> where T: Encodable, { fn rlp_append(&self, s: &mut RlpStream) { match *self { None => {
} impl<T> Decodable for Option<T> where T: Decodable, { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { let items = rlp.item_count()?; match items { 1 => rlp.val_at(0).map(Some), 0 => Ok(None), got => Err(DecoderError::RlpIncorrectListLen { expected: 1, got, }), } } } impl Encodable for u8 { fn rlp_append(&self, s: &mut RlpStream) { if *self != 0 { s.encoder().encode_iter(once(*self)); } else { s.encoder().encode_iter(empty()); } } } impl Decodable for u8 { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len() { 1 if bytes[0] != 0 => Ok(bytes[0]), 0 => Ok(0), 1 => Err(DecoderError::RlpInvalidIndirection), got => Err(DecoderError::RlpIsTooBig { expected: 1, got, }), }) } } macro_rules! impl_encodable_for_u { ($name: ident) => { impl Encodable for $name { fn rlp_append(&self, s: &mut RlpStream) { let leading_empty_bytes = self.leading_zeros() as usize / 8; let buffer = self.to_be_bytes(); s.encoder().encode_value(&buffer[leading_empty_bytes..]); } } }; } macro_rules! 
impl_decodable_for_u { ($name: ident) => { impl Decodable for $name { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len() { 0 | 1 => u8::decode(rlp).map($name::from), l if l <= mem::size_of::<$name>() => { if bytes[0] == 0 { return Err(DecoderError::RlpInvalidIndirection) } let mut res = 0 as $name; for (i, byte) in bytes.iter().enumerate() { let shift = (l - 1 - i) * 8; res += $name::from(*byte) << shift; } Ok(res) } got => Err(DecoderError::RlpIsTooBig { expected: mem::size_of::<$name>(), got, }), }) } } }; } impl_encodable_for_u!(u16); impl_encodable_for_u!(u32); impl_encodable_for_u!(u64); impl_encodable_for_u!(u128); impl_decodable_for_u!(u16); impl_decodable_for_u!(u32); impl_decodable_for_u!(u64); impl_decodable_for_u!(u128); impl Encodable for usize { fn rlp_append(&self, s: &mut RlpStream) { (*self as u64).rlp_append(s); } } impl Decodable for usize { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { u64::decode(rlp).map(|value| value as usize) } } macro_rules! impl_encodable_for_hash { ($name: ident) => { impl Encodable for $name { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self); } } }; } macro_rules! 
impl_decodable_for_hash { ($name: ident, $size: expr) => { impl Decodable for $name { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| match bytes.len().cmp(&$size) { cmp::Ordering::Less => Err(DecoderError::RlpIsTooShort { expected: $size, got: bytes.len(), }), cmp::Ordering::Greater => Err(DecoderError::RlpIsTooBig { expected: $size, got: bytes.len(), }), cmp::Ordering::Equal => { let mut t = [0u8; $size]; t.copy_from_slice(bytes); Ok($name(t)) } }) } } }; } impl_encodable_for_hash!(H128); impl_encodable_for_hash!(H160); impl_encodable_for_hash!(H256); impl_encodable_for_hash!(H384); impl_encodable_for_hash!(H512); impl_encodable_for_hash!(H520); impl_encodable_for_hash!(H768); impl_decodable_for_hash!(H128, 16); impl_decodable_for_hash!(H160, 20); impl_decodable_for_hash!(H256, 32); impl_decodable_for_hash!(H384, 48); impl_decodable_for_hash!(H512, 64); impl_decodable_for_hash!(H520, 65); impl_decodable_for_hash!(H768, 96); macro_rules! impl_encodable_for_uint { ($name: ident, $size: expr) => { impl Encodable for $name { fn rlp_append(&self, s: &mut RlpStream) { let leading_empty_bytes = $size - (self.bits() + 7) / 8; let mut buffer = [0u8; $size]; self.to_big_endian(&mut buffer); s.encoder().encode_value(&buffer[leading_empty_bytes..]); } } }; } macro_rules! 
impl_decodable_for_uint { ($name: ident, $size: expr) => { impl Decodable for $name { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| { if !bytes.is_empty() && bytes[0] == 0 { Err(DecoderError::RlpInvalidIndirection) } else if bytes.len() <= $size { Ok($name::from(bytes)) } else { Err(DecoderError::RlpIsTooBig { expected: $size, got: bytes.len(), }) } }) } } }; } impl_encodable_for_uint!(U256, 32); impl_decodable_for_uint!(U256, 32); impl<'a> Encodable for &'a str { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self.as_bytes()); } } impl Encodable for String { fn rlp_append(&self, s: &mut RlpStream) { s.encoder().encode_value(self.as_bytes()); } } impl Decodable for String { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { rlp.decoder().decode_value(|bytes| { if bytes.contains(&b'\0') { return Err(DecoderError::RlpNullTerminatedString) } match str::from_utf8(bytes) { Ok(s) => Ok(s.to_owned()), Err(_err) => Err(DecoderError::RlpExpectedToBeData), } }) } } impl<T1: Encodable, T2: Encodable, T3: Encodable> Encodable for (T1, T2, T3) { fn rlp_append(&self, s: &mut RlpStream) { s.begin_list(3).append(&self.0).append(&self.1).append(&self.2); } } impl<T1: Decodable, T2: Decodable, T3: Decodable> Decodable for (T1, T2, T3) { fn decode(rlp: &Rlp) -> Result<Self, DecoderError> { let item_count = rlp.item_count()?; if item_count != 3 { return Err(DecoderError::RlpIncorrectListLen { expected: 3, got: item_count, }) } Ok((rlp.val_at(0)?, rlp.val_at(1)?, rlp.val_at(2)?)) } } #[macro_export] macro_rules! 
rlp_encode_and_decode_test { ($origin:expr) => { fn rlp_encode_and_decode_test<T>(origin: T) where T: $crate::Encodable + $crate::Decodable + ::std::fmt::Debug + PartialEq, { let encoded = $crate::encode(&origin); let decoded = $crate::decode::<T>(&encoded); assert_eq!(Ok(origin), decoded); } rlp_encode_and_decode_test($origin); }; } #[cfg(test)] mod tests { use super::*; #[test] fn vec_of_bytes() { let origin: Vec<Vec<u8>> = vec![vec![0, 1, 2, 3, 4], vec![5, 6, 7], vec![], vec![8, 9]]; let encoded = crate::encode(&origin); let expected = { let mut s = RlpStream::new(); s.begin_list(4); s.append::<Vec<u8>>(&origin[0]); s.append::<Vec<u8>>(&origin[1]); s.append::<Vec<u8>>(&origin[2]); s.append::<Vec<u8>>(&origin[3]); s.out() }; assert_eq!(expected, encoded.to_vec()); rlp_encode_and_decode_test!(origin); } #[test] fn rlp_zero_h160() { let h = H160::zero(); let encoded = h.rlp_bytes().to_vec(); assert_eq!(&[0x80 + 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], encoded.as_slice()); } #[test] fn vec_and_hash() { let vec: Vec<u8> = { let mut vec = Vec::with_capacity(32); for i in 0..32 { vec.push(i); } vec }; let hash: H256 = { let mut hash = H256::zero(); assert_eq!(32, hash.iter().len()); for (i, h) in hash.iter_mut().enumerate().take(32) { *h = i as u8; } hash }; assert_eq!(vec.rlp_bytes(), hash.rlp_bytes()); } #[test] fn slice_and_hash() { let array: [u8; 32] = { let mut array = [0 as u8; 32]; assert_eq!(32, array.iter().len()); for (i, a) in array.iter_mut().enumerate().take(32) { *a = i as u8; } array }; let slice: &[u8] = &array; let hash: H256 = { let mut hash = H256::zero(); assert_eq!(32, hash.iter().len()); for (i, h) in hash.iter_mut().enumerate().take(32) { *h = i as u8; } hash }; assert_eq!(slice.rlp_bytes(), hash.rlp_bytes()); } #[test] fn empty_bytes() { let empty_bytes: Vec<u8> = vec![]; assert_eq!(&[0x80], &empty_bytes.rlp_bytes().to_vec().as_slice()); rlp_encode_and_decode_test!(empty_bytes); } #[test] fn empty_slice_of_u8() { let 
empty_slice: &[u8] = &[]; assert_eq!(&[0x80], &empty_slice.rlp_bytes().to_vec().as_slice()); } #[test] fn empty_list() { let mut stream = RlpStream::new(); stream.begin_list(0); assert_eq!(vec![0xC0], stream.out()); } #[test] fn tuple() { let tuple: (u32, u32, u32) = (1, 2, 3); rlp_encode_and_decode_test!(tuple); } }
s.begin_list(0); } Some(ref value) => { s.begin_list(1); s.append(value); } } }
function_block-function_prefix_line
[ { "content": "/// Shortcut function to encode structure into rlp.\n\n///\n\n/// ```rust\n\n/// fn main () {\n\n/// \tlet animal = \"cat\";\n\n/// \tlet out = rlp::encode(&animal);\n\n/// \tassert_eq!(out, vec![0x83, b'c', b'a', b't']);\n\n/// }\n\n/// ```\n\npub fn encode<E>(object: &E) -> Vec<u8>\n\nwhere\n\n E: Encodable, {\n\n let mut stream = RlpStream::new();\n\n stream.append_single_value(object);\n\n stream.drain()\n\n}\n\n\n", "file_path": "rlp/src/lib.rs", "rank": 1, "score": 187716.03999490294 }, { "content": "/// Shortcut function to decode trusted rlp\n\n///\n\n/// ```rust\n\n/// fn main () {\n\n/// \tlet data = vec![0x83, b'c', b'a', b't'];\n\n/// \tlet animal: String = rlp::decode(&data).expect(\"could not decode\");\n\n/// \tassert_eq!(animal, \"cat\".to_string());\n\n/// }\n\n/// ```\n\npub fn decode<T>(bytes: &[u8]) -> Result<T, DecoderError>\n\nwhere\n\n T: Decodable, {\n\n let rlp = Rlp::new(bytes);\n\n rlp.as_val()\n\n}\n\n\n", "file_path": "rlp/src/lib.rs", "rank": 2, "score": 167139.7879058525 }, { "content": "pub fn encode_list<E, K>(object: &[K]) -> Vec<u8>\n\nwhere\n\n E: Encodable,\n\n K: Borrow<E>, {\n\n let mut stream = RlpStream::new();\n\n stream.append_list(object);\n\n stream.drain()\n\n}\n", "file_path": "rlp/src/lib.rs", "rank": 3, "score": 157992.54058351004 }, { "content": "pub fn decode_list<T>(bytes: &[u8]) -> Vec<T>\n\nwhere\n\n T: Decodable, {\n\n let rlp = Rlp::new(bytes);\n\n rlp.as_list().expect(\"trusted rlp should be valid\")\n\n}\n\n\n", "file_path": "rlp/src/lib.rs", "rank": 4, "score": 151050.5103438653 }, { "content": "#[bench]\n\nfn bench_decode_u256_value(b: &mut Bencher) {\n\n b.iter(|| {\n\n // u256\n\n let data = vec![\n\n 0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, 0x09, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x77, 0x00,\n\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0,\n\n ];\n\n let rlp = Rlp::new(&data);\n\n let _: U256 = rlp.as_val().unwrap();\n\n 
});\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 5, "score": 131728.3372281128 }, { "content": "fn calculate_payload_info(header_bytes: &[u8], len_of_len: usize) -> Result<PayloadInfo, DecoderError> {\n\n let header_len = 1 + len_of_len;\n\n match header_bytes.get(1) {\n\n Some(&0) => return Err(DecoderError::RlpDataLenWithZeroPrefix),\n\n None => {\n\n return Err(DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0,\n\n })\n\n }\n\n _ => (),\n\n }\n\n if header_bytes.len() < header_len {\n\n return Err(DecoderError::RlpIsTooShort {\n\n expected: header_len,\n\n got: header_bytes.len(),\n\n })\n\n }\n\n let value_len = decode_usize(&header_bytes[1..header_len])?;\n\n if value_len <= 55 {\n", "file_path": "rlp/src/rlpin.rs", "rank": 6, "score": 128617.72569852212 }, { "content": "/// Call this function to compress rlp.\n\npub fn compress(c: &[u8], swapper: &dyn Compressor) -> Vec<u8> {\n\n let rlp = Rlp::new(c);\n\n if rlp.is_data() {\n\n swapper.compressed(rlp.as_raw()).unwrap_or_else(|| rlp.as_raw()).to_vec()\n\n } else {\n\n map_rlp(&rlp, |r| compress(r.as_raw(), swapper))\n\n }\n\n}\n\n\n", "file_path": "rlp-compress/src/lib.rs", "rank": 7, "score": 116671.1979178734 }, { "content": "/// Call this function to decompress rlp.\n\npub fn decompress(c: &[u8], swapper: &dyn Decompressor) -> Vec<u8> {\n\n let rlp = Rlp::new(c);\n\n if rlp.is_data() {\n\n swapper.decompressed(rlp.as_raw()).unwrap_or_else(|| rlp.as_raw()).to_vec()\n\n } else {\n\n map_rlp(&rlp, |r| decompress(r.as_raw(), swapper))\n\n }\n\n}\n\n\n", "file_path": "rlp-compress/src/lib.rs", "rank": 8, "score": 116671.1979178734 }, { "content": "#[bench]\n\nfn bench_decode_1000_values(b: &mut Bencher) {\n\n let mut stream = RlpStream::new_list(1000);\n\n for _ in 0..1000 {\n\n stream.append(&U256::from(1));\n\n }\n\n let data = stream.out();\n\n b.iter(|| {\n\n let rlp = Rlp::new(&data);\n\n for i in 0..1000 {\n\n let _: U256 = rlp.val_at(i).unwrap();\n\n }\n\n });\n\n}\n", "file_path": 
"rlp/benches/rlp.rs", "rank": 9, "score": 111094.9105963991 }, { "content": "pub fn impl_encodable(ast: &syn::DeriveInput) -> TokenStream {\n\n let body = match ast.data {\n\n syn::Data::Struct(ref s) => s,\n\n _ => panic!(\"#[derive(RlpEncodable)] is only defined for structs.\"),\n\n };\n\n\n\n let stmts: Vec<_> = body.fields.iter().enumerate().map(encodable_field_map).collect();\n\n let name = &ast.ident;\n\n\n\n let stmts_len = stmts.len();\n\n let stmts_len = quote! { #stmts_len };\n\n let dummy_const = syn::Ident::new(&format!(\"_IMPL_RLP_ENCODABLE_FOR_{}\", name), Span::call_site());\n\n let impl_block = quote! {\n\n impl rlp::Encodable for #name {\n\n fn rlp_append(&self, stream: &mut rlp::RlpStream) {\n\n stream.begin_list(#stmts_len);\n\n #(#stmts)*\n\n }\n\n }\n\n };\n\n\n\n quote! {\n\n #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]\n\n const #dummy_const: () = {\n\n #impl_block\n\n };\n\n }\n\n}\n\n\n", "file_path": "rlp-derive/src/en.rs", "rank": 10, "score": 110643.34589781608 }, { "content": "pub fn impl_decodable(ast: &syn::DeriveInput) -> TokenStream {\n\n let body = match ast.data {\n\n syn::Data::Struct(ref s) => s,\n\n _ => panic!(\"#[derive(RlpDecodable)] is only defined for structs.\"),\n\n };\n\n\n\n let stmts: Vec<_> = body.fields.iter().enumerate().map(decodable_field_map).collect();\n\n let name = &ast.ident;\n\n\n\n let dummy_const = syn::Ident::new(&format!(\"_IMPL_RLP_DECODABLE_FOR_{}\", name), Span::call_site());\n\n let impl_block = quote! {\n\n impl rlp::Decodable for #name {\n\n fn decode(rlp: &rlp::Rlp) -> Result<Self, rlp::DecoderError> {\n\n let result = #name {\n\n #(#stmts)*\n\n };\n\n\n\n Ok(result)\n\n }\n\n }\n\n };\n\n\n\n quote! 
{\n\n #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]\n\n const #dummy_const: () = {\n\n #impl_block\n\n };\n\n }\n\n}\n\n\n", "file_path": "rlp-derive/src/de.rs", "rank": 11, "score": 110577.71891566798 }, { "content": "#[test]\n\nfn encode_bool() {\n\n let tests = vec![ETestPair(false, vec![0x00]), ETestPair(true, vec![0x01])];\n\n run_encode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 12, "score": 108703.08058843289 }, { "content": "#[test]\n\nfn encode_u256() {\n\n let tests = vec![\n\n ETestPair(U256::from(0u64), vec![0x80u8]),\n\n ETestPair(U256::from(0x0100_0000u64), vec![0x84, 0x01, 0x00, 0x00, 0x00]),\n\n ETestPair(U256::from(0xffff_ffffu64), vec![0x84, 0xff, 0xff, 0xff, 0xff]),\n\n ETestPair(\n\n (\"8090a0b0c0d0e0f00910203040506077000000000000\\\n\n 000100000000000012f0\")\n\n .into(),\n\n vec![\n\n 0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, 0x09, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x77,\n\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0,\n\n ],\n\n ),\n\n ];\n\n run_encode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 13, "score": 108660.43749727611 }, { "content": "#[test]\n\nfn encode_str() {\n\n let tests = vec![\n\n ETestPair(\"cat\", vec![0x83, b'c', b'a', b't']),\n\n ETestPair(\"dog\", vec![0x83, b'd', b'o', b'g']),\n\n ETestPair(\"Marek\", vec![0x85, b'M', b'a', b'r', b'e', b'k']),\n\n ETestPair(\"\", vec![0x80]),\n\n ETestPair(\"Lorem ipsum dolor sit amet, consectetur adipisicing elit\", vec![\n\n 0xb8, 0x38, b'L', b'o', b'r', b'e', b'm', b' ', b'i', b'p', b's', b'u', b'm', b' ', b'd', b'o', b'l', b'o',\n\n b'r', b' ', b's', b'i', b't', b' ', b'a', b'm', b'e', b't', b',', b' ', b'c', b'o', b'n', b's', b'e', b'c',\n\n b't', b'e', b't', b'u', b'r', b' ', b'a', b'd', b'i', b'p', b'i', b's', b'i', b'c', b'i', b'n', b'g', b' ',\n\n b'e', b'l', b'i', b't',\n\n ]),\n\n ];\n\n run_encode_tests(tests);\n\n}\n\n\n", 
"file_path": "rlp/tests/tests.rs", "rank": 14, "score": 108660.43749727611 }, { "content": "#[bench]\n\nfn bench_stream_u256_value(b: &mut Bencher) {\n\n b.iter(|| {\n\n // u256\n\n let mut stream = RlpStream::new();\n\n let uint: U256 = \"8090a0b0c0d0e0f00910203040506077000000000000000100000000000012f0\".into();\n\n stream.append_single_value(&uint);\n\n let _ = stream.out();\n\n });\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 15, "score": 108564.30692818295 }, { "content": "fn map_rlp<F: Fn(&Rlp<'_>) -> Vec<u8>>(rlp: &Rlp<'_>, f: F) -> Vec<u8> {\n\n let mut stream = RlpStream::new_list(rlp.item_count().unwrap_or_default());\n\n for subrlp in rlp.iter() {\n\n stream.append_raw(&f(&subrlp), 1);\n\n }\n\n stream.drain()\n\n}\n\n\n\n/// Stores RLPs used for compression\n\npub struct Swapper<'a> {\n\n compressed_to_rlp: HashMap<&'a [u8], &'a [u8]>,\n\n rlp_to_compressed: HashMap<&'a [u8], &'a [u8]>,\n\n}\n\n\n\nimpl<'a> Swapper<'a> {\n\n /// Construct a swapper from a list of common RLPs\n\n pub fn new(rlps_to_swap: &[&'a [u8]], compressed: &[&'a [u8]]) -> Self {\n\n if rlps_to_swap.len() > 0x7e {\n\n panic!(\"Invalid usage, only 127 RLPs can be swappable.\");\n\n }\n", "file_path": "rlp-compress/src/lib.rs", "rank": 16, "score": 108370.27061625884 }, { "content": "#[bench]\n\nfn bench_decode_u64_value(b: &mut Bencher) {\n\n b.iter(|| {\n\n // u64\n\n let data = vec![0x88, 0x10, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef];\n\n let rlp = Rlp::new(&data);\n\n let _: u64 = rlp.as_val().unwrap();\n\n });\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 17, "score": 108090.27873950513 }, { "content": "pub fn impl_encodable_wrapper(ast: &syn::DeriveInput) -> TokenStream {\n\n let body = match ast.data {\n\n syn::Data::Struct(ref s) => s,\n\n _ => panic!(\"#[derive(RlpEncodableWrapper)] is only defined for structs.\"),\n\n };\n\n\n\n let stmt = {\n\n let fields: Vec<_> = body.fields.iter().collect();\n\n if fields.len() == 1 {\n\n let field = 
fields.first().expect(\"fields.len() == 1; qed\");\n\n encodable_field(0, field)\n\n } else {\n\n panic!(\"#[derive(RlpEncodableWrapper)] is only defined for structs with one field.\")\n\n }\n\n };\n\n\n\n let name = &ast.ident;\n\n\n\n let dummy_const = syn::Ident::new(&format!(\"_IMPL_RLP_ENCODABLE_FOR_{}\", name), Span::call_site());\n\n let impl_block = quote! {\n", "file_path": "rlp-derive/src/en.rs", "rank": 18, "score": 107907.50827080115 }, { "content": "pub fn impl_decodable_wrapper(ast: &syn::DeriveInput) -> TokenStream {\n\n let body = match ast.data {\n\n syn::Data::Struct(ref s) => s,\n\n _ => panic!(\"#[derive(RlpDecodableWrapper)] is only defined for structs.\"),\n\n };\n\n\n\n let stmt = {\n\n let fields: Vec<_> = body.fields.iter().collect();\n\n if fields.len() == 1 {\n\n let field = fields.first().expect(\"fields.len() == 1; qed\");\n\n decodable_field(0, field, decodable_wrapper_parse_quotes())\n\n } else {\n\n panic!(\"#[derive(RlpEncodableWrapper)] is only defined for structs with one field.\")\n\n }\n\n };\n\n\n\n let name = &ast.ident;\n\n\n\n let dummy_const = syn::Ident::new(&format!(\"_IMPL_RLP_DECODABLE_FOR_{}\", name), Span::call_site());\n\n let impl_block = quote! 
{\n", "file_path": "rlp-derive/src/de.rs", "rank": 19, "score": 107844.08166112528 }, { "content": "#[bench]\n\nfn bench_decode_nested_empty_lists(b: &mut Bencher) {\n\n b.iter(|| {\n\n // [ [], [[]], [ [], [[]] ] ]\n\n let data = vec![0xc7, 0xc0, 0xc1, 0xc0, 0xc3, 0xc0, 0xc1, 0xc0];\n\n let rlp = Rlp::new(&data);\n\n let _v0: Vec<u16> = rlp.at(0).unwrap().as_list().unwrap();\n\n let _v1: Vec<u16> = rlp.at(1).unwrap().at(0).unwrap().as_list().unwrap();\n\n let nested_rlp = rlp.at(2).unwrap();\n\n let _v2a: Vec<u16> = nested_rlp.at(0).unwrap().as_list().unwrap();\n\n let _v2b: Vec<u16> = nested_rlp.at(1).unwrap().at(0).unwrap().as_list().unwrap();\n\n });\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 20, "score": 105315.60575775875 }, { "content": "#[test]\n\nfn encode_vector_str() {\n\n let tests = vec![VETestPair(vec![\"cat\", \"dog\"], vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'])];\n\n run_encode_tests_list(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 21, "score": 104737.29254743808 }, { "content": "#[test]\n\nfn decode_untrusted_bool() {\n\n let tests = vec![DTestPair(false, vec![0x00]), DTestPair(true, vec![0x01])];\n\n run_decode_tests(tests);\n\n}\n\n\n\n/// Vec<u8> (Bytes) is treated as a single value\n", "file_path": "rlp/tests/tests.rs", "rank": 22, "score": 104691.30646454115 }, { "content": "#[test]\n\nfn decode_untrusted_u256() {\n\n let tests = vec![\n\n DTestPair(U256::from(0u64), vec![0x80u8]),\n\n DTestPair(U256::from(0x0100_0000u64), vec![0x84, 0x01, 0x00, 0x00, 0x00]),\n\n DTestPair(U256::from(0xffff_ffffu64), vec![0x84, 0xff, 0xff, 0xff, 0xff]),\n\n DTestPair(\n\n (\"8090a0b0c0d0e0f00910203040506077000000000000\\\n\n 000100000000000012f0\")\n\n .into(),\n\n vec![\n\n 0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, 0x09, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x77,\n\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0,\n\n ],\n\n ),\n\n ];\n\n 
run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 23, "score": 104650.61720228831 }, { "content": "#[test]\n\nfn decode_untrusted_str() {\n\n let tests = vec![\n\n DTestPair(\"cat\".to_string(), vec![0x83, b'c', b'a', b't']),\n\n DTestPair(\"dog\".to_string(), vec![0x83, b'd', b'o', b'g']),\n\n DTestPair(\"Marek\".to_string(), vec![0x85, b'M', b'a', b'r', b'e', b'k']),\n\n DTestPair(\"\".to_string(), vec![0x80]),\n\n DTestPair(\"Lorem ipsum dolor sit amet, consectetur adipisicing elit\".to_string(), vec![\n\n 0xb8, 0x38, b'L', b'o', b'r', b'e', b'm', b' ', b'i', b'p', b's', b'u', b'm', b' ', b'd', b'o', b'l', b'o',\n\n b'r', b' ', b's', b'i', b't', b' ', b'a', b'm', b'e', b't', b',', b' ', b'c', b'o', b'n', b's', b'e', b'c',\n\n b't', b'e', b't', b'u', b'r', b' ', b'a', b'd', b'i', b'p', b'i', b's', b'i', b'c', b'i', b'n', b'g', b' ',\n\n b'e', b'l', b'i', b't',\n\n ]),\n\n ];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 24, "score": 104650.61720228831 }, { "content": "#[test]\n\nfn encode_vector_u8() {\n\n let tests = vec![\n\n ETestPair(vec![], vec![0x80]),\n\n ETestPair(vec![0u8], vec![0]),\n\n ETestPair(vec![0x15], vec![0x15]),\n\n ETestPair(vec![0x40, 0x00], vec![0x82, 0x40, 0x00]),\n\n ];\n\n run_encode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 25, "score": 104495.0642438786 }, { "content": "#[test]\n\nfn decode_untrusted_u8() {\n\n let tests = vec![DTestPair(0x0u8, vec![0x80]), DTestPair(0x77u8, vec![0x77]), DTestPair(0xccu8, vec![0x81, 0xcc])];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 26, "score": 104408.38889872882 }, { "content": "#[test]\n\nfn decode_vector_u8() {\n\n let tests = vec![\n\n DTestPair(vec![], vec![0x80]),\n\n DTestPair(vec![0u8], vec![0]),\n\n DTestPair(vec![0x15], vec![0x15]),\n\n DTestPair(vec![0x40, 0x00], vec![0x82, 0x40, 0x00]),\n\n ];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": 
"rlp/tests/tests.rs", "rank": 27, "score": 104408.38889872882 }, { "content": "#[test]\n\nfn decode_untrusted_vector_str() {\n\n let tests = vec![VDTestPair(vec![\"cat\".to_string(), \"dog\".to_string()], vec![\n\n 0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g',\n\n ])];\n\n run_decode_tests_list(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 28, "score": 101075.02219015826 }, { "content": "#[proc_macro_derive(RlpEncodable)]\n\npub fn encodable(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n let gen = impl_encodable(&ast);\n\n gen.into()\n\n}\n\n\n", "file_path": "rlp-derive/src/lib.rs", "rank": 29, "score": 97881.09494842691 }, { "content": "#[proc_macro_derive(RlpDecodable)]\n\npub fn decodable(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n let gen = impl_decodable(&ast);\n\n gen.into()\n\n}\n\n\n", "file_path": "rlp-derive/src/lib.rs", "rank": 30, "score": 97810.52673261493 }, { "content": "#[proc_macro_derive(RlpEncodableWrapper)]\n\npub fn encodable_wrapper(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n let gen = impl_encodable_wrapper(&ast);\n\n gen.into()\n\n}\n\n\n", "file_path": "rlp-derive/src/lib.rs", "rank": 31, "score": 95502.09240301058 }, { "content": "#[proc_macro_derive(RlpDecodableWrapper)]\n\npub fn decodable_wrapper(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n let gen = impl_decodable_wrapper(&ast);\n\n gen.into()\n\n}\n", "file_path": "rlp-derive/src/lib.rs", "rank": 32, "score": 95434.06007419314 }, { "content": "#[bench]\n\nfn bench_stream_u64_value(b: &mut Bencher) {\n\n b.iter(|| {\n\n // u64\n\n let mut stream = RlpStream::new();\n\n stream.append_single_value(&0x1023_4567_89ab_cdefu64);\n\n let _ = stream.out();\n\n });\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 33, "score": 84926.24843957525 }, { "content": "#[bench]\n\nfn bench_stream_1000_empty_lists(b: &mut 
Bencher) {\n\n b.iter(|| {\n\n let mut stream = RlpStream::new_list(1000);\n\n for _ in 0..1000 {\n\n stream.begin_list(0);\n\n }\n\n let _ = stream.out();\n\n });\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 34, "score": 84926.24843957525 }, { "content": "#[bench]\n\nfn bench_stream_nested_empty_lists(b: &mut Bencher) {\n\n b.iter(|| {\n\n // [ [], [[]], [ [], [[]] ] ]\n\n let mut stream = RlpStream::new_list(3);\n\n stream.begin_list(0);\n\n stream.begin_list(1).begin_list(0);\n\n stream.begin_list(2).begin_list(0).begin_list(1).begin_list(0);\n\n let _ = stream.out();\n\n });\n\n}\n\n\n", "file_path": "rlp/benches/rlp.rs", "rank": 35, "score": 83048.52315844082 }, { "content": "/// Structure encodable to RLP\n\npub trait Encodable {\n\n /// Append a value to the stream\n\n fn rlp_append(&self, s: &mut RlpStream);\n\n\n\n /// Get rlp-encoded bytes for this instance\n\n fn rlp_bytes(&self) -> Vec<u8> {\n\n let mut s = RlpStream::new();\n\n self.rlp_append(&mut s);\n\n s.drain()\n\n }\n\n}\n", "file_path": "rlp/src/traits.rs", "rank": 36, "score": 82457.40260339656 }, { "content": "#[test]\n\nfn encode_u32() {\n\n let tests = vec![\n\n ETestPair(0u32, vec![0x80u8]),\n\n ETestPair(0x0001_0000, vec![0x83, 0x01, 0x00, 0x00]),\n\n ETestPair(0x00ff_ffff, vec![0x83, 0xff, 0xff, 0xff]),\n\n ];\n\n run_encode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 37, "score": 80482.14216481999 }, { "content": "#[test]\n\nfn encode_u64() {\n\n let tests = vec![\n\n ETestPair(0u64, vec![0x80u8]),\n\n ETestPair(0x0100_0000, vec![0x84, 0x01, 0x00, 0x00, 0x00]),\n\n ETestPair(0xFFFF_FFFF, vec![0x84, 0xff, 0xff, 0xff, 0xff]),\n\n ];\n\n run_encode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 38, "score": 80482.14216481999 }, { "content": "#[test]\n\nfn encode_address() {\n\n let tests = vec![ETestPair(H160::from(\"ef2d6d194084c2de36e0dabfce45d046b37d1106\"), vec![\n\n 0x94, 0xef, 0x2d, 0x6d, 0x19, 0x40, 0x84, 0xc2, 0xde, 0x36, 
0xe0, 0xda, 0xbf, 0xce, 0x45, 0xd0, 0x46, 0xb3,\n\n 0x7d, 0x11, 0x06,\n\n ])];\n\n run_encode_tests(tests);\n\n}\n\n\n\n/// Vec<u8> (Bytes) is treated as a single value\n", "file_path": "rlp/tests/tests.rs", "rank": 39, "score": 80482.14216481999 }, { "content": "#[test]\n\nfn encode_u16() {\n\n let tests = vec![\n\n ETestPair(0u16, vec![0x80u8]),\n\n ETestPair(0x100, vec![0x82, 0x01, 0x00]),\n\n ETestPair(0xffff, vec![0x82, 0xff, 0xff]),\n\n ];\n\n run_encode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 40, "score": 80482.14216481999 }, { "content": "pub fn snapshot_swapper() -> &'static Swapper<'static> {\n\n &SNAPSHOT_SWAPPER as &Swapper<'_>\n\n}\n\n\n", "file_path": "rlp-compress/src/lib.rs", "rank": 41, "score": 78097.29148144832 }, { "content": "pub fn blocks_swapper() -> &'static Swapper<'static> {\n\n &BLOCKS_SWAPPER as &Swapper<'_>\n\n}\n\n\n", "file_path": "rlp-compress/src/lib.rs", "rank": 42, "score": 78097.29148144832 }, { "content": "#[test]\n\nfn encode_vector_u64() {\n\n let tests = vec![\n\n VETestPair(vec![], vec![0xc0]),\n\n VETestPair(vec![15u64], vec![0xc1, 0x0f]),\n\n VETestPair(vec![1, 2, 3, 7, 0xff], vec![0xc6, 1, 2, 3, 7, 0x81, 0xff]),\n\n VETestPair(vec![0xffff_ffff, 1, 2, 3, 7, 0xff], vec![\n\n 0xcb, 0x84, 0xff, 0xff, 0xff, 0xff, 1, 2, 3, 7, 0x81, 0xff,\n\n ]),\n\n ];\n\n run_encode_tests_list(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 43, "score": 77850.07547385085 }, { "content": "#[test]\n\nfn decode_untrusted_address() {\n\n let tests = vec![DTestPair(H160::from(\"ef2d6d194084c2de36e0dabfce45d046b37d1106\"), vec![\n\n 0x94, 0xef, 0x2d, 0x6d, 0x19, 0x40, 0x84, 0xc2, 0xde, 0x36, 0xe0, 0xda, 0xbf, 0xce, 0x45, 0xd0, 0x46, 0xb3,\n\n 0x7d, 0x11, 0x06,\n\n ])];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 44, "score": 77763.40012870108 }, { "content": "#[test]\n\nfn decode_untrusted_u32() {\n\n let tests = vec![\n\n DTestPair(0x10000u32, vec![0x83, 0x01, 
0x00, 0x00]),\n\n DTestPair(0x00ff_ffffu32, vec![0x83, 0xff, 0xff, 0xff]),\n\n ];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 45, "score": 77763.40012870108 }, { "content": "#[test]\n\nfn decode_untrusted_u16() {\n\n let tests = vec![DTestPair(0x100u16, vec![0x82, 0x01, 0x00]), DTestPair(0xffffu16, vec![0x82, 0xff, 0xff])];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 46, "score": 77763.40012870108 }, { "content": "#[test]\n\nfn decode_untrusted_u64() {\n\n let tests = vec![\n\n DTestPair(0x0100_0000u64, vec![0x84, 0x01, 0x00, 0x00, 0x00]),\n\n DTestPair(0xFFFF_FFFFu64, vec![0x84, 0xff, 0xff, 0xff, 0xff]),\n\n ];\n\n run_decode_tests(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 47, "score": 77763.40012870108 }, { "content": "/// RLP decodable trait\n\npub trait Decodable: Sized {\n\n /// Decode a value from RLP bytes\n\n fn decode(rlp: &Rlp) -> Result<Self, DecoderError>;\n\n}\n\n\n", "file_path": "rlp/src/traits.rs", "rank": 48, "score": 77069.43031469858 }, { "content": "#[test]\n\nfn test_encode_foo() {\n\n #[allow(clippy::blacklisted_name)]\n\n let foo = Foo {\n\n a: \"cat\".into(),\n\n };\n\n\n\n let expected = vec![0xc4, 0x83, b'c', b'a', b't'];\n\n let out = encode(&foo);\n\n assert_eq!(out, expected);\n\n\n\n let decoded = decode(&expected).expect(\"decode failure\");\n\n assert_eq!(foo, decoded);\n\n}\n\n\n", "file_path": "rlp-derive/tests/rlp.rs", "rank": 49, "score": 76626.92998377823 }, { "content": "#[test]\n\nfn test_canonical_string_encoding() {\n\n assert_ne!(\n\n Rlp::new(&[0xc0 + 4, 0xb7 + 1, 2, b'a', b'b']).val_at::<String>(0),\n\n Rlp::new(&[0xc0 + 3, 0x82, b'a', b'b']).val_at::<String>(0)\n\n );\n\n\n\n assert_eq!(\n\n Rlp::new(&[0xc0 + 4, 0xb7 + 1, 2, b'a', b'b']).val_at::<String>(0),\n\n Err(DecoderError::RlpInvalidIndirection)\n\n );\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 50, "score": 75448.63493243814 }, { "content": 
"#[test]\n\nfn test_canonical_list_encoding() {\n\n assert_ne!(\n\n Rlp::new(&[0xc0 + 3, 0x82, b'a', b'b']).val_at::<String>(0),\n\n Rlp::new(&[0xf7 + 1, 3, 0x82, b'a', b'b']).val_at::<String>(0)\n\n );\n\n\n\n assert_eq!(\n\n Rlp::new(&[0xf7 + 1, 3, 0x82, b'a', b'b']).val_at::<String>(0),\n\n Err(DecoderError::RlpInvalidIndirection)\n\n );\n\n}\n\n\n\n// test described in\n\n//\n\n// https://github.com/paritytech/parity-common/issues/105\n", "file_path": "rlp/tests/tests.rs", "rank": 51, "score": 75448.63493243814 }, { "content": "#[test]\n\nfn decode_untrusted_vector_u64() {\n\n let tests = vec![\n\n VDTestPair(vec![], vec![0xc0]),\n\n VDTestPair(vec![15u64], vec![0xc1, 0x0f]),\n\n VDTestPair(vec![1, 2, 3, 7, 0xff], vec![0xc6, 1, 2, 3, 7, 0x81, 0xff]),\n\n VDTestPair(vec![0xffff_ffff, 1, 2, 3, 7, 0xff], vec![\n\n 0xcb, 0x84, 0xff, 0xff, 0xff, 0xff, 1, 2, 3, 7, 0x81, 0xff,\n\n ]),\n\n ];\n\n run_decode_tests_list(tests);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 52, "score": 75365.7569076948 }, { "content": "fn run_encode_tests<T>(tests: Vec<ETestPair<T>>)\n\nwhere\n\n T: Encodable, {\n\n for t in &tests {\n\n let res = rlp::encode(&t.0);\n\n assert_eq!(&res[..], &t.1[..]);\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 53, "score": 75307.9941433225 }, { "content": "fn encodable_field_map(tuple: (usize, &syn::Field)) -> TokenStream {\n\n encodable_field(tuple.0, tuple.1)\n\n}\n\n\n", "file_path": "rlp-derive/src/en.rs", "rank": 54, "score": 75277.74338989292 }, { "content": "fn decodable_field_map(tuple: (usize, &syn::Field)) -> TokenStream {\n\n decodable_field(tuple.0, tuple.1, decodable_parse_quotes())\n\n}\n\n\n", "file_path": "rlp-derive/src/de.rs", "rank": 55, "score": 75212.1164077448 }, { "content": "#[test]\n\nfn rlp_2bytes_data_length_check() {\n\n let mut data: Vec<u8> = vec![0xb9, 2, 255]; // 512+255\n\n for _ in 0..700 {\n\n data.push(b'c');\n\n }\n\n\n\n let rlp = Rlp::new(&data);\n\n\n\n let as_val: Result<String, 
DecoderError> = rlp.as_val();\n\n assert_eq!(\n\n Err(DecoderError::RlpInconsistentLengthAndData {\n\n max: 703,\n\n index: 770\n\n }),\n\n as_val\n\n );\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 56, "score": 74830.98258436708 }, { "content": "#[test]\n\nfn test_encode_foo_wrapper() {\n\n #[allow(clippy::blacklisted_name)]\n\n let foo = FooWrapper {\n\n a: \"cat\".into(),\n\n };\n\n\n\n let expected = vec![0x83, b'c', b'a', b't'];\n\n let out = encode(&foo);\n\n assert_eq!(out, expected);\n\n\n\n let decoded = decode(&expected).expect(\"decode failure\");\n\n assert_eq!(foo, decoded);\n\n}\n", "file_path": "rlp-derive/tests/rlp.rs", "rank": 57, "score": 74434.77594057925 }, { "content": "#[test]\n\nfn rlp_nested_empty_list_encode() {\n\n let mut stream = RlpStream::new_list(2);\n\n stream.append_list(&(Vec::new() as Vec<u32>));\n\n stream.append(&40u32);\n\n assert_eq!(stream.drain()[..], [0xc2u8, 0xc0u8, 40u8][..]);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 58, "score": 74434.77594057925 }, { "content": "fn encodable_field(index: usize, field: &syn::Field) -> TokenStream {\n\n let ident = match field.ident {\n\n Some(ref ident) => quote! { #ident },\n\n None => {\n\n let index: syn::Index = index.into();\n\n quote! { #index }\n\n }\n\n };\n\n\n\n let id = quote! { self.#ident };\n\n\n\n match field.ty {\n\n syn::Type::Path(ref path) => {\n\n let top_segment = path.path.segments.first().expect(\"there must be at least 1 segment\");\n\n let ident = &top_segment.value().ident;\n\n if &ident.to_string() == \"Vec\" {\n\n let inner_ident = match top_segment.value().arguments {\n\n syn::PathArguments::AngleBracketed(ref angle) => {\n\n let ty = angle.args.first().expect(\"Vec has only one angle bracketed type; qed\");\n\n match **ty.value() {\n", "file_path": "rlp-derive/src/en.rs", "rank": 59, "score": 74244.28097757054 }, { "content": "fn decodable_parse_quotes() -> ParseQuotes {\n\n ParseQuotes {\n\n single: quote! 
{ rlp.val_at },\n\n list: quote! { rlp.list_at },\n\n takes_index: true,\n\n }\n\n}\n\n\n", "file_path": "rlp-derive/src/de.rs", "rank": 60, "score": 68274.70994689819 }, { "content": "fn decodable_field(index: usize, field: &syn::Field, quotes: ParseQuotes) -> TokenStream {\n\n let id = match field.ident {\n\n Some(ref ident) => quote! { #ident },\n\n None => {\n\n let index: syn::Index = index.into();\n\n quote! { #index }\n\n }\n\n };\n\n\n\n let index = quote! { #index };\n\n\n\n let single = quotes.single;\n\n let list = quotes.list;\n\n\n\n match field.ty {\n\n syn::Type::Path(ref path) => {\n\n let ident = &path.path.segments.first().expect(\"there must be at least 1 segment\").value().ident;\n\n if &ident.to_string() == \"Vec\" {\n\n if quotes.takes_index {\n\n quote! { #id: #list(#index)?, }\n", "file_path": "rlp-derive/src/de.rs", "rank": 61, "score": 66702.56993809626 }, { "content": "fn decodable_wrapper_parse_quotes() -> ParseQuotes {\n\n ParseQuotes {\n\n single: quote! { rlp.as_val },\n\n list: quote! 
{ rlp.as_list },\n\n takes_index: false,\n\n }\n\n}\n\n\n", "file_path": "rlp-derive/src/de.rs", "rank": 62, "score": 66411.67139716334 }, { "content": "struct ETestPair<T>(T, Vec<u8>)\n\nwhere\n\n T: Encodable;\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 63, "score": 65325.32214606885 }, { "content": "#[test]\n\nfn rlp_at() {\n\n let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];\n\n {\n\n let rlp = Rlp::new(&data);\n\n assert!(rlp.is_list());\n\n let animals: Vec<String> = rlp.as_list().unwrap();\n\n assert_eq!(animals, vec![\"cat\".to_string(), \"dog\".to_string()]);\n\n\n\n let cat = rlp.at(0).unwrap();\n\n assert!(cat.is_data());\n\n assert_eq!(cat.as_raw(), &[0x83, b'c', b'a', b't']);\n\n assert_eq!(cat.as_val::<String>().unwrap(), \"cat\".to_string());\n\n\n\n let dog = rlp.at(1).unwrap();\n\n assert!(dog.is_data());\n\n assert_eq!(dog.as_raw(), &[0x83, b'd', b'o', b'g']);\n\n assert_eq!(dog.as_val::<String>().unwrap(), \"dog\".to_string());\n\n\n\n let cat_again = rlp.at(0).unwrap();\n\n assert!(cat_again.is_data());\n\n assert_eq!(cat_again.as_raw(), &[0x83, b'c', b'a', b't']);\n\n assert_eq!(cat_again.as_val::<String>().unwrap(), \"cat\".to_string());\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 64, "score": 55401.750005945825 }, { "content": "fn run_decode_tests<T>(tests: Vec<DTestPair<T>>)\n\nwhere\n\n T: Decodable + fmt::Debug + cmp::Eq, {\n\n for t in &tests {\n\n let res: Result<T, DecoderError> = rlp::decode(&t.1);\n\n assert_eq!(res.as_ref(), Ok(&t.0));\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 65, "score": 54884.47978442197 }, { "content": "#[test]\n\nfn rlp_at_err() {\n\n let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o'];\n\n {\n\n let rlp = Rlp::new(&data);\n\n assert!(rlp.is_list());\n\n\n\n let cat_err = rlp.at(0).unwrap_err();\n\n assert_eq!(cat_err, DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0\n\n });\n\n\n\n let dog_err = rlp.at(1).unwrap_err();\n\n 
assert_eq!(dog_err, DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0\n\n });\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 66, "score": 53926.291922212054 }, { "content": "#[test]\n\nfn rlp_iter() {\n\n let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];\n\n {\n\n let rlp = Rlp::new(&data);\n\n let mut iter = rlp.iter();\n\n\n\n let cat = iter.next().unwrap();\n\n assert!(cat.is_data());\n\n assert_eq!(cat.as_raw(), &[0x83, b'c', b'a', b't']);\n\n\n\n let dog = iter.next().unwrap();\n\n assert!(dog.is_data());\n\n assert_eq!(dog.as_raw(), &[0x83, b'd', b'o', b'g']);\n\n\n\n let none = iter.next();\n\n assert!(none.is_none());\n\n\n\n let cat_again = rlp.at(0).unwrap();\n\n assert!(cat_again.is_data());\n\n assert_eq!(cat_again.as_raw(), &[0x83, b'c', b'a', b't']);\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 67, "score": 53926.291922212054 }, { "content": "fn run_encode_tests_list<T>(tests: Vec<VETestPair<T>>)\n\nwhere\n\n T: Encodable, {\n\n for t in &tests {\n\n let res = rlp::encode_list(&t.0);\n\n assert_eq!(&res[..], &t.1[..]);\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 68, "score": 53558.58245587538 }, { "content": "fn run_decode_tests_list<T>(tests: Vec<VDTestPair<T>>)\n\nwhere\n\n T: Decodable + fmt::Debug + cmp::Eq, {\n\n for t in &tests {\n\n let res: Vec<T> = rlp::decode_list(&t.1);\n\n assert_eq!(res, t.0);\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 69, "score": 53495.155846199516 }, { "content": "/// A trait used to compress rlp.\n\npub trait Compressor {\n\n /// Get compressed version of given rlp.\n\n fn compressed(&self, rlp: &[u8]) -> Option<&[u8]>;\n\n}\n\n\n", "file_path": "rlp-compress/src/lib.rs", "rank": 70, "score": 53316.30093315683 }, { "content": "/// A trait used to convert compressed rlp into it's original version.\n\npub trait Decompressor {\n\n /// Get decompressed rlp.\n\n fn decompressed(&self, compressed: &[u8]) -> 
Option<&[u8]>;\n\n}\n\n\n", "file_path": "rlp-compress/src/lib.rs", "rank": 71, "score": 53315.68409759548 }, { "content": "#[test]\n\nfn test_list_at() {\n\n let raw = hex!(\"f83e82022bd79020010db83c4d001500000000abcdef12820cfa8215a8d79020010db885a308d313198a2e037073488208ae82823a8443b9a355c5010203040531b9019afde696e582a78fa8d95ea13ce3297d4afb8ba6433e4154caa5ac6431af1b80ba76023fa4090c408f6b4bc3701562c031041d4702971d102c9ab7fa5eed4cd6bab8f7af956f7d565ee1917084a95398b6a21eac920fe3dd1345ec0a7ef39367ee69ddf092cbfe5b93e5e568ebc491983c09c76d922dc3\");\n\n\n\n let rlp = Rlp::new(&raw);\n\n let _rlp1 = rlp.at(1).unwrap();\n\n let rlp2 = rlp.at(2).unwrap();\n\n assert_eq!(rlp2.val_at::<u16>(2).unwrap(), 33338);\n\n}\n", "file_path": "rlp/tests/tests.rs", "rank": 72, "score": 52778.085771015685 }, { "content": "#[test]\n\nfn test_rlp_is_int() {\n\n for b in 0xb8..0xc0 {\n\n let data: Vec<u8> = vec![b];\n\n let rlp = Rlp::new(&data);\n\n assert_eq!(rlp.is_int(), false);\n\n }\n\n}\n\n\n\n/// test described in\n\n///\n\n/// https://github.com/paritytech/parity-common/issues/48\n", "file_path": "rlp/tests/tests.rs", "rank": 73, "score": 52581.413899797015 }, { "content": "#[test]\n\nfn malformed_rlp() {\n\n let malformed = vec![\n\n 248, 81, 128, 128, 128, 128, 128, 160, 12, 51, 241, 93, 69, 218, 74, 138, 79, 115, 227, 44, 216, 81, 46, 132,\n\n 85, 235, 96, 45, 252, 48, 181, 29, 75, 141, 217, 215, 86, 160, 109, 130, 160, 140, 36, 93, 200, 109, 215, 100,\n\n 241, 246, 99, 135, 92, 168, 149, 170, 114, 9, 143, 4, 93, 25, 76, 54, 176, 119, 230, 170, 154, 105, 47, 121,\n\n 10, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,\n\n ];\n\n assert_eq!(decompress(&malformed, blocks_swapper()), malformed);\n\n}\n\n\n", "file_path": "rlp-compress/tests/compress.rs", "rank": 74, "score": 52581.413899797015 }, { "content": "#[test]\n\nfn large_block() {\n\n let block = vec![\n\n 249, 97, 87, 249, 2, 19, 160, 137, 152, 36, 115, 234, 67, 89, 207, 44, 42, 186, 128, 91, 242, 10, 16, 42, 
193,\n\n 195, 2, 129, 60, 181, 150, 192, 178, 117, 15, 18, 100, 174, 249, 160, 29, 204, 77, 232, 222, 199, 93, 122, 171,\n\n 133, 181, 103, 182, 204, 212, 26, 211, 18, 69, 27, 148, 138, 116, 19, 240, 161, 66, 253, 64, 212, 147, 71, 148,\n\n 223, 125, 126, 5, 57, 51, 181, 204, 36, 55, 47, 135, 140, 144, 230, 45, 173, 173, 93, 66, 160, 93, 42, 52, 28,\n\n 156, 139, 242, 60, 121, 90, 117, 99, 92, 182, 196, 25, 131, 16, 155, 186, 239, 137, 33, 118, 105, 232, 230,\n\n 239, 213, 240, 207, 6, 160, 59, 72, 35, 216, 124, 37, 62, 178, 34, 97, 180, 254, 212, 103, 179, 45, 247, 168,\n\n 205, 145, 7, 157, 75, 247, 83, 230, 233, 248, 97, 132, 232, 161, 160, 122, 167, 249, 196, 203, 2, 173, 180,\n\n 106, 203, 129, 214, 232, 181, 87, 39, 60, 99, 135, 6, 40, 34, 163, 118, 140, 149, 79, 241, 238, 230, 201, 194,\n\n 185, 1, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 32,\n\n 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 4, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 134, 36, 160, 31, 187, 182, 29, 131, 58, 212, 207, 131, 71, 168, 13, 131, 45, 60, 7, 132, 89, 53, 51,\n\n 233, 147, 69, 84, 67, 32, 101, 116, 104, 101, 114, 109, 105, 110, 101, 32, 45, 32, 69, 85, 49, 160, 204, 49,\n\n 229, 99, 26, 47, 30, 50, 223, 117, 111, 168, 102, 158, 12, 186, 140, 98, 193, 196, 214, 29, 13, 87, 44, 31,\n", "file_path": 
"rlp-compress/tests/compress.rs", "rank": 75, "score": 51415.368572999054 }, { "content": "#[test]\n\nfn non_canonical_string() {\n\n assert_eq!(\n\n Rlp::new(&[0xf7 + 1, 3, 0x80 + 2, b'a', b'b']).val_at::<String>(0),\n\n Err(DecoderError::RlpInvalidIndirection)\n\n );\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 76, "score": 51415.368572999054 }, { "content": "#[test]\n\nfn the_exact_long_string() {\n\n let mut data: Vec<u8> = vec![0xb8, 255];\n\n for _ in 0..255 {\n\n data.push(b'c');\n\n }\n\n\n\n let rlp = Rlp::new(&data);\n\n\n\n let as_val: Result<String, DecoderError> = rlp.as_val();\n\n assert!(as_val.is_ok());\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 77, "score": 51415.368572999054 }, { "content": "#[test]\n\nfn simple_compression() {\n\n let basic_account_rlp = vec![\n\n 248, 68, 4, 2, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27,\n\n 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125,\n\n 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112,\n\n ];\n\n let compressed = compress(&basic_account_rlp, snapshot_swapper());\n\n assert_eq!(compressed.to_vec(), vec![198, 4, 2, 129, 0, 129, 1]);\n\n let decompressed = decompress(&compressed, snapshot_swapper());\n\n assert_eq!(decompressed.to_vec(), basic_account_rlp);\n\n}\n\n\n", "file_path": "rlp-compress/tests/compress.rs", "rank": 78, "score": 51415.368572999054 }, { "content": "#[test]\n\nfn null_terminated_string() {\n\n let data: Vec<u8> = vec![0x84, b'd', b'o', b'g', b'\\0'];\n\n let rlp = Rlp::new(&data);\n\n let as_val: Result<String, DecoderError> = rlp.as_val();\n\n assert_eq!(Err(DecoderError::RlpNullTerminatedString), as_val);\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 79, "score": 51415.368572999054 }, { "content": "#[test]\n\nfn rlp_data_length_check() {\n\n let data = vec![0x84, b'c', b'a', b't'];\n\n let rlp = 
Rlp::new(&data);\n\n\n\n let as_val: Result<String, DecoderError> = rlp.as_val();\n\n assert_eq!(\n\n Err(DecoderError::RlpInconsistentLengthAndData {\n\n max: 4,\n\n index: 5\n\n }),\n\n as_val\n\n );\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 80, "score": 51350.35001947279 }, { "content": "#[test]\n\nfn nested_list_rlp() {\n\n let nested_basic_account_rlp = vec![\n\n 228, 4, 226, 2, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27,\n\n 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33,\n\n ];\n\n\n\n let compressed = compress(&nested_basic_account_rlp, blocks_swapper());\n\n assert_eq!(compressed, vec![197, 4, 195, 2, 129, 0]);\n\n let decompressed = decompress(&compressed, blocks_swapper());\n\n assert_eq!(decompressed, nested_basic_account_rlp);\n\n let compressed = compress(&nested_basic_account_rlp, snapshot_swapper());\n\n assert_eq!(compressed, vec![197, 4, 195, 2, 129, 0]);\n\n let decompressed = decompress(&compressed, snapshot_swapper());\n\n assert_eq!(decompressed, nested_basic_account_rlp);\n\n}\n\n\n", "file_path": "rlp-compress/tests/compress.rs", "rank": 81, "score": 51350.35001947279 }, { "content": "#[test]\n\nfn invalid_rlp_swapper() {\n\n let to_swap: &[&[u8]] = &[&[0x83, b'c', b'a', b't'], &[0x83, b'd', b'o', b'g']];\n\n let invalid_rlp: &[&[u8]] = &[&[0x81, 0x00], &[0x81, 0x01]];\n\n let swapper = Swapper::new(to_swap, invalid_rlp);\n\n assert_eq!(Some(invalid_rlp[0]), swapper.compressed(&[0x83, b'c', b'a', b't']));\n\n assert_eq!(None, swapper.compressed(&[0x83, b'b', b'a', b't']));\n\n assert_eq!(Some(to_swap[1]), swapper.decompressed(invalid_rlp[1]));\n\n}\n\n\n", "file_path": "rlp-compress/tests/compress.rs", "rank": 82, "score": 51350.35001947279 }, { "content": "#[test]\n\nfn rlp_stream_unbounded_list() {\n\n let mut stream = RlpStream::new();\n\n stream.begin_unbounded_list();\n\n stream.append(&40u32);\n\n stream.append(&41u32);\n\n assert!(!stream.is_finished());\n\n 
stream.complete_unbounded_list();\n\n assert!(stream.is_finished());\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 83, "score": 51350.35001947279 }, { "content": "#[test]\n\nfn rlp_list_length_overflow() {\n\n let data: Vec<u8> = vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00];\n\n let rlp = Rlp::new(&data);\n\n let as_val: Result<String, DecoderError> = rlp.val_at(0);\n\n assert_eq!(\n\n Err(DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0\n\n }),\n\n as_val\n\n );\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 84, "score": 51350.35001947279 }, { "content": "#[test]\n\nfn rlp_stream_size_limit() {\n\n for limit in 40..270 {\n\n let item = [0u8; 1];\n\n let mut stream = RlpStream::new();\n\n while stream.append_raw_checked(&item, 1, limit) {}\n\n assert_eq!(stream.drain().len(), limit);\n\n }\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 85, "score": 51350.35001947279 }, { "content": "#[test]\n\nfn rlp_long_data_length_check() {\n\n let mut data: Vec<u8> = vec![0xb8, 255];\n\n for _ in 0..253 {\n\n data.push(b'c');\n\n }\n\n\n\n let rlp = Rlp::new(&data);\n\n\n\n let as_val: Result<String, DecoderError> = rlp.as_val();\n\n assert_eq!(\n\n Err(DecoderError::RlpInconsistentLengthAndData {\n\n max: 255,\n\n index: 257\n\n }),\n\n as_val\n\n );\n\n}\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 86, "score": 50219.105062050774 }, { "content": "#[test]\n\nfn test_nested_list_roundtrip() {\n\n #[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\n struct Inner(u64, u64);\n\n\n\n impl Encodable for Inner {\n\n fn rlp_append(&self, s: &mut RlpStream) {\n\n s.begin_unbounded_list().append(&self.0).append(&self.1).complete_unbounded_list();\n\n }\n\n }\n\n\n\n impl Decodable for Inner {\n\n fn decode(rlp: &Rlp<'_>) -> Result<Self, DecoderError> {\n\n Ok(Inner(rlp.val_at(0)?, rlp.val_at(1)?))\n\n }\n\n }\n\n\n\n #[derive(Debug, Clone, PartialEq, Eq)]\n\n struct Nest<T>(Vec<T>);\n\n\n\n impl<T: Encodable> 
Encodable for Nest<T> {\n", "file_path": "rlp/tests/tests.rs", "rank": 87, "score": 50172.054968132696 }, { "content": "#[test]\n\nfn test_inner_length_capping_for_short_lists() {\n\n assert_eq!(\n\n Rlp::new(&[0xc0, 0x82, b'a', b'b']).val_at::<String>(0),\n\n Err(DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0,\n\n })\n\n );\n\n assert_eq!(\n\n Rlp::new(&[0xc0 + 1, 0x82, b'a', b'b']).val_at::<String>(0),\n\n Err(DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0,\n\n })\n\n );\n\n assert_eq!(\n\n Rlp::new(&[0xc0 + 2, 0x82, b'a', b'b']).val_at::<String>(0),\n\n Err(DecoderError::RlpIsTooShort {\n\n expected: 1,\n\n got: 0,\n", "file_path": "rlp/tests/tests.rs", "rank": 88, "score": 47985.92171149427 }, { "content": "struct DTestPair<T>(T, Vec<u8>)\n\nwhere\n\n T: Decodable + fmt::Debug + cmp::Eq;\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 89, "score": 42602.56647539741 }, { "content": "struct VETestPair<T>(Vec<T>, Vec<u8>)\n\nwhere\n\n T: Encodable;\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 90, "score": 40222.499989456875 }, { "content": "struct VDTestPair<T>(Vec<T>, Vec<u8>)\n\nwhere\n\n T: Decodable + fmt::Debug + cmp::Eq;\n\n\n", "file_path": "rlp/tests/tests.rs", "rank": 91, "score": 40222.499989456875 } ]
Rust
src/main.rs
MeiK2333/river
1106b73423620bd70af37d9f41f683c0bcf4293c
#![recursion_limit = "512"] #[macro_use] extern crate log; use std::path::Path; use std::pin::Pin; use futures::StreamExt; use futures_core::Stream; use log4rs; use tempfile::tempdir_in; use tokio::fs::read_dir; use tonic::transport::Server; use tonic::{Request, Response, Status}; use river::judge_request::Data; use river::river_server::{River, RiverServer}; use river::{ Empty, JudgeRequest, JudgeResponse, JudgeResultEnum, LanguageConfigResponse, LanguageItem, LsCase, LsRequest, LsResponse, }; mod config; mod error; mod judger; mod result; mod sandbox; pub mod river { tonic::include_proto!("river"); } #[derive(Debug, Default)] pub struct RiverService {} #[tonic::async_trait] impl River for RiverService { type JudgeStream = Pin<Box<dyn Stream<Item = Result<JudgeResponse, Status>> + Send + Sync + 'static>>; async fn judge( &self, request: Request<tonic::Streaming<JudgeRequest>>, ) -> Result<Response<Self::JudgeStream>, Status> { let mut stream = request.into_inner(); let output = async_stream::try_stream! 
{ let pwd = match tempdir_in(&config::CONFIG.judge_dir) { Ok(val) => val, Err(e) => { yield result::system_error(error::Error::IOError(e)); return; } }; debug!("{:?}", pwd); let path_str = pwd.path().to_str().unwrap(); info!("new request running on `{}`", path_str); let mut compile_success = false; let mut language = String::from(""); while let Some(req) = stream.next().await { yield result::pending(); yield result::running(); let req = req?; let result = match &req.data { Some(Data::CompileData(data)) => { language = String::from(&data.language); let res = judger::compile(&language, &data.code, &pwd.path()).await; if let Ok(ref val) = res { if let Some(river::judge_response::State::Result(rst)) = &val.state { if rst.result == JudgeResultEnum::CompileSuccess as i32 { compile_success = true; } } } res }, Some(Data::JudgeData(data)) => { if language == "" || !compile_success { Err(error::Error::CustomError(String::from("not compiled"))) } else { judger::judge( &language, &data.in_file, &data.out_file, &data.spj_file, data.time_limit, data.memory_limit, data.judge_type, &pwd.path() ).await } }, None => Err(error::Error::CustomError(String::from("unrecognized request types"))), }; let res = match result { Ok(res) => res, Err(e) => result::system_error(e) }; info!("path: {}, result: {:?}", path_str, res); yield res; }; info!("request end on `{}`", path_str); }; Ok(Response::new(Box::pin(output) as Self::JudgeStream)) } async fn language_config( &self, _request: Request<Empty>, ) -> Result<Response<LanguageConfigResponse>, Status> { let mut languages: Vec<LanguageItem> = vec![]; for (key, value) in &config::CONFIG.languages { languages.push(LanguageItem { language: String::from(key), compile: String::from(&value.compile_cmd), run: String::from(&value.run_cmd), version: String::from(&value.version), }); } let response = LanguageConfigResponse { languages }; Ok(Response::new(response)) } async fn ls(&self, request: Request<LsRequest>) -> Result<Response<LsResponse>, 
Status> { let pid = request.into_inner().pid; let mut response = LsResponse { cases: vec![] }; let mut directory_stream = match read_dir(Path::new(&config::CONFIG.data_dir).join(pid.to_string())).await { Ok(val) => val, Err(_) => return Ok(Response::new(response)), }; let mut files: Vec<String> = vec![]; while let Ok(Some(entry)) = directory_stream.next_entry().await { let file = entry.file_name().into_string().unwrap(); files.push(file); } let mut iter = 1; loop { let in_file = format!("data{}.in", iter); let out_file = format!("data{}.out", iter); if files.contains(&in_file) && files.contains(&out_file) { response.cases.push(LsCase { r#in: in_file, out: out_file, }); iter += 1; } else { break; } } Ok(Response::new(response)) } } #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { log4rs::init_file("log4rs.yaml", Default::default()).unwrap(); let addr = "0.0.0.0:4003".parse()?; let river = RiverService::default(); info!("listen on: {}", addr); Server::builder() .concurrency_limit_per_connection(5) .add_service(RiverServer::new(river)) .serve(addr) .await?; Ok(()) }
#![recursion_limit = "512"] #[macro_use] extern crate log; use std::path::Path; use std::pin::Pin; use futures::StreamExt; use futures_core::Stream; use log4rs; use tempfile::tempdir_in; use tokio::fs::read_dir; use tonic::transport::Server; use tonic::{Request, Response, Status}; use river::judge_request::Data; use river::river_server::{River, RiverServer}; use river::{ Empty, JudgeRequest, JudgeResponse, JudgeResultEnum, LanguageConfigResponse, LanguageItem, LsCase, LsRequest, LsResponse, }; mod config; mod error; mod judger; mod result; mod sandbox; pub mod river { tonic::include_proto!("river"); } #[derive(Debug, Default)] pub struct RiverService {} #[tonic::async_trait] impl River for RiverService { type JudgeStream = Pin<Box<dyn Stream<Item = Result<JudgeResponse, Status>> + Send + Sync + 'static>>; async fn judge( &self, request: Request<tonic::Streaming<JudgeRequest>>, ) -> Result<Response<Self::JudgeStream>, Status> { let mut stream = request.into_inner(); let output = async_stream::try_stream! 
{ let pwd = match tempdir_in(&config::CONFIG.judge_dir) { Ok(val) => val, Err(e) => { yield result::system_error(error::Error::IOError(e)); return; } }; debug!("{:?}", pwd); let path_str = pwd.path().to_str().unwrap(); info!("new request running on `{}`", path_str); let mut compile_success = false; let mut language = String::from(""); while let Some(req) = stream.next().await { yield result::pending(); yield result::running(); let req = req?; let result = match &req.data { Some(Data::CompileData(data)) => { language = String::from(&data.language); let res = judger::compile(&language, &data.code, &pwd.path()).await; if let Ok(ref val) = res { if let Some(river::judge_response::State::Result(rst)) = &val.state { if rst.result == JudgeResultEnum::CompileSuccess as i32 { compile_success = true; } } } res }, Some(Data::JudgeData(data)) => { if language == "" || !compile_success { Err(error::Error::CustomError(String::from("not compiled"))) } else { judger::judge( &language, &data.in_file, &data.out_file, &data.spj_file, data.time_limit, data.memory_limit, data.judge_type, &pwd.path() ).await } }, None => Err(error::Error::CustomError(String::from("unrecognized request types"))), }; let res = match result { Ok(res) => res, Err(e) => result::system_error(e) }; info!("path: {}, result: {:?}", path_str, res); yield res; }; info!("request end on `{}`", path_str); }; Ok(Response::new(Box::pin(output) as Self::JudgeStream)) } async fn language_config( &self, _request: Request<Empty>, ) -> Result<Response<LanguageConfigResponse>, Status> { let mut languages: Vec<LanguageItem> = vec![]; for (key, value) in &config::CONFIG.languages { languages.push(LanguageItem { language: String::from(key), compile: String::from(&value.compile_cmd), run: String::from(&value.run_cmd), version: String::from(&value.version), }); } let response = LanguageConfigResponse { languages }; Ok(Response::new(response)) } async fn ls(&self, request: Request<LsRequest>) -> Result<Response<LsResponse>, 
Status> { let pid = request.into_inner().pid; let mut response = LsResponse { cases: vec![] }; let mut directory_stream = match read_dir(Path::new(&config::CONFIG.data_dir).join(pid.to_string())).await { Ok(val) => val, Err(_) => return Ok(Response::new(response)), }; let mut files: Vec<String> = vec![]; while let Ok(Some(entry)) = directory_stream.next_entry().await { let file = entry.file_name().into_string().unwrap(); files.push(file); } let mut iter = 1; loop { let in_file = format!("data{}.in", iter); let out_file = format!("data{}.out", iter); if files.contains(&in_file) && files.contains(&out_file) { response.cases.push(LsCase { r#in: in_file, out: out_file, }); iter += 1; } else { break; } } Ok(Response::new(response)) } } #[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> { log4rs::init_file("log4rs.yaml", Default::default()).unwrap(); let addr = "0.0.0.0:4003".parse()?; let river = RiverService::default(); info!("listen on: {}", addr); Server::builder() .concurrency_limit_per_connection(5) .add_service(RiverServer::new(river)) .serve(addr) .await?; Ok(()) }
function_block-full_function
[ { "content": "pub fn system_error(err: Error) -> JudgeResponse {\n\n warn!(\"{}\", err);\n\n JudgeResponse {\n\n state: Some(State::Result(JudgeResult {\n\n time_used: 0,\n\n memory_used: 0,\n\n result: JudgeResultEnum::SystemError as i32,\n\n errmsg: format!(\"{}\", err).into(),\n\n outmsg: String::from(\"\"),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 0, "score": 139502.84390932968 }, { "content": "pub fn compile_success(time_used: i64, memory_used: i64) -> JudgeResponse {\n\n judge_result(time_used, memory_used, JudgeResultEnum::CompileSuccess)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 1, "score": 108497.91365643183 }, { "content": "pub fn running() -> JudgeResponse {\n\n JudgeResponse {\n\n state: Some(State::Status(JudgeStatus::Running as i32)),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 2, "score": 102756.56827505835 }, { "content": "pub fn compile_error(time_used: i64, memory_used: i64, errmsg: &str) -> JudgeResponse {\n\n JudgeResponse {\n\n state: Some(State::Result(JudgeResult {\n\n time_used,\n\n memory_used,\n\n result: JudgeResultEnum::CompileError as i32,\n\n errmsg: String::from(errmsg),\n\n outmsg: String::from(\"\"),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 3, "score": 101743.44443277009 }, { "content": "fn judge_result(time_used: i64, memory_used: i64, result: JudgeResultEnum) -> JudgeResponse {\n\n JudgeResponse {\n\n state: Some(State::Result(JudgeResult {\n\n time_used,\n\n memory_used,\n\n result: result as i32,\n\n errmsg: String::from(\"\"),\n\n outmsg: String::from(\"\"),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 4, "score": 100172.66926447567 }, { "content": "pub fn standard_result(out: &[u8], ans: &[u8]) -> Result<JudgeResultEnum> {\n\n let out_len = out.len();\n\n let ans_len = ans.len();\n\n let mut out_offset = 0;\n\n let mut ans_offset = 0;\n\n // 没有 PE,PE 直接 WA\n\n let mut r = JudgeResultEnum::Accepted;\n\n while out_offset <= out_len && 
ans_offset <= ans_len {\n\n let (out_start, out_end, out_exists) = next_line(&out, out_offset, out_len);\n\n let (ans_start, ans_end, ans_exists) = next_line(&ans, ans_offset, ans_len);\n\n if !out_exists || !ans_exists {\n\n // 如果一个已经读取完但另一个还有数据,则结果为 WA\n\n if out_exists != ans_exists {\n\n r = JudgeResultEnum::WrongAnswer;\n\n }\n\n break;\n\n }\n\n // 如果两个数据当前行长度不同,则结果为 WA(这个长度已经排除了末尾空白符号)\n\n if out_end - out_start != ans_end - ans_start {\n\n r = JudgeResultEnum::WrongAnswer;\n", "file_path": "src/result.rs", "rank": 5, "score": 98017.96245537193 }, { "content": "pub fn runtime_error(time_used: i64, memory_used: i64, errmsg: &str) -> JudgeResponse {\n\n JudgeResponse {\n\n state: Some(State::Result(JudgeResult {\n\n time_used,\n\n memory_used,\n\n result: JudgeResultEnum::RuntimeError as i32,\n\n errmsg: String::from(errmsg),\n\n outmsg: String::from(\"\"),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 6, "score": 88972.6951615689 }, { "content": "pub fn errno_str(errno: Option<i32>) -> String {\n\n match errno {\n\n Some(no) => {\n\n let stre = unsafe { strerror(no) };\n\n let c_str: &CStr = unsafe { CStr::from_ptr(stre) };\n\n c_str.to_str().unwrap().to_string()\n\n }\n\n _ => \"Unknown Error!\".to_string(),\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 7, "score": 87527.03017537801 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tonic_build::compile_protos(\"proto/river.proto\")?;\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 8, "score": 85581.77666696651 }, { "content": "pub fn accepted(time_used: i64, memory_used: i64) -> JudgeResponse {\n\n judge_result(time_used, memory_used, JudgeResultEnum::Accepted)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 9, "score": 84948.15708976571 }, { "content": "pub fn pending() -> JudgeResponse {\n\n JudgeResponse {\n\n state: Some(State::Status(JudgeStatus::Pending as i32)),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 10, "score": 
83966.47960708401 }, { "content": "pub fn wrong_answer(time_used: i64, memory_used: i64) -> JudgeResponse {\n\n judge_result(time_used, memory_used, JudgeResultEnum::WrongAnswer)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 11, "score": 82343.0776419384 }, { "content": "fn string_to_i64(value: &str) -> Result<i64> {\n\n if let Ok(res) = value.parse() {\n\n return Ok(res);\n\n }\n\n Err(Error::StringToIntError(String::from(value)))\n\n}\n", "file_path": "src/sandbox.rs", "rank": 12, "score": 80375.61194701171 }, { "content": "pub fn memory_limit_exceeded(time_used: i64, memory_used: i64) -> JudgeResponse {\n\n judge_result(time_used, memory_used, JudgeResultEnum::MemoryLimitExceeded)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 13, "score": 79918.02960073858 }, { "content": "pub fn time_limit_exceeded(time_used: i64, memory_used: i64) -> JudgeResponse {\n\n judge_result(time_used, memory_used, JudgeResultEnum::TimeLimitExceeded)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 14, "score": 79918.02960073858 }, { "content": "pub fn spj_result(\n\n time_used: i64,\n\n memory_used: i64,\n\n result: JudgeResultEnum,\n\n outmsg: &str,\n\n errmsg: &str,\n\n) -> JudgeResponse {\n\n JudgeResponse {\n\n state: Some(State::Result(JudgeResult {\n\n time_used,\n\n memory_used,\n\n result: result as i32,\n\n errmsg: String::from(errmsg),\n\n outmsg: String::from(outmsg),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 15, "score": 76155.29311205605 }, { "content": "fn path_to_string(path: &Path) -> Result<String> {\n\n if let Some(s) = path.to_str() {\n\n return Ok(String::from(s));\n\n }\n\n Err(Error::PathToStringError())\n\n}\n\n\n\npub async fn compile(language: &str, code: &str, path: &Path) -> Result<JudgeResponse> {\n\n info!(\"compile: language = `{}`\", language);\n\n let lang = match CONFIG.languages.get(language) {\n\n Some(val) => val,\n\n None => return Err(Error::LanguageNotFound(String::from(language))),\n\n };\n\n 
try_io!(fs::write(path.join(&lang.code_file), &code).await);\n\n\n\n let semaphore = CPU_SEMAPHORE.clone();\n\n let permit = semaphore.acquire().await;\n\n\n\n let mut sandbox = Sandbox::new(\n\n &lang.compile_cmd,\n", "file_path": "src/judger.rs", "rank": 16, "score": 62968.740894950744 }, { "content": "fn next_line(v: &[u8], offset: usize, len: usize) -> (usize, usize, bool) {\n\n let mut line_offset = offset;\n\n let mut left = 0;\n\n let mut right = len;\n\n let mut has_line = false;\n\n while line_offset < len {\n\n let ch = v[line_offset] as char;\n\n // 当读取到某行结束时\n\n if ch == '\\n' {\n\n if has_line {\n\n // 如果已经有新行的数据,则在这个位置结束\n\n right = line_offset;\n\n break;\n\n } else {\n\n // 如果还没有数据,说明整行为空,忽略当前行,将下一行设为起点重复过程\n\n left = line_offset + 1;\n\n }\n\n }\n\n if ch == ' ' || ch == '\\t' || ch == '\\n' || ch == '\\r' {\n\n // 空白字符\n", "file_path": "src/result.rs", "rank": 17, "score": 28325.02849823699 }, { "content": "lazy_static! {\n\n pub static ref CONFIG: Config = {\n\n let config = fs::read_to_string(\"config.yaml\").unwrap();\n\n let cfg: Config = serde_yaml::from_str(&config).unwrap();\n\n debug!(\"{:?}\", cfg);\n\n cfg\n\n };\n\n pub static ref CPU_SEMAPHORE: Arc<Semaphore> = {\n\n let num = num_cpus::get();\n\n info!(\"cpus = {}\", num);\n\n // 设置最大并发量与 CPU 核数相同,以防止因资源不足而产生系统错误\n\n Arc::new(Semaphore::new(num))\n\n };\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\npub struct LanguageConf {\n\n pub compile_cmd: String,\n\n pub code_file: String,\n\n pub run_cmd: String,\n", "file_path": "src/config.rs", "rank": 18, "score": 23501.974424744836 }, { "content": " pub version: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Config {\n\n pub data_dir: String,\n\n pub judge_dir: String,\n\n pub cgroup: i32,\n\n pub rootfs: String,\n\n pub languages: HashMap<String, LanguageConf>,\n\n}\n", "file_path": "src/config.rs", "rank": 19, "score": 23501.692223312155 }, { "content": "use 
std::collections::HashMap;\n\nuse std::fs;\n\nuse std::sync::Arc;\n\n\n\nuse lazy_static::lazy_static;\n\nuse num_cpus;\n\nuse serde::{Deserialize, Serialize};\n\nuse tokio::sync::Semaphore;\n\n\n\n// pub static STDIN_FILENAME: &str = \"stdin.txt\";\n\npub static STDOUT_FILENAME: &str = \"stdout.txt\";\n\npub static STDERR_FILENAME: &str = \"stderr.txt\";\n\npub static RESULT_FILENAME: &str = \"result.txt\";\n\npub static SPJ_FILENAME: &str = \"spj\";\n\npub static SPJ_INPUT_FILENAME: &str = \"spj_input.txt\";\n\npub static SPJ_ANSWER_FILENAME: &str = \"spj_answer.txt\";\n\npub static SPJ_STDOUT_FILENAME: &str = \"spj_stdout.txt\";\n\npub static SPJ_STDERR_FILENAME: &str = \"spj_stderr.txt\";\n\npub static SPJ_RESULT_FILENAME: &str = \"spj_result.txt\";\n\n\n", "file_path": "src/config.rs", "rank": 20, "score": 23499.35896330264 }, { "content": " OsStringToStringError(OsString),\n\n PathToStringError(),\n\n StringSplitError(),\n\n StringToIntError(String),\n\n}\n\n\n\npub type Result<T> = result::Result<T, Error>;\n\n\n\n// 创建一个简单的包装\n\n#[macro_export]\n\nmacro_rules! 
try_io {\n\n ($expression:expr) => {\n\n match $expression {\n\n Ok(val) => val,\n\n Err(e) => return Err(crate::error::Error::IOError(e)),\n\n };\n\n };\n\n}\n\n\n\nimpl fmt::Display for Error {\n", "file_path": "src/error.rs", "rank": 21, "score": 23017.116112414624 }, { "content": " let mut memory_used = 0;\n\n let mut exit_code = 0;\n\n let mut status = 0;\n\n let mut signal = 0;\n\n\n\n let text = try_io!(read_to_string(&self.result).await);\n\n for line in text.split(\"\\n\") {\n\n if !line.contains(\"=\") {\n\n continue;\n\n }\n\n let mut splitter = line.splitn(2, \" = \");\n\n let key = if let Some(s) = splitter.next() {\n\n s\n\n } else {\n\n return Err(Error::StringSplitError());\n\n };\n\n let value = if let Some(s) = splitter.next() {\n\n s\n\n } else {\n\n return Err(Error::StringSplitError());\n", "file_path": "src/sandbox.rs", "rank": 22, "score": 23016.890400437456 }, { "content": "use tokio::fs::read_to_string;\n\nuse tokio::process::Command;\n\n\n\nuse crate::error::{Error, Result};\n\n\n\n#[derive(Debug)]\n\npub struct ProcessExitStatus {\n\n pub time_used: i64,\n\n pub memory_used: i64,\n\n pub exit_code: i64,\n\n pub status: i64,\n\n pub signal: i64,\n\n}\n\n\n\npub struct Sandbox {\n\n inner_args: Vec<String>,\n\n workdir: String,\n\n rootfs: String,\n\n result: String,\n\n stdin: String,\n", "file_path": "src/sandbox.rs", "rank": 23, "score": 23016.718326275823 }, { "content": " pids,\n\n }\n\n }\n\n\n\n pub async fn spawn(&mut self) -> Result<ProcessExitStatus> {\n\n let mut args = vec![\n\n String::from(\"./newbie-sandbox/target/x86_64-unknown-linux-gnu/release/newbie-sandbox\"),\n\n String::from(\"-w\"),\n\n String::from(&self.workdir),\n\n String::from(\"--rootfs\"),\n\n String::from(&self.rootfs),\n\n String::from(\"-r\"),\n\n String::from(&self.result),\n\n String::from(\"-i\"),\n\n String::from(&self.stdin),\n\n String::from(\"-o\"),\n\n String::from(&self.stdout),\n\n String::from(\"-e\"),\n\n String::from(&self.stderr),\n\n 
String::from(\"-t\"),\n", "file_path": "src/sandbox.rs", "rank": 24, "score": 23014.19085815765 }, { "content": " self.time_limit.to_string(),\n\n String::from(\"-m\"),\n\n self.memory_limit.to_string(),\n\n String::from(\"-f\"),\n\n self.file_size_limit.to_string(),\n\n String::from(\"-c\"),\n\n self.cgroup.to_string(),\n\n String::from(\"-p\"),\n\n self.pids.to_string(),\n\n String::from(\"--\"),\n\n ];\n\n args.extend_from_slice(&mut self.inner_args);\n\n info!(\"args = {:?}\", args.join(\" \"));\n\n let mut child = try_io!(Command::new(&args[0]).args(&args[1..]).spawn());\n\n let exit_status = try_io!(child.wait().await);\n\n if !exit_status.success() {\n\n return Err(Error::SystemError(String::from(\"run sandbox error!\")));\n\n }\n\n\n\n let mut time_used = 0;\n", "file_path": "src/sandbox.rs", "rank": 25, "score": 23014.15528329754 }, { "content": " };\n\n match key {\n\n \"time_used\" => time_used = string_to_i64(value)?,\n\n \"memory_used\" => memory_used = string_to_i64(value)?,\n\n \"exit_code\" => exit_code = string_to_i64(value)?,\n\n \"status\" => status = string_to_i64(value)?,\n\n \"signal\" => signal = string_to_i64(value)?,\n\n _ => continue,\n\n }\n\n debug!(\"{}: {}\", key, value);\n\n }\n\n\n\n Ok(ProcessExitStatus {\n\n time_used,\n\n memory_used,\n\n exit_code,\n\n status,\n\n signal,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/sandbox.rs", "rank": 26, "score": 23013.301470883027 }, { "content": " stdout: String,\n\n stderr: String,\n\n time_limit: i32,\n\n memory_limit: i32,\n\n file_size_limit: i32,\n\n cgroup: i32,\n\n pids: i32,\n\n}\n\n\n\nimpl Sandbox {\n\n pub fn new(\n\n cmd: &String,\n\n workdir: String,\n\n rootfs: String,\n\n result: String,\n\n stdin: String,\n\n stdout: String,\n\n stderr: String,\n\n time_limit: i32,\n\n memory_limit: i32,\n", "file_path": "src/sandbox.rs", "rank": 27, "score": 23012.816102228022 }, { "content": "#![macro_use]\n\n\n\nuse std::ffi::CStr;\n\nuse std::ffi::{NulError, OsString};\n\nuse 
std::fmt;\n\nuse std::io;\n\nuse std::result;\n\n\n\nuse libc::strerror;\n\n\n\n#[allow(dead_code)]\n\n#[derive(Debug)]\n\npub enum Error {\n\n IOError(io::Error),\n\n StringToCStringError(NulError),\n\n ParseIntError(std::num::ParseIntError),\n\n CreateTempDirError(io::Error),\n\n CustomError(String),\n\n LanguageNotFound(String),\n\n SystemError(String),\n", "file_path": "src/error.rs", "rank": 28, "score": 23012.674988931656 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Error::IOError(ref e) => write!(f, \"IOError: `{}`\", errno_str(e.raw_os_error())),\n\n Error::CustomError(ref e) => write!(f, \"Internal Server Error: `{}`\", e),\n\n Error::LanguageNotFound(ref e) => write!(f, \"Language Not Fount: `{}`\", e),\n\n Error::SystemError(ref e) => write!(f, \"System Error: `{}`\", e),\n\n _ => write!(f, \"{:?}\", self),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 29, "score": 23011.93416162894 }, { "content": " file_size_limit: i32,\n\n cgroup: i32,\n\n pids: i32,\n\n ) -> Self {\n\n let inner_args = String::from(cmd)\n\n .split(\" \")\n\n .map(|s| s.to_string())\n\n .collect();\n\n Sandbox {\n\n inner_args,\n\n workdir,\n\n rootfs,\n\n result,\n\n stdin,\n\n stdout,\n\n stderr,\n\n time_limit,\n\n memory_limit,\n\n file_size_limit,\n\n cgroup,\n", "file_path": "src/sandbox.rs", "rank": 30, "score": 23011.550791664395 }, { "content": " judge_type: i32,\n\n path: &Path,\n\n) -> Result<JudgeResponse> {\n\n info!(\"judge: language = `{}`, in_file = `{}`, out_file = `{}`, time_limit = `{}`, memory_limit = `{}`, judge_type = `{}`\", language, in_file, out_file, time_limit, memory_limit, judge_type);\n\n let data_dir = Path::new(&CONFIG.data_dir);\n\n\n\n let lang = match CONFIG.languages.get(language) {\n\n Some(val) => val,\n\n None => return Err(Error::LanguageNotFound(String::from(language))),\n\n };\n\n // 信号量控制并发\n\n let semaphore = CPU_SEMAPHORE.clone();\n\n let permit = 
semaphore.acquire().await;\n\n\n\n try_io!(remove_file(&path.join(RESULT_FILENAME)).await);\n\n try_io!(remove_file(&path.join(STDOUT_FILENAME)).await);\n\n try_io!(remove_file(&path.join(STDERR_FILENAME)).await);\n\n let mut sandbox = Sandbox::new(\n\n &lang.run_cmd,\n\n path_to_string(&path)?,\n", "file_path": "src/judger.rs", "rank": 31, "score": 22500.942225726874 }, { "content": "use std::path::{Path, PathBuf};\n\n\n\nuse tokio::fs;\n\nuse tokio::fs::{remove_file, File};\n\nuse tokio::io::AsyncReadExt;\n\n\n\nuse crate::config::{\n\n CONFIG, CPU_SEMAPHORE, RESULT_FILENAME, SPJ_ANSWER_FILENAME, SPJ_FILENAME, SPJ_INPUT_FILENAME,\n\n SPJ_RESULT_FILENAME, SPJ_STDERR_FILENAME, SPJ_STDOUT_FILENAME, STDERR_FILENAME,\n\n STDOUT_FILENAME,\n\n};\n\nuse crate::error::{Error, Result};\n\nuse crate::result::{\n\n accepted, compile_error, compile_success, memory_limit_exceeded, runtime_error, spj_result,\n\n standard_result, time_limit_exceeded, wrong_answer,\n\n};\n\nuse crate::river::{JudgeResponse, JudgeResultEnum, JudgeType};\n\nuse crate::sandbox::{ProcessExitStatus, Sandbox};\n\n\n", "file_path": "src/judger.rs", "rank": 32, "score": 22497.39702907575 }, { "content": " status.time_used,\n\n status.memory_used,\n\n &format!(\"Exceptional program return code: `{}`\", status.exit_code),\n\n ));\n\n } else if judge_type == JudgeType::Standard as i32 {\n\n // 答案对比\n\n let out = try_io!(fs::read(path.join(STDOUT_FILENAME)).await);\n\n let ans = try_io!(fs::read(data_dir.join(&out_file)).await);\n\n let res = standard_result(&out, &ans)?;\n\n return if res == JudgeResultEnum::Accepted {\n\n Ok(accepted(status.time_used, status.memory_used))\n\n } else {\n\n Ok(wrong_answer(status.time_used, status.memory_used))\n\n };\n\n } else if judge_type == JudgeType::Special as i32 {\n\n // Special Judge\n\n return special_judge(in_file, out_file, spj_file, path, data_dir, status).await;\n\n }\n\n\n\n Err(Error::SystemError(String::from(format!(\"Unknown Error!\"))))\n", "file_path": 
"src/judger.rs", "rank": 33, "score": 22494.620589228394 }, { "content": " let errmsg = read_file_2048(path.join(STDERR_FILENAME)).await?;\n\n let errmsg = if outmsg == \"\" {\n\n errmsg\n\n } else if errmsg == \"\" {\n\n outmsg\n\n } else {\n\n format!(\"{}\\n{}\", outmsg, errmsg)\n\n };\n\n return Ok(compile_error(status.time_used, status.memory_used, &errmsg));\n\n }\n\n Ok(compile_success(status.time_used, status.memory_used))\n\n}\n\n\n\npub async fn judge(\n\n language: &str,\n\n in_file: &str,\n\n out_file: &str,\n\n spj_file: &str,\n\n time_limit: i32,\n\n memory_limit: i32,\n", "file_path": "src/judger.rs", "rank": 34, "score": 22494.577393742507 }, { "content": "}\n\n\n\nasync fn special_judge(\n\n in_file: &str,\n\n out_file: &str,\n\n spj_file: &str,\n\n path: &Path,\n\n data_dir: &Path,\n\n status: ProcessExitStatus,\n\n) -> Result<JudgeResponse> {\n\n if spj_file == \"\" {\n\n return Err(Error::SystemError(format!(\"field spj_file is required!\")));\n\n }\n\n let spj = data_dir.join(&spj_file);\n\n if !spj.exists() {\n\n return Err(Error::SystemError(format!(\n\n \"Special Judge File `{}` Not Found!\",\n\n spj_file\n\n )));\n\n }\n", "file_path": "src/judger.rs", "rank": 35, "score": 22490.284931773214 }, { "content": " String::from(&CONFIG.rootfs),\n\n path_to_string(&path.join(RESULT_FILENAME))?,\n\n path_to_string(data_dir.join(&in_file).as_path())?,\n\n path_to_string(&path.join(STDOUT_FILENAME))?,\n\n path_to_string(&path.join(STDERR_FILENAME))?,\n\n time_limit,\n\n if language == \"Java\"\n\n || language == \"Go\"\n\n || language == \"JavaScript\"\n\n || language == \"TypeScript\"\n\n || language == \"CSharp\"\n\n {\n\n 1024 * 1024\n\n } else {\n\n memory_limit\n\n },\n\n 50 * 1024 * 1024,\n\n i32::from(CONFIG.cgroup),\n\n 32,\n\n );\n", "file_path": "src/judger.rs", "rank": 36, "score": 22488.502974286268 }, { "content": " return if spj_status.exit_code == 0 {\n\n Ok(spj_result(\n\n status.time_used,\n\n status.memory_used,\n\n 
JudgeResultEnum::Accepted,\n\n &outmsg,\n\n &errmsg,\n\n ))\n\n } else {\n\n Ok(spj_result(\n\n status.time_used,\n\n status.memory_used,\n\n JudgeResultEnum::WrongAnswer,\n\n &outmsg,\n\n &errmsg,\n\n ))\n\n };\n\n}\n\n\n\nasync fn read_file_2048(filename: PathBuf) -> Result<String> {\n", "file_path": "src/judger.rs", "rank": 37, "score": 22488.267413477304 }, { "content": " &spj_cmd,\n\n path_to_string(&path)?,\n\n String::from(&CONFIG.rootfs),\n\n path_to_string(&path.join(SPJ_RESULT_FILENAME))?,\n\n String::from(\"/STDIN/\"),\n\n path_to_string(&path.join(SPJ_STDOUT_FILENAME))?,\n\n path_to_string(&path.join(SPJ_STDERR_FILENAME))?,\n\n 5000,\n\n 1024 * 1024,\n\n 50 * 1024 * 1024,\n\n i32::from(CONFIG.cgroup),\n\n 8,\n\n );\n\n let spj_status = sandbox.spawn().await?;\n\n drop(permit);\n\n\n\n // 读取 spj 程序的输出,无论结果 ac 与否,都要将其返回\n\n let outmsg = read_file_2048(path.join(SPJ_STDOUT_FILENAME)).await?;\n\n let errmsg = read_file_2048(path.join(SPJ_STDERR_FILENAME)).await?;\n\n // spj 程序的返回值(code)代表了结果,0 ac,1 wa\n", "file_path": "src/judger.rs", "rank": 38, "score": 22488.007876224547 }, { "content": " let status = sandbox.spawn().await?;\n\n drop(permit);\n\n\n\n if status.time_used > time_limit.into() {\n\n // TLE\n\n return Ok(time_limit_exceeded(status.time_used, status.memory_used));\n\n } else if status.memory_used > memory_limit.into() {\n\n // MLE\n\n return Ok(memory_limit_exceeded(status.time_used, status.memory_used));\n\n } else if status.signal != 0 {\n\n // RE\n\n return Ok(runtime_error(\n\n status.time_used,\n\n status.memory_used,\n\n &format!(\"Program was interrupted by signal: `{}`\", status.signal),\n\n ));\n\n } else if status.exit_code != 0 {\n\n // RE\n\n // 就算是用户自己返回的非零,也算 RE\n\n return Ok(runtime_error(\n", "file_path": "src/judger.rs", "rank": 39, "score": 22487.737415179323 }, { "content": " let mut buffer = [0; 2048];\n\n let mut file = try_io!(File::open(filename).await);\n\n try_io!(file.read(&mut buffer).await);\n\n\n\n let mut offset = 
0;\n\n for i in 0..2047 {\n\n offset = i;\n\n if buffer[i] == 0 {\n\n break;\n\n }\n\n }\n\n try_io!(file.read(&mut buffer[offset..]).await);\n\n Ok(String::from(String::from_utf8_lossy(&buffer[..offset])))\n\n}\n", "file_path": "src/judger.rs", "rank": 40, "score": 22487.415163314556 }, { "content": " path_to_string(&path)?,\n\n String::from(&CONFIG.rootfs),\n\n path_to_string(&path.join(RESULT_FILENAME))?,\n\n String::from(\"/STDIN/\"),\n\n path_to_string(&path.join(STDOUT_FILENAME))?,\n\n path_to_string(&path.join(STDERR_FILENAME))?,\n\n 8000,\n\n 1024 * 1024,\n\n 50 * 1024 * 1024,\n\n i32::from(CONFIG.cgroup),\n\n 128,\n\n );\n\n let status = sandbox.spawn().await?;\n\n drop(permit);\n\n info!(\"status = {:?}\", status);\n\n\n\n if status.exit_code != 0 || status.signal != 0 {\n\n // 合并 stdout 与 stderr 为 errmsg\n\n // 因为不同的语言、不同的编译器,错误信息输出到了不同的地方\n\n let outmsg = read_file_2048(path.join(STDOUT_FILENAME)).await?;\n", "file_path": "src/judger.rs", "rank": 41, "score": 22487.185641408123 }, { "content": " // 将 spj 程序复制到沙盒内部\n\n try_io!(fs::copy(spj, path.join(SPJ_FILENAME)).await);\n\n\n\n // TODO: 创建 input file 与 answer file 的 named pipe,穿透沙盒以文件形式传递给 spj 程序?\n\n // 此方案不稳定因素较多,比如两个阻塞写入的线程、异常处理等。在没有明显性能问题前先不实现此方案\n\n\n\n // 将 input file 与 answer file 复制到沙盒内部,以供 spj 使用\n\n try_io!(fs::copy(data_dir.join(&in_file), path.join(SPJ_INPUT_FILENAME)).await);\n\n try_io!(fs::copy(data_dir.join(&out_file), path.join(SPJ_ANSWER_FILENAME)).await);\n\n\n\n // Program must be run with the following arguments: <input-file> <output-file> <answer-file>\n\n let spj_cmd = format!(\n\n \"{} {} {} {}\",\n\n SPJ_FILENAME, SPJ_INPUT_FILENAME, STDOUT_FILENAME, SPJ_ANSWER_FILENAME\n\n );\n\n\n\n let semaphore = CPU_SEMAPHORE.clone();\n\n let permit = semaphore.acquire().await;\n\n\n\n let mut sandbox = Sandbox::new(\n", "file_path": "src/judger.rs", "rank": 42, "score": 22486.229365041065 }, { "content": "use crate::error::Error;\n\nuse crate::error::Result;\n\nuse 
crate::river::judge_response::State;\n\nuse crate::river::{JudgeResponse, JudgeResult, JudgeResultEnum, JudgeStatus};\n\n\n", "file_path": "src/result.rs", "rank": 43, "score": 20708.98688368897 }, { "content": " break;\n\n }\n\n let line_len = out_end - out_start;\n\n for i in 0..line_len {\n\n // 逐个对比\n\n if out[out_start + i] != ans[ans_start + i] {\n\n r = JudgeResultEnum::WrongAnswer;\n\n break;\n\n }\n\n }\n\n // 如果结果出来了,则退出循环\n\n if r != JudgeResultEnum::Accepted {\n\n break;\n\n }\n\n out_offset = out_end;\n\n ans_offset = ans_end;\n\n }\n\n Ok(r)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 44, "score": 20702.97429372654 }, { "content": " } else {\n\n // 非空白字符\n\n has_line = true;\n\n }\n\n line_offset += 1;\n\n }\n\n // 排除该行末尾的空白字符\n\n while left < right {\n\n let ch = v[right - 1] as char;\n\n if ch == ' ' || ch == '\\t' || ch == '\\n' || ch == '\\r' {\n\n // 空白字符\n\n } else {\n\n // 非空白字符\n\n break;\n\n }\n\n right -= 1;\n\n }\n\n (left, right, has_line)\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 45, "score": 20697.784045656605 }, { "content": " #[test]\n\n fn test8() {\n\n let ans: &[u8] = \"Hello World!\".as_bytes();\n\n let out: &[u8] = \"\".as_bytes();\n\n assert_eq!(\n\n standard_result(out, ans).unwrap(),\n\n JudgeResultEnum::WrongAnswer\n\n );\n\n }\n\n\n\n #[test]\n\n fn test9() {\n\n let ans: &[u8] = \"\".as_bytes();\n\n let out: &[u8] = \"\".as_bytes();\n\n assert_eq!(\n\n standard_result(out, ans).unwrap(),\n\n JudgeResultEnum::Accepted\n\n );\n\n }\n\n\n", "file_path": "src/result.rs", "rank": 46, "score": 20697.724880657188 }, { "content": " #[test]\n\n fn test6() {\n\n let ans: &[u8] = \"Hello World!\".as_bytes();\n\n let out: &[u8] = \"Hello World! \".as_bytes();\n\n assert_eq!(\n\n standard_result(out, ans).unwrap(),\n\n JudgeResultEnum::Accepted\n\n );\n\n }\n\n\n\n #[test]\n\n fn test7() {\n\n let ans: &[u8] = \"Hello World! 
\\n\\n\\n\\n \\n\\n\\n\\n\".as_bytes();\n\n let out: &[u8] = \"Hello World!\\t\\t\\t\\t\\n\\n\\n\\n \\n\\n\\n\\n\\t\\t\\t\\t\".as_bytes();\n\n assert_eq!(\n\n standard_result(out, ans).unwrap(),\n\n JudgeResultEnum::Accepted\n\n );\n\n }\n\n\n", "file_path": "src/result.rs", "rank": 47, "score": 20697.583059074732 }, { "content": " #[test]\n\n fn test10() {\n\n let ans: &[u8] = \"Hello World!\".as_bytes();\n\n let out: &[u8] = \"Hello World!\\n\".as_bytes();\n\n assert_eq!(\n\n standard_result(out, ans).unwrap(),\n\n JudgeResultEnum::Accepted\n\n );\n\n }\n\n}\n", "file_path": "src/result.rs", "rank": 48, "score": 20697.34048861211 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test1() {\n\n let v: &[u8] = \"Hello World!\".as_bytes();\n\n let (l, r, e) = next_line(v, 0, v.len());\n\n assert_eq!(l, 0);\n\n assert_eq!(r, 12);\n\n assert!(e);\n\n let (_l, _r, e) = next_line(v, r, v.len());\n\n assert!(!e);\n\n }\n\n\n\n #[test]\n\n fn test2() {\n\n let v: &[u8] = \"Hello World! 
\".as_bytes();\n\n let (l, r, e) = next_line(v, 0, v.len());\n\n assert_eq!(l, 0);\n", "file_path": "src/result.rs", "rank": 49, "score": 20696.76861683097 }, { "content": " assert_eq!(r, 15);\n\n assert!(e);\n\n let (l, r, e) = next_line(v, r, v.len());\n\n assert_eq!(l, 16);\n\n assert_eq!(r, 31);\n\n assert!(e);\n\n let (_l, _r, e) = next_line(v, r, v.len());\n\n assert!(!e);\n\n }\n\n\n\n #[test]\n\n fn test5() {\n\n let ans: &[u8] = \"Hello World!\".as_bytes();\n\n let out: &[u8] = \"Hello World!\".as_bytes();\n\n assert_eq!(\n\n standard_result(out, ans).unwrap(),\n\n JudgeResultEnum::Accepted\n\n );\n\n }\n\n\n", "file_path": "src/result.rs", "rank": 50, "score": 20696.746942287966 }, { "content": "/**\n\n * 忽略空行与每行末尾的空格与制表符\n\n * 如果某行只有空白字符,则忽略此行\n\n * \"Hello ; \"\n\n * \" \"\n\n * \" World\"\n\n * -----------------\n\n * \"Hello ;\"\n\n * \" World\"\n\n */\n", "file_path": "src/result.rs", "rank": 51, "score": 20693.7289453881 }, { "content": " assert_eq!(r, 12);\n\n assert!(e);\n\n }\n\n\n\n #[test]\n\n fn test3() {\n\n let v: &[u8] = \" Hello World!\".as_bytes();\n\n let (l, r, e) = next_line(v, 0, v.len());\n\n assert_eq!(l, 0);\n\n assert_eq!(r, 15);\n\n assert!(e);\n\n }\n\n\n\n #[test]\n\n fn test4() {\n\n let v: &[u8] =\n\n \" Hello World!\\n Hello World!\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n \\t\\t\\t \\t\\n\\n\\n\\n\"\n\n .as_bytes();\n\n let (l, r, e) = next_line(v, 0, v.len());\n\n assert_eq!(l, 0);\n", "file_path": "src/result.rs", "rank": 52, "score": 20693.7289453881 }, { "content": "# river\n\n\n\n## 环境要求\n\n\n\n- linux\n", "file_path": "README.md", "rank": 53, "score": 13801.58209961186 } ]
Rust
src/sys/windows/mod.rs
jmagnuson/mio
40df934a11b05233a7796c4de19a4ee06bc4e03e
mod afd; mod io_status_block; pub mod event; pub use event::{Event, Events}; mod selector; pub use selector::{Selector, SelectorInner, SockState}; cfg_net! { macro_rules! syscall { ($fn: ident ( $($arg: expr),* $(,)* ), $err_test: path, $err_value: expr) => {{ let res = unsafe { $fn($($arg, )*) }; if $err_test(&res, &$err_value) { Err(io::Error::last_os_error()) } else { Ok(res) } }}; } } cfg_tcp! { pub(crate) mod tcp; } cfg_udp! { pub(crate) mod udp; } mod waker; pub(crate) use waker::Waker; cfg_net! { use std::io; use std::os::windows::io::RawSocket; use std::pin::Pin; use std::sync::{Arc, Mutex}; use crate::{poll, Interest, Registry, Token}; mod net; struct InternalState { selector: Arc<SelectorInner>, token: Token, interests: Interest, sock_state: Pin<Arc<Mutex<SockState>>>, } impl Drop for InternalState { fn drop(&mut self) { let mut sock_state = self.sock_state.lock().unwrap(); sock_state.mark_delete(); } } pub struct IoSourceState { inner: Option<Box<InternalState>>, } impl IoSourceState { pub fn new() -> IoSourceState { IoSourceState { inner: None } } pub fn do_io<T, F, R>(&self, f: F, io: &T) -> io::Result<R> where F: FnOnce(&T) -> io::Result<R>, { let result = f(io); if let Err(ref e) = result { if e.kind() == io::ErrorKind::WouldBlock { self.inner.as_ref().map_or(Ok(()), |state| { state .selector .reregister(state.sock_state.clone(), state.token, state.interests) })?; } } result } pub fn register( &mut self, registry: &Registry, token: Token, interests: Interest, socket: RawSocket, ) -> io::Result<()> { if self.inner.is_some() { Err(io::ErrorKind::AlreadyExists.into()) } else { poll::selector(registry) .register(socket, token, interests) .map(|state| { self.inner = Some(Box::new(state)); }) } } pub fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { match self.inner.as_mut() { Some(state) => { poll::selector(registry) .reregister(state.sock_state.clone(), token, interests) .map(|()| { state.token = 
token; state.interests = interests; }) } None => Err(io::ErrorKind::NotFound.into()), } } pub fn deregister(&mut self) -> io::Result<()> { match self.inner.as_mut() { Some(state) => { { let mut sock_state = state.sock_state.lock().unwrap(); sock_state.mark_delete(); } self.inner = None; Ok(()) } None => Err(io::ErrorKind::NotFound.into()), } } } }
mod afd; mod io_status_block; pub mod event; pub use event::{Event, Events}; mod selector; pub use selector::{Selector, SelectorInner, SockState}; cfg_net! { macro_rules! syscall { ($fn: ident ( $($arg: expr),* $(,)* ), $err_test: path, $err_value: expr) => {{ let res = unsafe { $fn($($arg, )*) }; if $err_test(&res, &$err_value) { Err(io::Error::last_os_error()) } else { Ok(res) } }}; } } cfg_tcp! { pub(crate) mod tcp; } cfg_udp! { pub(crate) mod udp; } mod waker; pub(crate) use waker::Waker; cfg_net! { use std::io; use std::os::windows::io::RawSocket; use std::pin::Pin; use std::sync::{Arc, Mutex}; use crate::{poll, Interest, Registry, Token}; mod net; struct InternalState { selector: Arc<SelectorInner>, token: Token, interests: Interest, sock_state: Pin<Arc<Mutex<SockState>>>, } impl Drop for InternalState { fn drop(&mut self) { let mut sock_state = self.sock_state.lock().unwrap(); sock_state.mark_delete(); } } pub struct IoSourceState { inner: Option<Box<InternalState>>, } impl IoSourceState { pub fn new
f e.kind() == io::ErrorKind::WouldBlock { self.inner.as_ref().map_or(Ok(()), |state| { state .selector .reregister(state.sock_state.clone(), state.token, state.interests) })?; } } result } pub fn register( &mut self, registry: &Registry, token: Token, interests: Interest, socket: RawSocket, ) -> io::Result<()> { if self.inner.is_some() { Err(io::ErrorKind::AlreadyExists.into()) } else { poll::selector(registry) .register(socket, token, interests) .map(|state| { self.inner = Some(Box::new(state)); }) } } pub fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> io::Result<()> { match self.inner.as_mut() { Some(state) => { poll::selector(registry) .reregister(state.sock_state.clone(), token, interests) .map(|()| { state.token = token; state.interests = interests; }) } None => Err(io::ErrorKind::NotFound.into()), } } pub fn deregister(&mut self) -> io::Result<()> { match self.inner.as_mut() { Some(state) => { { let mut sock_state = state.sock_state.lock().unwrap(); sock_state.mark_delete(); } self.inner = None; Ok(()) } None => Err(io::ErrorKind::NotFound.into()), } } } }
() -> IoSourceState { IoSourceState { inner: None } } pub fn do_io<T, F, R>(&self, f: F, io: &T) -> io::Result<R> where F: FnOnce(&T) -> io::Result<R>, { let result = f(io); if let Err(ref e) = result { i
random
[ { "content": "fn expect_waker_event(poll: &mut Poll, events: &mut Events, token: Token) {\n\n poll.poll(events, Some(Duration::from_millis(100))).unwrap();\n\n assert!(!events.is_empty());\n\n for event in events.iter() {\n\n assert_eq!(event.token(), token);\n\n assert!(event.is_readable());\n\n }\n\n}\n", "file_path": "tests/waker.rs", "rank": 0, "score": 271542.30509593844 }, { "content": "pub fn token(event: &Event) -> Token {\n\n Token(event.data as usize)\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 1, "score": 267832.628396327 }, { "content": "pub fn expect_no_events(poll: &mut Poll, events: &mut Events) {\n\n poll.poll(events, Some(Duration::from_millis(50)))\n\n .expect(\"unable to poll\");\n\n if !events.is_empty() {\n\n for event in events.iter() {\n\n error!(\"unexpected event: {:?}\", event);\n\n }\n\n panic!(\"received events, but didn't expect any, see above\");\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 2, "score": 245529.27694749608 }, { "content": "pub fn expect_events(poll: &mut Poll, events: &mut Events, mut expected: Vec<ExpectEvent>) {\n\n // In a lot of calls we expect more then one event, but it could be that\n\n // poll returns the first event only in a single call. 
To be a bit more\n\n // lenient we'll poll a couple of times.\n\n for _ in 0..3 {\n\n poll.poll(events, Some(Duration::from_millis(500)))\n\n .expect(\"unable to poll\");\n\n\n\n for event in events.iter() {\n\n let index = expected.iter().position(|expected| expected.matches(event));\n\n\n\n if let Some(index) = index {\n\n expected.swap_remove(index);\n\n } else {\n\n // Must accept sporadic events.\n\n warn!(\"got unexpected event: {:?}\", event);\n\n }\n\n }\n\n\n\n if expected.is_empty() {\n", "file_path": "tests/util/mod.rs", "rank": 3, "score": 233164.62807687192 }, { "content": "fn next(current: &mut Token) -> Token {\n\n let next = current.0;\n\n current.0 += 1;\n\n Token(next)\n\n}\n\n\n", "file_path": "examples/tcp_server.rs", "rank": 4, "score": 223039.3178926276 }, { "content": "pub fn debug_details(f: &mut fmt::Formatter<'_>, event: &Event) -> fmt::Result {\n\n #[allow(clippy::trivially_copy_pass_by_ref)]\n\n fn check_flags(got: &u32, want: &u32) -> bool {\n\n (got & want) != 0\n\n }\n\n debug_detail!(\n\n FlagsDetails(u32),\n\n check_flags,\n\n afd::POLL_RECEIVE,\n\n afd::POLL_RECEIVE_EXPEDITED,\n\n afd::POLL_SEND,\n\n afd::POLL_DISCONNECT,\n\n afd::POLL_ABORT,\n\n afd::POLL_LOCAL_CLOSE,\n\n afd::POLL_CONNECT,\n\n afd::POLL_ACCEPT,\n\n afd::POLL_CONNECT_FAIL,\n\n );\n\n\n\n f.debug_struct(\"event\")\n", "file_path": "src/sys/windows/event.rs", "rank": 5, "score": 205033.08745537468 }, { "content": "pub fn accept(_: &net::TcpListener) -> io::Result<(net::TcpStream, SocketAddr)> {\n\n os_required!();\n\n}\n", "file_path": "src/sys/shell/tcp.rs", "rank": 6, "score": 195075.89844126598 }, { "content": "pub fn accept(listener: &net::TcpListener) -> io::Result<(net::TcpStream, SocketAddr)> {\n\n let mut addr: MaybeUninit<libc::sockaddr_storage> = MaybeUninit::uninit();\n\n let mut length = size_of::<libc::sockaddr_storage>() as libc::socklen_t;\n\n\n\n // On platforms that support it we can use `accept4(2)` to set `NONBLOCK`\n\n // and `CLOEXEC` in the 
call to accept the connection.\n\n #[cfg(any(\n\n target_os = \"android\",\n\n target_os = \"dragonfly\",\n\n target_os = \"freebsd\",\n\n target_os = \"illumos\",\n\n target_os = \"linux\",\n\n target_os = \"netbsd\",\n\n target_os = \"openbsd\"\n\n ))]\n\n let stream = {\n\n syscall!(accept4(\n\n listener.as_raw_fd(),\n\n addr.as_mut_ptr() as *mut _,\n\n &mut length,\n", "file_path": "src/sys/unix/tcp.rs", "rank": 7, "score": 192542.79983726214 }, { "content": "pub fn accept(listener: &net::TcpListener) -> io::Result<(net::TcpStream, SocketAddr)> {\n\n // The non-blocking state of `listener` is inherited. See\n\n // https://docs.microsoft.com/en-us/windows/win32/api/winsock2/nf-winsock2-accept#remarks.\n\n listener.accept()\n\n}\n", "file_path": "src/sys/windows/tcp.rs", "rank": 8, "score": 192542.7998372622 }, { "content": "fn interests_to_afd_flags(interests: Interest) -> u32 {\n\n let mut flags = 0;\n\n\n\n if interests.is_readable() {\n\n // afd::POLL_DISCONNECT for is_read_hup()\n\n flags |= afd::POLL_RECEIVE | afd::POLL_ACCEPT | afd::POLL_DISCONNECT;\n\n }\n\n\n\n if interests.is_writable() {\n\n flags |= afd::POLL_SEND;\n\n }\n\n\n\n flags\n\n}\n", "file_path": "src/sys/windows/selector.rs", "rank": 9, "score": 191224.78318146497 }, { "content": "pub fn is_readable(event: &Event) -> bool {\n\n event.flags\n\n & (afd::POLL_RECEIVE | afd::POLL_DISCONNECT | afd::POLL_ACCEPT | afd::POLL_CONNECT_FAIL)\n\n != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 10, "score": 188855.61684048222 }, { "content": "pub fn is_error(event: &Event) -> bool {\n\n event.flags & afd::POLL_CONNECT_FAIL != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 11, "score": 188855.61684048222 }, { "content": "pub fn is_priority(event: &Event) -> bool {\n\n event.flags & afd::POLL_RECEIVE_EXPEDITED != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 12, "score": 188855.61684048222 }, { "content": "pub fn is_writable(event: &Event) -> bool 
{\n\n event.flags & (afd::POLL_SEND | afd::POLL_CONNECT_FAIL) != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 13, "score": 188855.61684048222 }, { "content": "pub fn is_write_closed(event: &Event) -> bool {\n\n event.flags & (afd::POLL_ABORT | afd::POLL_CONNECT_FAIL) != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 14, "score": 186267.06799441495 }, { "content": "pub fn is_read_closed(event: &Event) -> bool {\n\n event.flags & afd::POLL_DISCONNECT != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 15, "score": 186267.06799441495 }, { "content": "pub fn bind(_: SocketAddr) -> io::Result<net::UdpSocket> {\n\n os_required!()\n\n}\n", "file_path": "src/sys/shell/udp.rs", "rank": 16, "score": 183574.9595233753 }, { "content": "pub fn bind(_: SocketAddr) -> io::Result<net::TcpListener> {\n\n os_required!();\n\n}\n\n\n", "file_path": "src/sys/shell/tcp.rs", "rank": 17, "score": 183074.0368977918 }, { "content": "pub fn connect(_: SocketAddr) -> io::Result<net::TcpStream> {\n\n os_required!();\n\n}\n\n\n", "file_path": "src/sys/shell/tcp.rs", "rank": 18, "score": 183074.0368977918 }, { "content": "pub fn init_with_poll() -> (Poll, Events) {\n\n init();\n\n\n\n let poll = Poll::new().expect(\"unable to create Poll instance\");\n\n let events = Events::with_capacity(16);\n\n (poll, events)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 19, "score": 181452.67460044278 }, { "content": "pub fn bind(addr: SocketAddr) -> io::Result<net::UdpSocket> {\n\n // Gives a warning for non Apple platforms.\n\n #[allow(clippy::let_and_return)]\n\n let socket = new_ip_socket(addr, libc::SOCK_DGRAM);\n\n\n\n // Set SO_NOSIGPIPE on iOS and macOS (mirrors what libstd does).\n\n #[cfg(any(target_os = \"ios\", target_os = \"macos\"))]\n\n let socket = socket.and_then(|socket| {\n\n syscall!(setsockopt(\n\n socket,\n\n libc::SOL_SOCKET,\n\n libc::SO_NOSIGPIPE,\n\n &1 as *const libc::c_int as *const libc::c_void,\n\n 
std::mem::size_of::<libc::c_int>() as libc::socklen_t,\n\n ))\n\n .map(|_| socket)\n\n });\n\n\n\n socket.and_then(|socket| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n", "file_path": "src/sys/unix/udp.rs", "rank": 20, "score": 180559.24443097008 }, { "content": "pub fn bind(addr: SocketAddr) -> io::Result<net::UdpSocket> {\n\n init();\n\n new_socket(addr, SOCK_DGRAM).and_then(|socket| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(\n\n win_bind(socket, raw_addr, raw_addr_length,),\n\n PartialEq::eq,\n\n SOCKET_ERROR\n\n )\n\n .map_err(|err| {\n\n // Close the socket if we hit an error, ignoring the error\n\n // from closing since we can't pass back two errors.\n\n let _ = unsafe { closesocket(socket) };\n\n err\n\n })\n\n .map(|_| unsafe { net::UdpSocket::from_raw_socket(socket as StdSocket) })\n\n })\n\n}\n", "file_path": "src/sys/windows/udp.rs", "rank": 21, "score": 180559.24443097008 }, { "content": "pub fn bind(addr: SocketAddr) -> io::Result<net::TcpListener> {\n\n new_ip_socket(addr, libc::SOCK_STREAM).and_then(|socket| {\n\n // Set SO_REUSEADDR (mirrors what libstd does).\n\n syscall!(setsockopt(\n\n socket,\n\n libc::SOL_SOCKET,\n\n libc::SO_REUSEADDR,\n\n &1 as *const libc::c_int as *const libc::c_void,\n\n size_of::<libc::c_int>() as libc::socklen_t,\n\n ))\n\n .and_then(|_| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(bind(socket, raw_addr, raw_addr_length))\n\n })\n\n .and_then(|_| syscall!(listen(socket, 1024)))\n\n .map_err(|err| {\n\n // Close the socket if we hit an error, ignoring the error\n\n // from closing since we can't pass back two errors.\n\n let _ = unsafe { libc::close(socket) };\n\n err\n\n })\n\n .map(|_| unsafe { net::TcpListener::from_raw_fd(socket) })\n\n })\n\n}\n\n\n", "file_path": "src/sys/unix/tcp.rs", "rank": 22, "score": 180066.91724714916 }, { "content": "pub fn connect(addr: SocketAddr) -> io::Result<net::TcpStream> {\n\n init();\n\n new_socket(addr, 
SOCK_STREAM)\n\n .and_then(|socket| {\n\n // Required for a future `connect_overlapped` operation to be\n\n // executed successfully.\n\n let any_addr = inaddr_any(addr);\n\n let (raw_addr, raw_addr_length) = socket_addr(&any_addr);\n\n syscall!(\n\n win_bind(socket, raw_addr, raw_addr_length),\n\n PartialEq::eq,\n\n SOCKET_ERROR\n\n )\n\n .and_then(|_| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(\n\n win_connect(socket, raw_addr, raw_addr_length),\n\n PartialEq::eq,\n\n SOCKET_ERROR\n\n )\n", "file_path": "src/sys/windows/tcp.rs", "rank": 23, "score": 180066.91724714916 }, { "content": "pub fn connect(addr: SocketAddr) -> io::Result<net::TcpStream> {\n\n new_ip_socket(addr, libc::SOCK_STREAM)\n\n .and_then(|socket| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(connect(socket, raw_addr, raw_addr_length))\n\n .or_else(|err| match err {\n\n // Connect hasn't finished, but that is fine.\n\n ref err if err.raw_os_error() == Some(libc::EINPROGRESS) => Ok(0),\n\n err => Err(err),\n\n })\n\n .map(|_| socket)\n\n .map_err(|err| {\n\n // Close the socket if we hit an error, ignoring the error\n\n // from closing since we can't pass back two errors.\n\n let _ = unsafe { libc::close(socket) };\n\n err\n\n })\n\n })\n\n .map(|socket| unsafe { net::TcpStream::from_raw_fd(socket) })\n\n}\n\n\n", "file_path": "src/sys/unix/tcp.rs", "rank": 24, "score": 180066.91724714916 }, { "content": "pub fn bind(addr: SocketAddr) -> io::Result<net::TcpListener> {\n\n init();\n\n new_socket(addr, SOCK_STREAM).and_then(|socket| {\n\n let (raw_addr, raw_addr_length) = socket_addr(&addr);\n\n syscall!(\n\n win_bind(socket, raw_addr, raw_addr_length,),\n\n PartialEq::eq,\n\n SOCKET_ERROR\n\n )\n\n .and_then(|_| syscall!(listen(socket, 1024), PartialEq::eq, SOCKET_ERROR))\n\n .map_err(|err| {\n\n // Close the socket if we hit an error, ignoring the error\n\n // from closing since we can't pass back two errors.\n\n let _ = unsafe { 
closesocket(socket) };\n\n err\n\n })\n\n .map(|_| unsafe { net::TcpListener::from_raw_socket(socket as StdSocket) })\n\n })\n\n}\n\n\n", "file_path": "src/sys/windows/tcp.rs", "rank": 25, "score": 180066.91724714916 }, { "content": "pub fn is_lio(_: &Event) -> bool {\n\n // Not supported.\n\n false\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 26, "score": 177166.0770289926 }, { "content": "pub fn is_aio(_: &Event) -> bool {\n\n // Not supported.\n\n false\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 27, "score": 177166.0770289926 }, { "content": "/// Convert a raw overlapped pointer into a reference to `SockState`.\n\n/// Reverts `into_overlapped`.\n\nfn from_overlapped(ptr: *mut OVERLAPPED) -> Pin<Arc<Mutex<SockState>>> {\n\n let sock_ptr: *const Mutex<SockState> = ptr as *const _;\n\n unsafe { Pin::new_unchecked(Arc::from_raw(sock_ptr)) }\n\n}\n\n\n\n/// Each Selector has a globally unique(ish) ID associated with it. This ID\n\n/// gets tracked by `TcpStream`, `TcpListener`, etc... when they are first\n\n/// registered with the `Selector`. If a type that is previously associated with\n\n/// a `Selector` attempts to register itself with a different `Selector`, the\n\n/// operation will return with an error. 
This matches windows behavior.\n\n#[cfg(debug_assertions)]\n\nstatic NEXT_ID: AtomicUsize = AtomicUsize::new(0);\n\n\n\n/// Windows implementaion of `sys::Selector`\n\n///\n\n/// Edge-triggered event notification is simulated by resetting internal event flag of each socket state `SockState`\n\n/// and setting all events back by intercepting all requests that could cause `io::ErrorKind::WouldBlock` happening.\n\n///\n\n/// This selector is currently only support socket due to `Afd` driver is winsock2 specific.\n\n#[derive(Debug)]\n", "file_path": "src/sys/windows/selector.rs", "rank": 28, "score": 174241.43161259446 }, { "content": "#[test]\n\npub fn close_on_drop() {\n\n init();\n\n debug!(\"Starting TEST_CLOSE_ON_DROP\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // == Create & setup server socket\n\n let mut srv = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = srv.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(&mut srv, SERVER, Interest::READABLE)\n\n .unwrap();\n\n\n\n // == Create & setup client socket\n\n let mut sock = TcpStream::connect(addr).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut sock, CLIENT, Interest::WRITABLE)\n\n .unwrap();\n\n\n", "file_path": "tests/close_on_drop.rs", "rank": 29, "score": 170680.1726015042 }, { "content": "#[test]\n\npub fn udp_socket() {\n\n init();\n\n\n\n let tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let rx = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n send_recv_udp(tx, rx, false);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 30, "score": 169695.15168040074 }, { "content": "#[test]\n\npub fn udp_socket_discard() {\n\n init();\n\n\n\n let mut tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let mut rx = UdpSocket::bind(any_local_address()).unwrap();\n\n let udp_outside = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n let tx_addr = tx.local_addr().unwrap();\n\n let rx_addr = rx.local_addr().unwrap();\n\n\n\n 
assert!(tx.connect(rx_addr).is_ok());\n\n assert!(udp_outside.connect(rx_addr).is_ok());\n\n assert!(rx.connect(tx_addr).is_ok());\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n checked_write!(udp_outside.send(b\"hello world\"));\n\n\n\n poll.registry()\n\n .register(&mut rx, LISTENER, Interest::READABLE)\n", "file_path": "tests/udp_socket.rs", "rank": 31, "score": 166507.19937667562 }, { "content": "/// Returns a path to a temporary file using `name` as filename.\n\npub fn temp_file(name: &'static str) -> PathBuf {\n\n let mut path = temp_dir();\n\n path.push(name);\n\n path\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 32, "score": 164961.06328549457 }, { "content": "pub fn init() {\n\n static INIT: Once = Once::new();\n\n\n\n INIT.call_once(|| {\n\n env_logger::try_init().expect(\"unable to initialise logger\");\n\n\n\n // Remove all temporary files from previous test runs.\n\n let dir = temp_dir();\n\n let _ = fs::remove_dir_all(&dir);\n\n fs::create_dir_all(&dir).expect(\"unable to create temporary directory\");\n\n })\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 33, "score": 163616.38261351286 }, { "content": "#[test]\n\npub fn udp_socket_send_recv() {\n\n init();\n\n\n\n let (tx, rx) = connected_sockets();\n\n\n\n send_recv_udp(tx, rx, true);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 34, "score": 163488.11508767252 }, { "content": "#[test]\n\npub fn multicast() {\n\n init();\n\n\n\n debug!(\"Starting TEST_UDP_CONNECTIONLESS\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n let mut tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let mut rx = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n info!(\"Joining group 227.1.1.100\");\n\n let any = &\"0.0.0.0\".parse().unwrap();\n\n rx.join_multicast_v4(&\"227.1.1.100\".parse().unwrap(), any)\n\n .unwrap();\n\n\n\n info!(\"Joining group 227.1.1.101\");\n\n rx.join_multicast_v4(&\"227.1.1.101\".parse().unwrap(), any)\n\n .unwrap();\n\n\n\n info!(\"Registering 
SENDER\");\n\n poll.registry()\n", "file_path": "tests/udp_socket.rs", "rank": 35, "score": 163244.12332969555 }, { "content": "/// Call all registration operations, ending with `source` being registered with `token` and `final_interests`.\n\npub fn registry_ops_flow(\n\n registry: &Registry,\n\n source: &mut dyn Source,\n\n token: Token,\n\n init_interests: Interest,\n\n final_interests: Interest,\n\n) -> io::Result<()> {\n\n registry.register(source, token, init_interests).unwrap();\n\n registry.deregister(source).unwrap();\n\n\n\n registry.register(source, token, init_interests).unwrap();\n\n registry.reregister(source, token, final_interests)\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 36, "score": 160462.75719296568 }, { "content": "fn smoke_test_connected_udp_socket(mut socket1: UdpSocket, mut socket2: UdpSocket) {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n assert_socket_non_blocking(&socket1);\n\n assert_socket_close_on_exec(&socket1);\n\n assert_socket_non_blocking(&socket2);\n\n assert_socket_close_on_exec(&socket2);\n\n\n\n poll.registry()\n\n .register(\n\n &mut socket1,\n\n ID1,\n\n Interest::READABLE.add(Interest::WRITABLE),\n\n )\n\n .expect(\"unable to register UDP socket\");\n\n poll.registry()\n\n .register(\n\n &mut socket2,\n\n ID2,\n\n Interest::READABLE.add(Interest::WRITABLE),\n", "file_path": "tests/udp_socket.rs", "rank": 37, "score": 160016.13489470532 }, { "content": "fn smoke_test_unconnected_udp_socket(mut socket1: UdpSocket, mut socket2: UdpSocket) {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n assert_socket_non_blocking(&socket1);\n\n assert_socket_close_on_exec(&socket1);\n\n assert_socket_non_blocking(&socket2);\n\n assert_socket_close_on_exec(&socket2);\n\n\n\n let address1 = socket1.local_addr().unwrap();\n\n let address2 = socket2.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(\n\n &mut socket1,\n\n ID1,\n\n Interest::READABLE.add(Interest::WRITABLE),\n\n )\n\n .expect(\"unable to 
register UDP socket\");\n\n poll.registry()\n\n .register(\n", "file_path": "tests/udp_socket.rs", "rank": 38, "score": 160016.13489470532 }, { "content": "fn interests_to_epoll(interests: Interest) -> u32 {\n\n let mut kind = EPOLLET;\n\n\n\n if interests.is_readable() {\n\n kind = kind | EPOLLIN | EPOLLRDHUP;\n\n }\n\n\n\n if interests.is_writable() {\n\n kind |= EPOLLOUT;\n\n }\n\n\n\n kind as u32\n\n}\n\n\n\npub type Event = libc::epoll_event;\n\npub type Events = Vec<Event>;\n\n\n\npub mod event {\n\n use std::fmt;\n\n\n", "file_path": "src/sys/unix/selector/epoll.rs", "rank": 39, "score": 157270.25926046443 }, { "content": "#[test]\n\n#[cfg(any(target_os = \"linux\", target_os = \"windows\"))]\n\npub fn double_register_different_token() {\n\n init();\n\n let poll = Poll::new().unwrap();\n\n\n\n let mut l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut l, Token(0), Interest::READABLE)\n\n .unwrap();\n\n\n\n assert!(poll\n\n .registry()\n\n .register(&mut l, Token(1), Interest::READABLE)\n\n .is_err());\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 40, "score": 156962.3820084997 }, { "content": "fn send_recv_udp(mut tx: UdpSocket, mut rx: UdpSocket, connected: bool) {\n\n init();\n\n\n\n debug!(\"Starting TEST_UDP_SOCKETS\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // ensure that the sockets are non-blocking\n\n let mut buf = [0; 128];\n\n assert_would_block(rx.recv_from(&mut buf));\n\n\n\n info!(\"Registering SENDER\");\n\n poll.registry()\n\n .register(&mut tx, SENDER, Interest::WRITABLE)\n\n .unwrap();\n\n\n\n info!(\"Registering LISTENER\");\n\n poll.registry()\n\n .register(&mut rx, LISTENER, Interest::READABLE)\n\n .unwrap();\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 41, "score": 156072.9987517338 }, { "content": "#[test]\n\npub fn reregister_different_interest_without_poll() {\n\n init();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n let mut poll = 
Poll::new().unwrap();\n\n\n\n // Create the listener\n\n let mut l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n // Register the listener with `Poll`\n\n poll.registry()\n\n .register(&mut l, Token(0), Interest::READABLE)\n\n .unwrap();\n\n\n\n let mut s1 = TcpStream::connect(l.local_addr().unwrap()).unwrap();\n\n poll.registry()\n\n .register(&mut s1, Token(2), Interest::READABLE)\n\n .unwrap();\n\n\n\n const TIMEOUT: Duration = Duration::from_millis(200);\n\n sleep(TIMEOUT);\n\n\n\n poll.registry()\n\n .reregister(&mut l, Token(0), Interest::WRITABLE)\n\n .unwrap();\n\n\n\n poll.poll(&mut events, Some(TIMEOUT)).unwrap();\n\n assert!(events.iter().next().is_none());\n\n}\n\n\n", "file_path": "tests/registering.rs", "rank": 42, "score": 153698.30812140414 }, { "content": "/// Bind to any port on localhost.\n\npub fn any_local_address() -> SocketAddr {\n\n \"127.0.0.1:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 43, "score": 149304.41788754307 }, { "content": "#[derive(Debug)]\n\nstruct AfdGroup {\n\n cp: Arc<CompletionPort>,\n\n afd_group: Mutex<Vec<Arc<Afd>>>,\n\n}\n\n\n\nimpl AfdGroup {\n\n pub fn new(cp: Arc<CompletionPort>) -> AfdGroup {\n\n AfdGroup {\n\n afd_group: Mutex::new(Vec::new()),\n\n cp,\n\n }\n\n }\n\n\n\n pub fn release_unused_afd(&self) {\n\n let mut afd_group = self.afd_group.lock().unwrap();\n\n afd_group.retain(|g| Arc::strong_count(&g) > 1);\n\n }\n\n}\n\n\n\ncfg_net! 
{\n", "file_path": "src/sys/windows/selector.rs", "rank": 44, "score": 147339.69204001903 }, { "content": "/// Bind to any port on localhost, using a IPv6 address.\n\npub fn any_local_ipv6_address() -> SocketAddr {\n\n \"[::1]:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 45, "score": 146283.34630935587 }, { "content": "pub fn assert_sync<T: Sync>() {}\n", "file_path": "tests/util/mod.rs", "rank": 46, "score": 145880.28957784886 }, { "content": "pub fn assert_send<T: Send>() {}\n\n\n\n/// An event that is expected to show up when `Poll` is polled, see\n\n/// `expect_events`.\n\n#[derive(Debug)]\n\npub struct ExpectEvent {\n\n token: Token,\n\n readiness: Readiness,\n\n}\n\n\n\nimpl ExpectEvent {\n\n pub fn new<R>(token: Token, readiness: R) -> ExpectEvent\n\n where\n\n R: Into<Readiness>,\n\n {\n\n ExpectEvent {\n\n token,\n\n readiness: readiness.into(),\n\n }\n\n }\n", "file_path": "tests/util/mod.rs", "rank": 47, "score": 145880.28957784886 }, { "content": "#[test]\n\nfn waker() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n\n\n waker.wake().expect(\"unable to wake\");\n\n expect_waker_event(&mut poll, &mut events, token);\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 48, "score": 145662.7376506278 }, { "content": "#[test]\n\nfn tcp_no_events_after_deregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n let mut stream = TcpStream::connect(addr).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .unwrap();\n\n poll.registry()\n\n .register(&mut stream, ID3, Interest::READABLE)\n\n .unwrap();\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut 
events,\n\n vec![ExpectEvent::new(ID1, Interest::READABLE)],\n\n );\n\n\n", "file_path": "tests/tcp.rs", "rank": 49, "score": 144937.18888828982 }, { "content": "#[test]\n\nfn write_then_drop() {\n\n init();\n\n\n\n let mut a = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = a.local_addr().unwrap();\n\n let mut s = TcpStream::connect(addr).unwrap();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n poll.registry()\n\n .register(&mut a, Token(1), Interest::READABLE)\n\n .unwrap();\n\n\n\n poll.registry()\n\n .register(&mut s, Token(3), Interest::READABLE)\n\n .unwrap();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n while events.is_empty() {\n\n poll.poll(&mut events, None).unwrap();\n", "file_path": "tests/tcp.rs", "rank": 50, "score": 139543.32558056846 }, { "content": "#[test]\n\nfn udp_socket_no_events_after_deregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut socket = UdpSocket::bind(any_local_address()).unwrap();\n\n let address = socket.local_addr().unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let thread_handle = send_packets(address, 1, barrier.clone());\n\n\n\n poll.registry()\n\n .register(&mut socket, ID1, Interest::READABLE)\n\n .unwrap();\n\n\n\n // Let the packet be send.\n\n barrier.wait();\n\n\n\n poll.registry().deregister(&mut socket).unwrap();\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // But we do expect a packet to be send.\n\n let mut buf = [0; 20];\n\n expect_read!(socket.recv_from(&mut buf), DATA1, __anywhere);\n\n\n\n thread_handle.join().expect(\"unable to join thread\");\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 51, "score": 138968.65675065742 }, { "content": "#[cfg(windows)]\n\npub fn assert_socket_close_on_exec<S>(_: &S) {\n\n // Windows doesn't have this concept.\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 52, "score": 137070.91283216968 }, { "content": "#[cfg(windows)]\n\npub fn 
assert_socket_non_blocking<S>(_: &S) {\n\n // No way to get this information...\n\n}\n\n\n\n/// Assert that `CLOEXEC` is set on `socket`.\n", "file_path": "tests/util/mod.rs", "rank": 53, "score": 137070.91283216968 }, { "content": "fn tcp_shutdown_client_both_close_event() {\n\n let (mut poll, mut events) = init_with_poll();\n\n let barrier = Arc::new(Barrier::new(2));\n\n\n\n let (handle, sockaddr) = start_listener(1, Some(barrier.clone()), false);\n\n let mut stream = TcpStream::connect(sockaddr).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::READABLE.add(Interest::WRITABLE))\n\n .unwrap();\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::WRITABLE)],\n\n );\n\n\n\n stream.shutdown(Shutdown::Both).unwrap();\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Readiness::WRITE_CLOSED)],\n\n );\n\n\n\n barrier.wait();\n\n handle.join().expect(\"failed to join thread\");\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 54, "score": 135404.89094767752 }, { "content": "#[test]\n\nfn no_events_after_deregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let address = listener.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(&mut listener, ID1, Interest::READABLE)\n\n .unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let thread_handle = start_connections(address, 1, barrier.clone());\n\n\n\n poll.registry().deregister(&mut listener).unwrap();\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // Should still be able to accept the connection.\n\n let (stream, peer_address) = listener.accept().expect(\"unable to accept connection\");\n\n assert!(peer_address.ip().is_loopback());\n", "file_path": "tests/tcp_listener.rs", "rank": 55, "score": 135232.14717464894 }, { "content": "#[test]\n\nfn no_events_after_deregister() {\n\n let (mut poll, 
mut events) = init_with_poll();\n\n\n\n let (thread_handle, address) = echo_listener(any_local_address(), 1);\n\n\n\n let mut stream = TcpStream::connect(address).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::WRITABLE.add(Interest::READABLE))\n\n .expect(\"unable to register TCP stream\");\n\n\n\n poll.registry()\n\n .deregister(&mut stream)\n\n .expect(\"unable to deregister TCP stream\");\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // We do expect to be connected.\n\n assert_eq!(stream.peer_addr().unwrap(), address);\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 56, "score": 135232.14717464894 }, { "content": "#[cfg(unix)]\n\npub fn assert_socket_non_blocking<S>(socket: &S)\n\nwhere\n\n S: AsRawFd,\n\n{\n\n let flags = unsafe { libc::fcntl(socket.as_raw_fd(), libc::F_GETFL) };\n\n assert!(flags & libc::O_NONBLOCK != 0, \"socket not non-blocking\");\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 57, "score": 134368.76228279123 }, { "content": "#[cfg(unix)]\n\npub fn assert_socket_close_on_exec<S>(socket: &S)\n\nwhere\n\n S: AsRawFd,\n\n{\n\n let flags = unsafe { libc::fcntl(socket.as_raw_fd(), libc::F_GETFD) };\n\n assert!(flags & libc::FD_CLOEXEC != 0, \"socket flag CLOEXEC not set\");\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 58, "score": 134368.76228279123 }, { "content": "#[test]\n\nfn drop_cancels_interest_and_shuts_down() {\n\n init();\n\n\n\n use mio::net::TcpStream;\n\n use std::io;\n\n use std::io::Read;\n\n use std::net::TcpListener;\n\n use std::thread;\n\n\n\n let l = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || {\n\n let mut s = l.incoming().next().unwrap().unwrap();\n\n s.set_read_timeout(Some(Duration::from_secs(5)))\n\n .expect(\"set_read_timeout\");\n\n let r = s.read(&mut [0; 16]);\n\n match r {\n\n Ok(_) => (),\n\n Err(e) => {\n", "file_path": "tests/poll.rs", "rank": 59, "score": 133236.8160848102 
}, { "content": "#[test]\n\nfn reregister_interest_token_usage() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut udp_socket = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut udp_socket, ID1, Interest::READABLE)\n\n .expect(\"unable to register listener\");\n\n\n\n poll.registry()\n\n .reregister(&mut udp_socket, ID1, Interest::READABLE)\n\n .expect(\"unable to register listener\");\n\n\n\n poll.registry()\n\n .reregister(&mut udp_socket, ID2, Interest::WRITABLE)\n\n .expect(\"unable to register listener\");\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n", "file_path": "tests/poll.rs", "rank": 60, "score": 133221.61348136497 }, { "content": "#[test]\n\nfn tcp_shutdown_server_write_close_event() {\n\n let (mut poll, mut events) = init_with_poll();\n\n let barrier = Arc::new(Barrier::new(2));\n\n\n\n let (handle, sockaddr) = start_listener(1, Some(barrier.clone()), true);\n\n let mut stream = TcpStream::connect(sockaddr).unwrap();\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, Interest::READABLE.add(Interest::WRITABLE))\n\n .unwrap();\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::WRITABLE)],\n\n );\n\n\n\n barrier.wait();\n\n\n\n expect_events(\n", "file_path": "tests/tcp_stream.rs", "rank": 61, "score": 132555.42348780282 }, { "content": "fn tcp_shutdown_client_write_close_event() {\n\n let (mut poll, mut events) = init_with_poll();\n\n let barrier = Arc::new(Barrier::new(2));\n\n\n\n let (handle, sockaddr) = start_listener(1, Some(barrier.clone()), false);\n\n let mut stream = TcpStream::connect(sockaddr).unwrap();\n\n\n\n let interests = Interest::READABLE | Interest::WRITABLE;\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, interests)\n\n .unwrap();\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::WRITABLE)],\n\n );\n\n\n\n stream.shutdown(Shutdown::Write).unwrap();\n\n 
expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Readiness::WRITE_CLOSED)],\n\n );\n\n\n\n barrier.wait();\n\n handle.join().expect(\"failed to join thread\");\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 62, "score": 132555.42348780282 }, { "content": "fn tcp_shutdown_client_read_close_event() {\n\n let (mut poll, mut events) = init_with_poll();\n\n let barrier = Arc::new(Barrier::new(2));\n\n\n\n let (handle, sockaddr) = start_listener(1, Some(barrier.clone()), false);\n\n let mut stream = TcpStream::connect(sockaddr).unwrap();\n\n\n\n let interests = Interest::READABLE | Interest::WRITABLE;\n\n\n\n poll.registry()\n\n .register(&mut stream, ID1, interests)\n\n .unwrap();\n\n\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::WRITABLE)],\n\n );\n\n\n\n stream.shutdown(Shutdown::Read).unwrap();\n", "file_path": "tests/tcp_stream.rs", "rank": 63, "score": 132555.42348780282 }, { "content": "/// Returns `true` if the connection is done.\n\nfn handle_connection_event(\n\n registry: &Registry,\n\n connection: &mut TcpStream,\n\n event: &Event,\n\n) -> io::Result<bool> {\n\n if event.is_writable() {\n\n // We can (maybe) write to the connection.\n\n match connection.write(DATA) {\n\n // We want to write the entire `DATA` buffer in a single go. 
If we\n\n // write less we'll return a short write error (same as\n\n // `io::Write::write_all` does).\n\n Ok(n) if n < DATA.len() => return Err(io::ErrorKind::WriteZero.into()),\n\n Ok(_) => {\n\n // After we've written something we'll reregister the connection\n\n // to only respond to readable events.\n\n registry.reregister(connection, event.token(), Interest::READABLE)?\n\n }\n\n // Would block \"errors\" are the OS's way of saying that the\n\n // connection is not actually ready to perform this I/O operation.\n\n Err(ref err) if would_block(err) => {}\n", "file_path": "examples/tcp_server.rs", "rank": 64, "score": 131631.48873580244 }, { "content": "/// Assert that the provided result is an `io::Error` with kind `WouldBlock`.\n\npub fn assert_would_block<T>(result: io::Result<T>) {\n\n match result {\n\n Ok(_) => panic!(\"unexpected OK result, expected a `WouldBlock` error\"),\n\n Err(ref err) if err.kind() == io::ErrorKind::WouldBlock => {}\n\n Err(err) => panic!(\"unexpected error result: {}\", err),\n\n }\n\n}\n\n\n\n/// Assert that `NONBLOCK` is set on `socket`.\n", "file_path": "tests/util/mod.rs", "rank": 65, "score": 129628.84253829705 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn udp_register_multiple_event_loops() {\n\n init();\n\n\n\n let mut socket = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n let poll1 = Poll::new().unwrap();\n\n poll1\n\n .registry()\n\n .register(\n\n &mut socket,\n\n Token(0),\n\n Interest::READABLE | Interest::WRITABLE,\n\n )\n\n .unwrap();\n\n\n\n let poll2 = Poll::new().unwrap();\n\n\n\n // Try registering the same socket with the initial one\n\n let res = poll2.registry().register(\n\n &mut socket,\n\n Token(0),\n\n Interest::READABLE | Interest::WRITABLE,\n\n );\n\n assert_error(res, \"I/O source already registered with a `Registry`\");\n\n}\n\n\n", "file_path": "tests/registering.rs", "rank": 66, "score": 128685.83711612038 }, { "content": 
"#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn tcp_register_multiple_event_loops() {\n\n init();\n\n\n\n let mut listener = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n let poll1 = Poll::new().unwrap();\n\n poll1\n\n .registry()\n\n .register(\n\n &mut listener,\n\n Token(0),\n\n Interest::READABLE | Interest::WRITABLE,\n\n )\n\n .unwrap();\n\n\n\n let poll2 = Poll::new().unwrap();\n\n\n\n // Try registering the same socket with the initial one\n\n let res = poll2.registry().register(\n", "file_path": "tests/registering.rs", "rank": 67, "score": 128249.74129471435 }, { "content": "/// Returns the temporary directory for Mio test files.\n\nfn temp_dir() -> PathBuf {\n\n let mut path = env::temp_dir();\n\n path.push(\"mio_tests\");\n\n path\n\n}\n\n\n\n/// A checked {write, send, send_to} macro that ensures the entire buffer is\n\n/// written.\n\n///\n\n/// Usage: `checked_write!(stream.write(&DATA));`\n\n/// Also works for send(_to): `checked_write!(socket.send_to(DATA, address))`.\n\nmacro_rules! checked_write {\n\n ($socket: ident . 
$method: ident ( $data: expr $(, $arg: expr)* ) ) => {{\n\n let data = $data;\n\n let n = $socket.$method($data $(, $arg)*)\n\n .expect(\"unable to write to socket\");\n\n assert_eq!(n, data.len(), \"short write\");\n\n }};\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 68, "score": 127628.80008886152 }, { "content": "/// Converts the pointer to a `SockState` into a raw pointer.\n\n/// To revert see `from_overlapped`.\n\nfn into_overlapped(sock_state: Pin<Arc<Mutex<SockState>>>) -> PVOID {\n\n let overlapped_ptr: *const Mutex<SockState> =\n\n unsafe { Arc::into_raw(Pin::into_inner_unchecked(sock_state)) };\n\n overlapped_ptr as *mut _\n\n}\n\n\n", "file_path": "src/sys/windows/selector.rs", "rank": 69, "score": 125475.20357421652 }, { "content": "#[test]\n\npub fn register_deregister() {\n\n init();\n\n\n\n debug!(\"Starting TEST_REGISTER_DEREGISTER\");\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(1024);\n\n\n\n let mut server = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = server.local_addr().unwrap();\n\n\n\n info!(\"register server socket\");\n\n poll.registry()\n\n .register(&mut server, SERVER, Interest::READABLE)\n\n .unwrap();\n\n\n\n let mut client = TcpStream::connect(addr).unwrap();\n\n\n\n // Register client socket only as writable\n\n poll.registry()\n\n .register(&mut client, CLIENT, Interest::READABLE)\n", "file_path": "tests/registering.rs", "rank": 70, "score": 124564.70113195907 }, { "content": "mod listener;\n\npub use self::listener::TcpListener;\n\n\n\nmod stream;\n\npub use self::stream::TcpStream;\n", "file_path": "src/net/tcp/mod.rs", "rank": 71, "score": 119890.93601096045 }, { "content": "struct MyHandler {\n\n listener: TcpListener,\n\n connected: TcpStream,\n\n accepted: Option<TcpStream>,\n\n shutdown: bool,\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 72, "score": 116447.67335707704 }, { "content": "fn smoke_test_unconnected(mut datagram1: UnixDatagram, mut datagram2: 
UnixDatagram) {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n assert_socket_non_blocking(&datagram1);\n\n assert_socket_close_on_exec(&datagram1);\n\n assert_socket_non_blocking(&datagram2);\n\n assert_socket_close_on_exec(&datagram2);\n\n\n\n let addr1 = datagram1.local_addr().unwrap();\n\n let addr2 = datagram2.local_addr().unwrap();\n\n let path1 = addr1.as_pathname().expect(\"failed to get pathname\");\n\n let path2 = addr2.as_pathname().expect(\"failed to get pathname\");\n\n\n\n poll.registry()\n\n .register(\n\n &mut datagram1,\n\n TOKEN_1,\n\n Interest::READABLE.add(Interest::WRITABLE),\n\n )\n\n .unwrap();\n", "file_path": "tests/unix_datagram.rs", "rank": 73, "score": 113535.41326822853 }, { "content": "fn smoke_test_connected(mut datagram1: UnixDatagram, mut datagram2: UnixDatagram) {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n assert_socket_non_blocking(&datagram1);\n\n assert_socket_close_on_exec(&datagram1);\n\n assert_socket_non_blocking(&datagram2);\n\n assert_socket_close_on_exec(&datagram2);\n\n\n\n let local_addr1 = datagram1.local_addr().unwrap();\n\n let peer_addr1 = datagram1.peer_addr().unwrap();\n\n let local_addr2 = datagram2.local_addr().unwrap();\n\n let peer_addr2 = datagram2.peer_addr().unwrap();\n\n assert_eq!(\n\n local_addr1.as_pathname().expect(\"failed to get pathname\"),\n\n peer_addr2.as_pathname().expect(\"failed to get pathname\")\n\n );\n\n assert_eq!(\n\n local_addr2.as_pathname().expect(\"failed to get pathname\"),\n\n peer_addr1.as_pathname().expect(\"failed to get pathname\")\n\n );\n", "file_path": "tests/unix_datagram.rs", "rank": 74, "score": 113535.41326822853 }, { "content": "struct TestHandler {\n\n srv: TcpListener,\n\n cli: TcpStream,\n\n state: TestState,\n\n shutdown: bool,\n\n}\n\n\n\nimpl TestHandler {\n\n fn new(srv: TcpListener, cli: TcpStream) -> TestHandler {\n\n TestHandler {\n\n srv,\n\n cli,\n\n state: Initial,\n\n shutdown: false,\n\n }\n\n }\n\n\n\n fn handle_read(&mut self, 
poll: &mut Poll, tok: Token) {\n\n debug!(\"readable; tok={:?}\", tok);\n\n\n", "file_path": "tests/close_on_drop.rs", "rank": 75, "score": 112335.85735827382 }, { "content": "struct TestEventSource {\n\n registrations: Vec<(Token, Interest)>,\n\n reregistrations: Vec<(Token, Interest)>,\n\n deregister_count: usize,\n\n}\n\n\n\nimpl TestEventSource {\n\n fn new() -> TestEventSource {\n\n TestEventSource {\n\n registrations: Vec::new(),\n\n reregistrations: Vec::new(),\n\n deregister_count: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl event::Source for TestEventSource {\n\n fn register(\n\n &mut self,\n\n _registry: &Registry,\n", "file_path": "tests/poll.rs", "rank": 76, "score": 111881.41625692935 }, { "content": "#[test]\n\nfn events_all() {\n\n let (mut poll, mut events) = init_with_poll();\n\n assert_eq!(events.capacity(), 16);\n\n assert!(events.is_empty());\n\n\n\n let waker = Waker::new(poll.registry(), WAKE_TOKEN).unwrap();\n\n\n\n waker.wake().expect(\"unable to wake\");\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .unwrap();\n\n\n\n assert!(!events.is_empty());\n\n\n\n for event in events.iter() {\n\n assert_eq!(event.token(), WAKE_TOKEN);\n\n assert!(event.is_readable());\n\n }\n\n\n\n events.clear();\n\n assert!(events.is_empty());\n\n}\n", "file_path": "tests/events.rs", "rank": 77, "score": 111601.16027184139 }, { "content": "#[cfg(debug_assertions)]\n\n#[derive(Debug)]\n\nstruct SelectorId {\n\n id: AtomicUsize,\n\n}\n\n\n\n#[cfg(debug_assertions)]\n\nimpl SelectorId {\n\n /// Value of `id` if `SelectorId` is not associated with any\n\n /// `sys::Selector`. 
Valid selector ids start at 1.\n\n const UNASSOCIATED: usize = 0;\n\n\n\n /// Create a new `SelectorId`.\n\n const fn new() -> SelectorId {\n\n SelectorId {\n\n id: AtomicUsize::new(Self::UNASSOCIATED),\n\n }\n\n }\n\n\n\n /// Associate an I/O source with `registry`, returning an error if its\n\n /// already registered.\n\n fn associate(&self, registry: &Registry) -> io::Result<()> {\n", "file_path": "src/io_source.rs", "rank": 78, "score": 111578.8880321434 }, { "content": "struct ErroneousTestEventSource;\n\n\n\nimpl event::Source for ErroneousTestEventSource {\n\n fn register(\n\n &mut self,\n\n _registry: &Registry,\n\n _token: Token,\n\n _interests: Interest,\n\n ) -> io::Result<()> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"register\"))\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n\n _registry: &Registry,\n\n _token: Token,\n\n _interests: Interest,\n\n ) -> io::Result<()> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"reregister\"))\n\n }\n\n\n\n fn deregister(&mut self, _registry: &Registry) -> io::Result<()> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"deregister\"))\n\n }\n\n}\n\n\n", "file_path": "tests/poll.rs", "rank": 79, "score": 109420.06966787661 }, { "content": "/// Assert that `result` is an error and the formatted error (via\n\n/// `fmt::Display`) equals `expected_msg`.\n\npub fn assert_error<T, E: fmt::Display>(result: Result<T, E>, expected_msg: &str) {\n\n match result {\n\n Ok(_) => panic!(\"unexpected OK result\"),\n\n Err(err) => assert!(\n\n err.to_string().contains(expected_msg),\n\n \"wanted: {}, got: {}\",\n\n expected_msg,\n\n err,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 80, "score": 108205.6911330586 }, { "content": "/// An event source that may be registered with [`Registry`].\n\n///\n\n/// Types that implement `event::Source` can be registered with\n\n/// `Registry`. Users of Mio **should not** use the `event::Source` trait\n\n/// functions directly. 
Instead, the equivalent functions on `Registry` should\n\n/// be used.\n\n///\n\n/// See [`Registry`] for more details.\n\n///\n\n/// [`Registry`]: ../struct.Registry.html\n\n///\n\n/// # Implementing `event::Source`\n\n///\n\n/// Event sources are always backed by system handles, such as sockets or other\n\n/// system handles. These `event::Source`s will be monitored by the system\n\n/// selector. An implementation of `Source` will almost always delegates to a\n\n/// lower level handle. Examples of this are [`TcpStream`]s, or the *unix only*\n\n/// [`SourceFd`].\n\n///\n\n/// [`TcpStream`]: ../net/struct.TcpStream.html\n\n/// [`SourceFd`]: ../unix/struct.SourceFd.html\n\n///\n\n/// # Dropping `event::Source`s\n\n///\n\n/// All `event::Source`s, unless otherwise specified, need to be [deregistered]\n\n/// before being dropped for them to not leak resources. This goes against the\n\n/// normal drop behaviour of types in Rust which cleanup after themselves, e.g.\n\n/// a `File` will close itself. 
However since deregistering needs access to\n\n/// [`Registry`] this cannot be done while being dropped.\n\n///\n\n/// [deregistered]: ../struct.Registry.html#method.deregister\n\n///\n\n/// # Examples\n\n///\n\n/// Implementing `Source` on a struct containing a socket:\n\n///\n\n/// ```\n\n/// use mio::{Interest, Registry, Token};\n\n/// use mio::event::Source;\n\n/// use mio::net::TcpStream;\n\n///\n\n/// use std::io;\n\n///\n\n/// # #[allow(dead_code)]\n\n/// pub struct MySource {\n\n/// socket: TcpStream,\n\n/// }\n\n///\n\n/// impl Source for MySource {\n\n/// fn register(&mut self, registry: &Registry, token: Token, interests: Interest)\n\n/// -> io::Result<()>\n\n/// {\n\n/// // Delegate the `register` call to `socket`\n\n/// self.socket.register(registry, token, interests)\n\n/// }\n\n///\n\n/// fn reregister(&mut self, registry: &Registry, token: Token, interests: Interest)\n\n/// -> io::Result<()>\n\n/// {\n\n/// // Delegate the `reregister` call to `socket`\n\n/// self.socket.reregister(registry, token, interests)\n\n/// }\n\n///\n\n/// fn deregister(&mut self, registry: &Registry) -> io::Result<()> {\n\n/// // Delegate the `deregister` call to `socket`\n\n/// self.socket.deregister(registry)\n\n/// }\n\n/// }\n\n/// ```\n\npub trait Source {\n\n /// Register `self` with the given `Registry` instance.\n\n ///\n\n /// This function should not be called directly. Use [`Registry::register`]\n\n /// instead. Implementors should handle registration by delegating the call\n\n /// to another `Source` type.\n\n ///\n\n /// [`Registry::register`]: ../struct.Registry.html#method.register\n\n fn register(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> io::Result<()>;\n\n\n\n /// Re-register `self` with the given `Registry` instance.\n\n ///\n\n /// This function should not be called directly. Use\n\n /// [`Registry::reregister`] instead. 
Implementors should handle\n\n /// re-registration by either delegating the call to another `Source` type.\n", "file_path": "src/event/source.rs", "rank": 81, "score": 107545.8142265242 }, { "content": "#[test]\n\nfn tcp_listener() {\n\n smoke_test_tcp_listener(any_local_address(), TcpListener::bind);\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 82, "score": 105910.73769121163 }, { "content": "#[test]\n\nfn waker_wakeup_different_thread() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n\n\n let waker = Arc::new(waker);\n\n let waker1 = Arc::clone(&waker);\n\n let handle = thread::spawn(move || {\n\n waker1.wake().expect(\"unable to wake\");\n\n });\n\n\n\n expect_waker_event(&mut poll, &mut events, token);\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n handle.join().unwrap();\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 83, "score": 105198.46845778864 }, { "content": "#[test]\n\nfn waker_multiple_wakeups_same_thread() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n\n\n for _ in 0..3 {\n\n waker.wake().expect(\"unable to wake\");\n\n }\n\n expect_waker_event(&mut poll, &mut events, token);\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 84, "score": 105198.46845778864 }, { "content": "#[test]\n\nfn assert_event_source_implemented_for() {\n\n fn assert_event_source<E: event::Source>() {}\n\n\n\n assert_event_source::<Box<dyn event::Source>>();\n\n assert_event_source::<Box<TcpStream>>();\n\n}\n\n\n", "file_path": "tests/events.rs", "rank": 85, "score": 104775.8009905938 }, { "content": "#[test]\n\nfn 
udp_socket_reregister() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut socket = UdpSocket::bind(any_local_address()).unwrap();\n\n let address = socket.local_addr().unwrap();\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let thread_handle = send_packets(address, 1, barrier.clone());\n\n\n\n poll.registry()\n\n .register(&mut socket, ID1, Interest::WRITABLE)\n\n .unwrap();\n\n // Let the first packet be send.\n\n barrier.wait();\n\n expect_events(\n\n &mut poll,\n\n &mut events,\n\n vec![ExpectEvent::new(ID1, Interest::WRITABLE)], // Not readable!\n\n );\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 86, "score": 104349.29621606942 }, { "content": "#[test]\n\nfn udp_socket_register() {\n\n let (mut poll, mut events) = init_with_poll();\n\n\n\n let mut socket = UdpSocket::bind(any_local_address()).unwrap();\n\n poll.registry()\n\n .register(&mut socket, ID1, Interest::READABLE)\n\n .expect(\"unable to register UDP socket\");\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n // NOTE: more tests are done in the smoke tests above.\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 87, "score": 104349.29621606942 }, { "content": "#[test]\n\nfn tcp_listener_std() {\n\n smoke_test_tcp_listener(any_local_address(), |addr| {\n\n let listener = net::TcpListener::bind(addr).unwrap();\n\n // `std::net::TcpListener`s are blocking by default, so make sure it is in\n\n // non-blocking mode before wrapping in a Mio equivalent.\n\n listener.set_nonblocking(true).unwrap();\n\n Ok(TcpListener::from_std(listener))\n\n });\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 88, "score": 103778.63177691707 }, { "content": "#[test]\n\nfn tcp_listener_ipv6() {\n\n smoke_test_tcp_listener(any_local_ipv6_address(), TcpListener::bind);\n\n}\n\n\n", "file_path": "tests/tcp_listener.rs", "rank": 89, "score": 103778.63177691707 }, { "content": "#[test]\n\nfn tcp_stream_std() {\n\n smoke_test_tcp_stream(any_local_address(), |addr| {\n\n let 
stream = net::TcpStream::connect(addr).unwrap();\n\n // `std::net::TcpStream`s are blocking by default, so make sure it is\n\n // in non-blocking mode before wrapping in a Mio equivalent.\n\n stream.set_nonblocking(true).unwrap();\n\n Ok(TcpStream::from_std(stream))\n\n });\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 90, "score": 103778.63177691707 }, { "content": "#[test]\n\nfn tcp_stream_ipv6() {\n\n smoke_test_tcp_stream(any_local_ipv6_address(), TcpStream::connect);\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 91, "score": 103778.63177691707 }, { "content": "#[test]\n\nfn tcp_stream_ipv4() {\n\n smoke_test_tcp_stream(any_local_address(), TcpStream::connect);\n\n}\n\n\n", "file_path": "tests/tcp_stream.rs", "rank": 92, "score": 103778.63177691707 }, { "content": "#[test]\n\nfn waker_multiple_wakeups_different_thread() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n let waker = Arc::new(waker);\n\n let waker1 = Arc::clone(&waker);\n\n let waker2 = Arc::clone(&waker1);\n\n\n\n let handle1 = thread::spawn(move || {\n\n waker1.wake().expect(\"unable to wake\");\n\n });\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let barrier2 = barrier.clone();\n\n let handle2 = thread::spawn(move || {\n\n barrier2.wait();\n", "file_path": "tests/waker.rs", "rank": 93, "score": 103147.5278349294 }, { "content": "#[test]\n\nfn add() {\n\n let interest: Interest = Interest::READABLE.add(Interest::WRITABLE);\n\n\n\n assert!(interest.is_readable());\n\n assert!(interest.is_writable());\n\n}\n", "file_path": "tests/interest.rs", "rank": 94, "score": 102739.59622301935 }, { "content": "#[test]\n\nfn is_tests() {\n\n assert!(Interest::READABLE.is_readable());\n\n assert!(!Interest::READABLE.is_writable());\n\n 
assert!(!Interest::WRITABLE.is_readable());\n\n assert!(Interest::WRITABLE.is_writable());\n\n assert!(!Interest::WRITABLE.is_aio());\n\n assert!(!Interest::WRITABLE.is_lio());\n\n}\n\n\n", "file_path": "tests/interest.rs", "rank": 95, "score": 102739.59622301935 }, { "content": "#[test]\n\nfn bit_or() {\n\n let interests = Interest::READABLE | Interest::WRITABLE;\n\n assert!(interests.is_readable());\n\n assert!(interests.is_writable());\n\n}\n\n\n", "file_path": "tests/interest.rs", "rank": 96, "score": 102739.59622301935 }, { "content": "#[test]\n\nfn unconnected_udp_socket_std() {\n\n let socket1 = net::UdpSocket::bind(any_local_address()).unwrap();\n\n let socket2 = net::UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n // `std::net::UdpSocket`s are blocking by default, so make sure they are\n\n // in non-blocking mode before wrapping in a Mio equivalent.\n\n socket1.set_nonblocking(true).unwrap();\n\n socket2.set_nonblocking(true).unwrap();\n\n\n\n let socket1 = UdpSocket::from_std(socket1);\n\n let socket2 = UdpSocket::from_std(socket2);\n\n smoke_test_unconnected_udp_socket(socket1, socket2);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 97, "score": 102314.91585521438 }, { "content": "#[test]\n\nfn unconnected_udp_socket_ipv4() {\n\n let socket1 = UdpSocket::bind(any_local_address()).unwrap();\n\n let socket2 = UdpSocket::bind(any_local_address()).unwrap();\n\n smoke_test_unconnected_udp_socket(socket1, socket2);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 98, "score": 102314.91585521438 }, { "content": "#[test]\n\nfn unconnected_udp_socket_ipv6() {\n\n let socket1 = UdpSocket::bind(any_local_ipv6_address()).unwrap();\n\n let socket2 = UdpSocket::bind(any_local_ipv6_address()).unwrap();\n\n smoke_test_unconnected_udp_socket(socket1, socket2);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 99, "score": 102314.91585521438 } ]
Rust
sway-core/src/parse_tree/declaration/reassignment.rs
mitchmindtree/sway
38479274e0c50518e20c919682b8173ae4a555d3
use crate::build_config::BuildConfig; use crate::error::{err, ok, CompileError, CompileResult}; use crate::parse_tree::Expression; use crate::parser::Rule; use crate::span::Span; use crate::{parse_array_index, Ident}; use pest::iterators::Pair; #[derive(Debug, Clone)] pub struct Reassignment { pub lhs: Box<Expression>, pub rhs: Expression, pub(crate) span: Span, } impl Reassignment { pub(crate) fn parse_from_pair( pair: Pair<Rule>, config: Option<&BuildConfig>, ) -> CompileResult<Reassignment> { let path = config.map(|c| c.path()); let span = Span { span: pair.as_span(), path: path.clone(), }; let mut warnings = vec![]; let mut errors = vec![]; let mut iter = pair.into_inner(); let variable_or_struct_reassignment = iter.next().expect("guaranteed by grammar"); match variable_or_struct_reassignment.as_rule() { Rule::variable_reassignment => { let mut iter = variable_or_struct_reassignment.into_inner(); let name = check!( Expression::parse_from_pair_inner(iter.next().unwrap(), config), return err(warnings, errors), warnings, errors ); let body = iter.next().unwrap(); let body = check!( Expression::parse_from_pair(body.clone(), config), Expression::Tuple { fields: vec![], span: Span { span: body.as_span(), path } }, warnings, errors ); ok( Reassignment { lhs: Box::new(name), rhs: body, span, }, warnings, errors, ) } Rule::struct_field_reassignment => { let mut iter = variable_or_struct_reassignment.into_inner(); let lhs = iter.next().expect("guaranteed by grammar"); let rhs = iter.next().expect("guaranteed by grammar"); let rhs_span = Span { span: rhs.as_span(), path: path.clone(), }; let body = check!( Expression::parse_from_pair(rhs, config), Expression::Tuple { fields: vec![], span: rhs_span }, warnings, errors ); let inner = lhs.into_inner().next().expect("guaranteed by grammar"); assert_eq!(inner.as_rule(), Rule::subfield_path); let mut name_parts = inner.into_inner(); let mut expr = check!( parse_subfield_path_ensure_only_var( name_parts.next().expect("guaranteed 
by grammar"), config ), return err(warnings, errors), warnings, errors ); for name_part in name_parts { expr = Expression::SubfieldExpression { prefix: Box::new(expr.clone()), span: Span { span: name_part.as_span(), path: path.clone(), }, field_to_access: check!( Ident::parse_from_pair(name_part, config), continue, warnings, errors ), } } ok( Reassignment { lhs: Box::new(expr), rhs: body, span, }, warnings, errors, ) } _ => unreachable!("guaranteed by grammar"), } } } fn parse_subfield_path_ensure_only_var( item: Pair<Rule>, config: Option<&BuildConfig>, ) -> CompileResult<Expression> { let warnings = vec![]; let mut errors = vec![]; let path = config.map(|c| c.path()); let item = item.into_inner().next().expect("guarenteed by grammar"); match item.as_rule() { Rule::call_item => parse_call_item_ensure_only_var(item, config), Rule::array_index => parse_array_index(item, config), a => { eprintln!( "Unimplemented subfield path: {:?} ({:?}) ({:?})", a, item.as_str(), item.as_rule() ); errors.push(CompileError::UnimplementedRule( a, Span { span: item.as_span(), path: path.clone(), }, )); let exp = Expression::Tuple { fields: vec![], span: Span { span: item.as_span(), path, }, }; ok(exp, warnings, errors) } } } fn parse_call_item_ensure_only_var( item: Pair<Rule>, config: Option<&BuildConfig>, ) -> CompileResult<Expression> { let path = config.map(|c| c.path()); let mut warnings = vec![]; let mut errors = vec![]; assert_eq!(item.as_rule(), Rule::call_item); let item = item.into_inner().next().expect("guaranteed by grammar"); let exp = match item.as_rule() { Rule::ident => Expression::VariableExpression { name: check!( Ident::parse_from_pair(item.clone(), config), return err(warnings, errors), warnings, errors ), span: Span { span: item.as_span(), path, }, }, Rule::expr => { errors.push(CompileError::InvalidExpressionOnLhs { span: Span { span: item.as_span(), path, }, }); return err(warnings, errors); } a => unreachable!("{:?}", a), }; ok(exp, warnings, errors) }
use crate::build_config::BuildConfig; use crate::error::{err, ok, CompileError, CompileResult}; use crate::parse_tree::Expression; use crate::parser::Rule; use crate::span::Span; use crate::{parse_array_index, Ident}; use pest::iterators::Pair; #[derive(Debug, Clone)] pub struct Reassignment { pub lhs: Box<Expression>, pub rhs: Expression, pub(crate) span: Span, } impl Reassignment { pub(crate) fn parse_from_pair( pair: Pair<Rule>, config: Option<&BuildConfig>, ) -> CompileResult<Reassignment> { let path = config.map(|c| c.path()); let span = Span { span: pair.as_span(), path: path.clone(), }; let mut warnings = vec![]; let mut errors = vec![]; let mut iter = pair.into_inner(); let variable_or_struct_reassignment = iter.next().expect("guaranteed by grammar"); match variable_or_struct_reassignment.as_rule() { Rule::variable_reassignment => { let mut iter = variable_or_struct_reassignment.into_inner();
let body = iter.next().unwrap(); let body = check!( Expression::parse_from_pair(body.clone(), config), Expression::Tuple { fields: vec![], span: Span { span: body.as_span(), path } }, warnings, errors ); ok( Reassignment { lhs: Box::new(name), rhs: body, span, }, warnings, errors, ) } Rule::struct_field_reassignment => { let mut iter = variable_or_struct_reassignment.into_inner(); let lhs = iter.next().expect("guaranteed by grammar"); let rhs = iter.next().expect("guaranteed by grammar"); let rhs_span = Span { span: rhs.as_span(), path: path.clone(), }; let body = check!( Expression::parse_from_pair(rhs, config), Expression::Tuple { fields: vec![], span: rhs_span }, warnings, errors ); let inner = lhs.into_inner().next().expect("guaranteed by grammar"); assert_eq!(inner.as_rule(), Rule::subfield_path); let mut name_parts = inner.into_inner(); let mut expr = check!( parse_subfield_path_ensure_only_var( name_parts.next().expect("guaranteed by grammar"), config ), return err(warnings, errors), warnings, errors ); for name_part in name_parts { expr = Expression::SubfieldExpression { prefix: Box::new(expr.clone()), span: Span { span: name_part.as_span(), path: path.clone(), }, field_to_access: check!( Ident::parse_from_pair(name_part, config), continue, warnings, errors ), } } ok( Reassignment { lhs: Box::new(expr), rhs: body, span, }, warnings, errors, ) } _ => unreachable!("guaranteed by grammar"), } } } fn parse_subfield_path_ensure_only_var( item: Pair<Rule>, config: Option<&BuildConfig>, ) -> CompileResult<Expression> { let warnings = vec![]; let mut errors = vec![]; let path = config.map(|c| c.path()); let item = item.into_inner().next().expect("guarenteed by grammar"); match item.as_rule() { Rule::call_item => parse_call_item_ensure_only_var(item, config), Rule::array_index => parse_array_index(item, config), a => { eprintln!( "Unimplemented subfield path: {:?} ({:?}) ({:?})", a, item.as_str(), item.as_rule() ); errors.push(CompileError::UnimplementedRule( a, 
Span { span: item.as_span(), path: path.clone(), }, )); let exp = Expression::Tuple { fields: vec![], span: Span { span: item.as_span(), path, }, }; ok(exp, warnings, errors) } } } fn parse_call_item_ensure_only_var( item: Pair<Rule>, config: Option<&BuildConfig>, ) -> CompileResult<Expression> { let path = config.map(|c| c.path()); let mut warnings = vec![]; let mut errors = vec![]; assert_eq!(item.as_rule(), Rule::call_item); let item = item.into_inner().next().expect("guaranteed by grammar"); let exp = match item.as_rule() { Rule::ident => Expression::VariableExpression { name: check!( Ident::parse_from_pair(item.clone(), config), return err(warnings, errors), warnings, errors ), span: Span { span: item.as_span(), path, }, }, Rule::expr => { errors.push(CompileError::InvalidExpressionOnLhs { span: Span { span: item.as_span(), path, }, }); return err(warnings, errors); } a => unreachable!("{:?}", a), }; ok(exp, warnings, errors) }
let name = check!( Expression::parse_from_pair_inner(iter.next().unwrap(), config), return err(warnings, errors), warnings, errors );
assignment_statement
[ { "content": "fn disallow_opcode(op: &Ident) -> Vec<CompileError> {\n\n let mut errors = vec![];\n\n\n\n match op.as_str().to_lowercase().as_str() {\n\n \"jnei\" => {\n\n errors.push(CompileError::DisallowedJnei {\n\n span: op.span().clone(),\n\n });\n\n }\n\n \"ji\" => {\n\n errors.push(CompileError::DisallowedJi {\n\n span: op.span().clone(),\n\n });\n\n }\n\n _ => (),\n\n };\n\n errors\n\n}\n", "file_path": "sway-core/src/parse_tree/expression/asm.rs", "rank": 0, "score": 332492.59722810367 }, { "content": "fn build_recursion_error(fn_sym: Ident, span: Span, chain: &[Ident]) -> CompileError {\n\n match chain.len() {\n\n // An empty chain indicates immediate recursion.\n\n 0 => CompileError::RecursiveCall {\n\n fn_name: fn_sym,\n\n span,\n\n },\n\n // Chain entries indicate mutual recursion.\n\n 1 => CompileError::RecursiveCallChain {\n\n fn_name: fn_sym,\n\n call_chain: chain[0].as_str().to_string(),\n\n span,\n\n },\n\n n => {\n\n let mut msg = chain[0].as_str().to_string();\n\n for ident in &chain[1..(n - 1)] {\n\n msg.push_str(\", \");\n\n msg.push_str(ident.as_str());\n\n }\n\n msg.push_str(\" and \");\n", "file_path": "sway-core/src/semantic_analysis/node_dependencies.rs", "rank": 1, "score": 327101.99477061036 }, { "content": "pub fn get_sway_files(path: PathBuf) -> Vec<PathBuf> {\n\n let mut files = vec![];\n\n let mut dir_entries = vec![path];\n\n\n\n while let Some(next_dir) = dir_entries.pop() {\n\n if let Ok(read_dir) = fs::read_dir(next_dir) {\n\n for entry in read_dir.filter_map(|res| res.ok()) {\n\n let path = entry.path();\n\n\n\n if path.is_dir() {\n\n dir_entries.push(path);\n\n } else if is_sway_file(&path) {\n\n files.push(path)\n\n }\n\n }\n\n }\n\n }\n\n\n\n files\n\n}\n", "file_path": "sway-utils/src/helpers.rs", "rank": 2, "score": 296480.96143866214 }, { "content": "// A call item is parsed as either an `ident` or a parenthesized `expr`. 
This method's job is to\n\n// figure out which variant of `call_item` this is and turn it into either a variable expression\n\n// or parse it as an expression otherwise.\n\nfn parse_call_item(item: Pair<Rule>, config: Option<&BuildConfig>) -> CompileResult<Expression> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n assert_eq!(item.as_rule(), Rule::call_item);\n\n let item = item.into_inner().next().expect(\"guaranteed by grammar\");\n\n let exp = match item.as_rule() {\n\n Rule::ident => Expression::VariableExpression {\n\n name: check!(\n\n Ident::parse_from_pair(item.clone(), config),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n ),\n\n span: Span {\n\n span: item.as_span(),\n\n path: config.map(|c| c.path()),\n\n },\n\n },\n\n Rule::expr => check!(\n\n Expression::parse_from_pair(item, config),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n ),\n\n a => unreachable!(\"{:?}\", a),\n\n };\n\n ok(exp, warnings, errors)\n\n}\n\n\n", "file_path": "sway-core/src/parse_tree/expression/mod.rs", "rank": 3, "score": 276128.98137743276 }, { "content": "fn parse_array_elems(elems: Pair<Rule>, config: Option<&BuildConfig>) -> CompileResult<Expression> {\n\n let mut warnings = Vec::new();\n\n let mut errors = Vec::new();\n\n\n\n let path = config.map(|cfg| cfg.path());\n\n let span = Span {\n\n span: elems.as_span(),\n\n path: path.clone(),\n\n };\n\n\n\n let mut elem_iter = elems.into_inner();\n\n let first_elem = elem_iter.next().unwrap();\n\n let contents = match first_elem.as_rule() {\n\n Rule::literal_value => {\n\n // The form [initialiser; count].\n\n let span = first_elem.as_span();\n\n let init = Literal::parse_from_pair(first_elem, config)\n\n .map(|(value, span)| Expression::Literal { value, span })\n\n .unwrap_or_else(&mut warnings, &mut errors, || Expression::Tuple {\n\n fields: vec![],\n", "file_path": "sway-core/src/parse_tree/expression/mod.rs", "rank": 4, "score": 276118.2502047675 }, { "content": "/// cleans 
whitespace, including newlines\n\npub fn clean_all_whitespace(iter: &mut Peekable<Enumerate<Chars>>) {\n\n while let Some((_, next_char)) = iter.peek() {\n\n if next_char.is_whitespace() {\n\n iter.next();\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 5, "score": 275014.8499960861 }, { "content": "pub fn handle_pipe_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n if *next_char == '|' {\n\n // it's OR operator\n\n code_line.append_with_whitespace(\"|| \");\n\n iter.next();\n\n } else {\n\n // it's just a single '|'\n\n code_line.append_with_whitespace(\"| \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"| \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 6, "score": 270146.6959500748 }, { "content": "pub fn handle_dash_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n if *next_char == '>' {\n\n // it's a return arrow\n\n code_line.append_with_whitespace(\"-> \");\n\n iter.next();\n\n } else {\n\n // it's just a single '-'\n\n code_line.append_with_whitespace(\"- \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"- \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 7, "score": 270146.6959500748 }, { "content": "pub fn handle_colon_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == ':' {\n\n // it's :: operator\n\n code_line.push_str(\"::\");\n\n iter.next();\n\n } else {\n\n code_line.push_str(\": \");\n\n }\n\n } else {\n\n code_line.push_str(\": \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 8, "score": 270146.6959500748 }, { "content": "pub fn handle_whitespace_case(code_line: &mut CodeLine, iter: &mut 
Peekable<Enumerate<Chars>>) {\n\n clean_all_whitespace(iter);\n\n\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n\n\n match next_char {\n\n '(' | ';' | ':' | ')' | ',' | '}' => {} // do nothing, handle it in next turn\n\n _ => {\n\n // add whitespace if it is not already there\n\n code_line.append_whitespace();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 9, "score": 270146.6959500748 }, { "content": "pub fn handle_assignment_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == '=' {\n\n // it's equality operator\n\n code_line.append_with_whitespace(\"== \");\n\n iter.next();\n\n } else if next_char == '>' {\n\n // it's fat arrow\n\n code_line.append_with_whitespace(\"=> \");\n\n iter.next();\n\n } else {\n\n code_line.append_equal_sign();\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"= \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 10, "score": 270146.6959500748 }, { "content": "pub fn handle_ampersand_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n if *next_char == '&' {\n\n // it's AND operator\n\n code_line.append_with_whitespace(\"&& \");\n\n iter.next();\n\n } else {\n\n // it's just a single '&'\n\n code_line.append_with_whitespace(\"& \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"& \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 11, "score": 270146.6959500748 }, { "content": "pub fn traverse_node(node: AstNode, tokens: &mut Vec<Token>) {\n\n match node.content {\n\n AstNodeContent::Declaration(dec) => handle_declaration(dec, tokens),\n\n AstNodeContent::Expression(exp) => handle_expression(exp, tokens),\n\n // TODO\n\n // handle other content types\n\n _ => {}\n\n };\n\n}\n\n\n", 
"file_path": "sway-server/src/core/token.rs", "rank": 12, "score": 267494.8650756114 }, { "content": "fn parse_op(op: Pair<Rule>, config: Option<&BuildConfig>) -> CompileResult<Op> {\n\n let path = config.map(|c| c.path());\n\n use OpVariant::*;\n\n let mut errors = Vec::new();\n\n let op_variant = match op.as_str() {\n\n \"+\" => Add,\n\n \"-\" => Subtract,\n\n \"/\" => Divide,\n\n \"*\" => Multiply,\n\n \"%\" => Modulo,\n\n \"||\" => Or,\n\n \"&&\" => And,\n\n \"==\" => Equals,\n\n \"!=\" => NotEquals,\n\n \"^\" => Xor,\n\n \"|\" => BinaryOr,\n\n \"&\" => BinaryAnd,\n\n \">\" => GreaterThan,\n\n \"<\" => LessThan,\n\n \">=\" => GreaterThanOrEqualTo,\n", "file_path": "sway-core/src/parse_tree/expression/mod.rs", "rank": 13, "score": 266598.9220895488 }, { "content": "fn match_tuple(exp: &Expression, elems: &[Scrutinee], span: &Span) -> CompileResult<MatcherResult> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut match_req_map = vec![];\n\n let mut match_impl_map = vec![];\n\n for (pos, elem) in elems.iter().enumerate() {\n\n let delayed_resolution_exp = Expression::DelayedMatchTypeResolution {\n\n variant: DelayedResolutionVariant::TupleVariant(DelayedTupleVariantResolution {\n\n exp: Box::new(exp.clone()),\n\n elem_num: pos,\n\n }),\n\n span: span.clone(),\n\n };\n\n let new_matches = check!(\n\n matcher(&delayed_resolution_exp, elem),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n match new_matches {\n", "file_path": "sway-core/src/parse_tree/expression/matcher.rs", "rank": 14, "score": 265340.5164873103 }, { "content": "fn handle_expression(expr: &Expression, changes: &mut Vec<Change>) {\n\n match &expr {\n\n Expression::StructExpression {\n\n struct_name: _,\n\n fields: _,\n\n span,\n\n } => changes.push(Change::new(span, ChangeType::Struct)),\n\n Expression::IfExp {\n\n condition: _,\n\n then,\n\n r#else,\n\n span: _,\n\n } => {\n\n handle_expression(then, changes);\n\n\n\n if let Some(else_expr) = r#else {\n\n 
handle_expression(else_expr, changes);\n\n }\n\n }\n\n Expression::CodeBlock { contents, span: _ } => {\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 15, "score": 265293.0695309638 }, { "content": "fn handle_expression(exp: Expression, tokens: &mut Vec<Token>) {\n\n match exp {\n\n Expression::CodeBlock { span: _, contents } => {\n\n let nodes = contents.contents;\n\n\n\n for node in nodes {\n\n traverse_node(node, tokens);\n\n }\n\n }\n\n Expression::FunctionApplication { name, .. } => {\n\n let ident = name.suffix;\n\n let token = Token::from_ident(&ident, TokenType::FunctionApplication);\n\n tokens.push(token);\n\n\n\n // TODO\n\n // perform a for/in on arguments ?\n\n }\n\n // TODO\n\n // handle other expressions\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "sway-server/src/core/token.rs", "rank": 16, "score": 262974.7449594537 }, { "content": "fn handle_implicit_return_expression(expr: &Expression, changes: &mut Vec<Change>) {\n\n handle_expression(expr, changes)\n\n}\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 17, "score": 260712.5822609018 }, { "content": "/// This algorithm desugars match expressions to if statements.\n\n///\n\n/// Given the following example:\n\n///\n\n/// ```ignore\n\n/// struct Point {\n\n/// x: u64,\n\n/// y: u64\n\n/// }\n\n///\n\n/// let p = Point {\n\n/// x: 42,\n\n/// y: 24\n\n/// };\n\n///\n\n/// match p {\n\n/// Point { x, y: 5 } => { x },\n\n/// Point { x, y: 24 } => { x },\n\n/// _ => 0\n\n/// }\n\n/// ```\n\n///\n\n/// The resulting if statement would look roughly like this:\n\n///\n\n/// ```ignore\n\n/// if y==5 {\n\n/// let x = 42;\n\n/// x\n\n/// } else if y==42 {\n\n/// let x = 42;\n\n/// x\n\n/// } else {\n\n/// 0\n\n/// }\n\n/// ```\n\n///\n\n/// The steps of the algorithm can roughly be broken down into:\n\n///\n\n/// 1. Assemble the \"matched branches.\"\n\n/// 2. Assemble the possibly nested giant if statement using the matched branches.\n\n/// 2a. 
Assemble the conditional that goes in the if primary expression.\n\n/// 2b. Assemble the statements that go inside of the body of the if expression\n\n/// 2c. Assemble the giant if statement.\n\n/// 3. Return!\n\npub fn desugar_match_expression(\n\n primary_expression: Expression,\n\n branches: Vec<MatchBranch>,\n\n _span: Span,\n\n) -> CompileResult<Expression> {\n\n let mut errors = vec![];\n\n let mut warnings = vec![];\n\n\n\n // 1. Assemble the \"matched branches.\"\n\n let mut matched_branches = vec![];\n\n for MatchBranch {\n\n condition,\n\n result,\n\n span: branch_span,\n\n } in branches.iter()\n\n {\n\n let matches = match condition {\n\n MatchCondition::CatchAll(_) => Some((vec![], vec![])),\n\n MatchCondition::Scrutinee(scrutinee) => check!(\n\n matcher(&primary_expression, scrutinee),\n", "file_path": "sway-core/src/parse_tree/expression/mod.rs", "rank": 18, "score": 258320.9038783437 }, { "content": "/// Split an identifier of unknown style into words.\n\nfn split_words(ident: &str) -> impl Iterator<Item = &str> {\n\n ident.split('_').map(camel_case_split_words).flatten()\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 19, "score": 256515.9046086829 }, { "content": "pub fn resolve_type(id: TypeId, error_span: &Span) -> Result<TypeInfo, TypeError> {\n\n TYPE_ENGINE.resolve_type(id, error_span)\n\n}\n\n\n", "file_path": "sway-core/src/type_engine/engine.rs", "rank": 20, "score": 253469.6197874416 }, { "content": "/// Split a CamelCase style identifier into words.\n\nfn camel_case_split_words(ident: &str) -> impl Iterator<Item = &str> {\n\n let mut ident = ident;\n\n std::iter::from_fn(move || {\n\n if ident.is_empty() {\n\n return None;\n\n }\n\n let index = find_camel_case_word_boundary(ident).unwrap_or_else(|| ident.len());\n\n let word = &ident[..index];\n\n ident = &ident[index..];\n\n Some(word)\n\n })\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 21, "score": 251619.32752408396 }, { "content": "pub fn 
print_on_success_library(silent_mode: bool, proj_name: &str, warnings: Vec<CompileWarning>) {\n\n if !silent_mode {\n\n warnings.iter().for_each(format_warning);\n\n }\n\n\n\n if warnings.is_empty() {\n\n let _ = println_green_err(&format!(\" Compiled library {:?}.\", proj_name));\n\n } else {\n\n let _ = println_yellow_err(&format!(\n\n \" Compiled library {:?} with {} {}.\",\n\n proj_name,\n\n warnings.len(),\n\n if warnings.len() > 1 {\n\n \"warnings\"\n\n } else {\n\n \"warning\"\n\n }\n\n ));\n\n }\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 22, "score": 245691.66512342845 }, { "content": "/// This panics if the spans are not from the same file. This should\n\n/// only be used on spans that are actually next to each other.\n\npub fn join_spans(s1: Span, s2: Span) -> Span {\n\n assert!(\n\n s1.input() == s2.input() && s1.path == s2.path,\n\n \"Spans from different files cannot be joined.\",\n\n );\n\n\n\n let s1_positions = s1.split();\n\n let s2_positions = s2.split();\n\n if s1_positions.0 < s2_positions.1 {\n\n Span {\n\n span: s1_positions.0.span(&s2_positions.1),\n\n path: s1.path,\n\n }\n\n } else {\n\n Span {\n\n span: s2_positions.0.span(&s1_positions.1),\n\n path: s1.path,\n\n }\n\n }\n\n}\n", "file_path": "sway-core/src/utils.rs", "rank": 23, "score": 242010.7289361902 }, { "content": "fn get_struct_field_type(line: &str, iter: &mut Peekable<Enumerate<Chars>>) -> String {\n\n let mut result = String::default();\n\n\n\n loop {\n\n match iter.peek() {\n\n Some((next_index, c)) => {\n\n let next_char = *c;\n\n let next_index = *next_index;\n\n\n\n match next_char {\n\n ',' => {\n\n iter.next();\n\n result.push(',');\n\n break;\n\n }\n\n '{' => {\n\n iter.next();\n\n result.push('{');\n\n return result;\n\n }\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 24, "score": 237494.75953071777 }, { "content": "/// Continually go up in the file tree until a manifest (Forc.toml) is found.\n\npub fn find_manifest_dir(starter_path: 
&Path) -> Option<PathBuf> {\n\n let mut path = std::fs::canonicalize(starter_path).ok()?;\n\n let empty_path = PathBuf::from(\"/\");\n\n while path != empty_path {\n\n path.push(crate::constants::MANIFEST_FILE_NAME);\n\n if path.exists() {\n\n path.pop();\n\n return Some(path);\n\n } else {\n\n path.pop();\n\n path.pop();\n\n }\n\n }\n\n None\n\n}\n", "file_path": "sway-utils/src/helpers.rs", "rank": 25, "score": 224493.34921156825 }, { "content": "fn match_struct(\n\n exp: &Expression,\n\n struct_name: &Ident,\n\n fields: &[StructScrutineeField],\n\n span: &Span,\n\n) -> CompileResult<MatcherResult> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut match_req_map = vec![];\n\n let mut match_impl_map = vec![];\n\n for field in fields.iter() {\n\n let field_name = field.field.clone();\n\n let scrutinee = field.scrutinee.clone();\n\n let delayed_resolution_exp = Expression::DelayedMatchTypeResolution {\n\n variant: DelayedResolutionVariant::StructField(DelayedStructFieldResolution {\n\n exp: Box::new(exp.clone()),\n\n struct_name: struct_name.to_owned(),\n\n field: field_name.clone(),\n\n }),\n\n span: span.clone(),\n", "file_path": "sway-core/src/parse_tree/expression/matcher.rs", "rank": 26, "score": 222571.79202166788 }, { "content": "/// Given an input `Arc<str>` and an optional [BuildConfig], parse the input into a [HllParseTree].\n\n///\n\n/// # Example\n\n/// ```\n\n/// # use sway_core::parse;\n\n/// # fn main() {\n\n/// let input = \"script; fn main() -> bool { true }\";\n\n/// let result = parse(input.into(), Default::default());\n\n/// # }\n\n/// ```\n\n///\n\n/// # Panics\n\n/// Panics if the generated parser from Pest panics.\n\npub fn parse(input: Arc<str>, config: Option<&BuildConfig>) -> CompileResult<HllParseTree> {\n\n let mut warnings: Vec<CompileWarning> = Vec::new();\n\n let mut errors: Vec<CompileError> = Vec::new();\n\n let mut parsed = match HllParser::parse(Rule::program, input.clone()) {\n\n Ok(o) => o,\n\n Err(e) => 
{\n\n return err(\n\n Vec::new(),\n\n vec![CompileError::ParseFailure {\n\n span: span::Span {\n\n span: pest::Span::new(input, get_start(&e), get_end(&e)).unwrap(),\n\n path: config.map(|config| config.path()),\n\n },\n\n err: e,\n\n }],\n\n )\n\n }\n\n };\n\n let parsed_root = check!(\n\n parse_root_from_pairs(parsed.next().unwrap().into_inner(), config),\n", "file_path": "sway-core/src/lib.rs", "rank": 27, "score": 220981.24975278976 }, { "content": "/// Convert an identifier into snake_case. This is a best-guess at what the identifier would look\n\n/// like if it were expressed in the correct style.\n\npub fn to_snake_case(ident: &str) -> String {\n\n let mut ret = String::with_capacity(ident.len());\n\n\n\n let (leading_underscores, trimmed) =\n\n ident.split_at(ident.find(|c| c != '_').unwrap_or_else(|| ident.len()));\n\n ret.push_str(leading_underscores);\n\n let mut words = split_words(trimmed);\n\n if let Some(word) = words.next() {\n\n ret.extend(word.chars().map(char::to_lowercase).flatten());\n\n for word in words {\n\n ret.push('_');\n\n ret.extend(word.chars().map(char::to_lowercase).flatten());\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 28, "score": 220472.84999228202 }, { "content": "/// Detect whether an identifier is written in snake_case.\n\npub fn is_snake_case(ident: &str) -> bool {\n\n let trimmed = ident.trim_start_matches('_');\n\n if trimmed.contains(\"__\") {\n\n return false;\n\n }\n\n if trimmed.contains(char::is_uppercase) {\n\n return false;\n\n }\n\n true\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 29, "score": 220467.74696112337 }, { "content": "pub fn is_sway_file(file: &Path) -> bool {\n\n let res = file.extension();\n\n Some(OsStr::new(constants::SWAY_EXTENSION)) == res\n\n}\n", "file_path": "sway-utils/src/helpers.rs", "rank": 30, "score": 220397.9446650392 }, { "content": "pub fn is_sway_file(file: &Path) -> bool {\n\n let res = file.extension();\n\n 
Some(OsStr::new(constants::SWAY_EXTENSION)) == res\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 31, "score": 220397.94466503922 }, { "content": "pub fn find_main_path(manifest_dir: &Path, manifest: &Manifest) -> PathBuf {\n\n let mut code_dir = manifest_dir.to_path_buf();\n\n code_dir.push(constants::SRC_DIR);\n\n code_dir.push(&manifest.project.entry);\n\n code_dir\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 32, "score": 218925.8600926359 }, { "content": "/// Convert an identifier into UpperCamelCase. This is a best-guess at what the identifier would\n\n/// look like if it were expressed in the correct style.\n\npub fn to_upper_camel_case(ident: &str) -> String {\n\n let mut ret = String::with_capacity(ident.len());\n\n\n\n let (leading_underscores, trimmed) =\n\n ident.split_at(ident.find(|c| c != '_').unwrap_or_else(|| ident.len()));\n\n ret.push_str(leading_underscores);\n\n for word in split_words(trimmed) {\n\n let mut chars = word.chars();\n\n if let Some(c) = chars.next() {\n\n ret.extend(c.to_uppercase());\n\n ret.extend(chars.map(char::to_lowercase).flatten());\n\n }\n\n }\n\n ret\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 33, "score": 218515.27599440102 }, { "content": "/// Convert an identifier into SCREAMING_SNAKE_CASE. 
This is a best-guess at what the identifier\n\n/// would look like if it were expressed in the correct style.\n\npub fn to_screaming_snake_case(ident: &str) -> String {\n\n let mut ret = String::with_capacity(ident.len());\n\n\n\n let (leading_underscores, trimmed) =\n\n ident.split_at(ident.find(|c| c != '_').unwrap_or_else(|| ident.len()));\n\n ret.push_str(leading_underscores);\n\n let mut words = split_words(trimmed);\n\n if let Some(word) = words.next() {\n\n ret.extend(word.chars().map(char::to_uppercase).flatten());\n\n for word in words {\n\n ret.push('_');\n\n ret.extend(word.chars().map(char::to_uppercase).flatten());\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 34, "score": 218515.27599440102 }, { "content": "/// Detect whether an identifier is written in UpperCamelCase.\n\npub fn is_upper_camel_case(ident: &str) -> bool {\n\n let trimmed = ident.trim_start_matches('_');\n\n if trimmed.contains('_') {\n\n return false;\n\n }\n\n if trimmed.starts_with(char::is_lowercase) {\n\n return false;\n\n }\n\n true\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 35, "score": 218510.20452489035 }, { "content": "/// Detect whether an identifier is written in SCREAMING_SNAKE_CASE.\n\npub fn is_screaming_snake_case(ident: &str) -> bool {\n\n let trimmed = ident.trim_start_matches('_');\n\n if trimmed.contains(\"__\") {\n\n return false;\n\n }\n\n if trimmed.contains(char::is_lowercase) {\n\n return false;\n\n }\n\n true\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 36, "score": 218510.20452489035 }, { "content": "pub fn find_file_name<'sc>(manifest_dir: &Path, main_path: &'sc Path) -> Result<&'sc Path, String> {\n\n let mut file_path = manifest_dir.to_path_buf();\n\n file_path.pop();\n\n let file_name = match main_path.strip_prefix(file_path.clone()) {\n\n Ok(o) => o,\n\n Err(err) => return Err(err.to_string()),\n\n };\n\n Ok(file_name)\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 37, 
"score": 215728.50767952684 }, { "content": "pub fn to_completion_items(tokens: &[Token]) -> Vec<CompletionItem> {\n\n let mut completion_items = vec![];\n\n\n\n for token in tokens {\n\n if token.is_initial_declaration() {\n\n let item = CompletionItem {\n\n label: token.name.clone(),\n\n kind: get_kind(&token.token_type),\n\n ..Default::default()\n\n };\n\n completion_items.push(item);\n\n }\n\n }\n\n\n\n completion_items\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/completion.rs", "rank": 38, "score": 209994.54006531002 }, { "content": "fn find_recursive_calls(decl_dependencies: &DependencyMap) -> Vec<CompileError> {\n\n decl_dependencies\n\n .iter()\n\n .filter_map(|(dep_sym, _)| find_recursive_call(decl_dependencies, dep_sym))\n\n .collect()\n\n}\n\n\n", "file_path": "sway-core/src/semantic_analysis/node_dependencies.rs", "rank": 39, "score": 209090.91035850285 }, { "content": "pub fn traverse_for_changes(parse_tree: &HllParseTree) -> Vec<Change> {\n\n let mut changes = vec![];\n\n\n\n for node in &parse_tree.tree.root_nodes {\n\n traverse_ast_node(node, &mut changes)\n\n }\n\n\n\n changes.sort_by(|a, b| a.start.cmp(&b.start));\n\n\n\n changes\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 40, "score": 208189.18003328278 }, { "content": "pub fn to_semantic_tokes(tokens: &[Token]) -> Vec<SemanticToken> {\n\n if tokens.is_empty() {\n\n return vec![];\n\n }\n\n\n\n let mut semantic_tokens: Vec<SemanticToken> = vec![create_semantic_token(&tokens[0], None)];\n\n\n\n for i in 1..tokens.len() {\n\n let semantic_token = create_semantic_token(&tokens[i], Some(&tokens[i - 1]));\n\n semantic_tokens.push(semantic_token);\n\n }\n\n\n\n semantic_tokens\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/semantic_tokens.rs", "rank": 41, "score": 208189.18003328278 }, { "content": "fn handle_declaration(declaration: Declaration, tokens: &mut Vec<Token>) {\n\n match declaration {\n\n Declaration::VariableDeclaration(variable) => {\n\n 
tokens.push(Token::from_variable(&variable));\n\n handle_expression(variable.body, tokens);\n\n }\n\n Declaration::FunctionDeclaration(func_dec) => {\n\n let ident = &func_dec.name;\n\n let token = Token::from_ident(\n\n ident,\n\n TokenType::FunctionDeclaration(get_function_details(&func_dec)),\n\n );\n\n tokens.push(token);\n\n\n\n for node in func_dec.body.contents {\n\n traverse_node(node, tokens);\n\n }\n\n }\n\n Declaration::Reassignment(reassignment) => {\n\n let token_type = TokenType::Reassignment;\n", "file_path": "sway-server/src/core/token.rs", "rank": 42, "score": 207306.72198339767 }, { "content": "pub fn read_manifest(manifest_dir: &Path) -> Result<Manifest, String> {\n\n let manifest_path = {\n\n let mut man = PathBuf::from(manifest_dir);\n\n man.push(constants::MANIFEST_FILE_NAME);\n\n man\n\n };\n\n let manifest_path_str = format!(\"{:?}\", manifest_path);\n\n let manifest = match std::fs::read_to_string(manifest_path) {\n\n Ok(o) => o,\n\n Err(e) => {\n\n return Err(format!(\n\n \"failed to read manifest at {:?}: {}\",\n\n manifest_path_str, e\n\n ))\n\n }\n\n };\n\n match toml::from_str(&manifest) {\n\n Ok(o) => Ok(o),\n\n Err(e) => Err(format!(\"Error parsing manifest: {}.\", e)),\n\n }\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 43, "score": 206103.92399977765 }, { "content": "/// This algorithm desugars pattern matching into a [MatcherResult], by creating two lists,\n\n/// the [MatchReqMap] which is a list of requirements that a desugared if expression\n\n/// must inlcude in the conditional, and the [MatchImplMap] which is a list of variable\n\n/// declarations that must be placed inside the body of the if expression.\n\n///\n\n/// Given the following example\n\n///\n\n/// ```ignore\n\n/// struct Point {\n\n/// x: u64,\n\n/// y: u64\n\n/// }\n\n///\n\n/// let p = Point {\n\n/// x: 42,\n\n/// y: 24\n\n/// };\n\n///\n\n/// match p {\n\n/// Point { x, y: 5 } => { x },\n\n/// Point { x, y: 24 } => { x },\n\n/// _ => 0\n\n/// 
}\n\n/// ```\n\n///\n\n/// The first match arm would create a [MatchReqMap] of roughly:\n\n///\n\n/// ```ignore\n\n/// [\n\n/// (y, 5) // y must equal 5 to trigger this case\n\n/// ]\n\n/// ```\n\n///\n\n/// The first match arm would create a [MatchImplMap] of roughly:\n\n///\n\n/// ```ignore\n\n/// [\n\n/// (x, 42) // add `let x = 42` in the body of the desugared if expression\n\n/// ]\n\n/// ```\n\npub fn matcher(exp: &Expression, scrutinee: &Scrutinee) -> CompileResult<MatcherResult> {\n\n let mut errors = vec![];\n\n let warnings = vec![];\n\n match scrutinee {\n\n Scrutinee::Literal { value, span } => match_literal(exp, value, span),\n\n Scrutinee::Variable { name, span } => match_variable(exp, name, span),\n\n Scrutinee::StructScrutinee {\n\n struct_name,\n\n fields,\n\n span,\n\n } => match_struct(exp, struct_name, fields, span),\n\n Scrutinee::EnumScrutinee {\n\n call_path,\n\n args,\n\n span,\n\n } => match_enum(exp, call_path, args, span),\n\n Scrutinee::Tuple { elems, span } => match_tuple(exp, elems, span),\n\n scrutinee => {\n\n eprintln!(\"Unimplemented scrutinee: {:?}\", scrutinee,);\n\n errors.push(CompileError::Unimplemented(\n\n \"this match expression scrutinee is not implemented\",\n\n scrutinee.span(),\n\n ));\n\n ok(Some((vec![], vec![])), warnings, errors)\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-core/src/parse_tree/expression/matcher.rs", "rank": 44, "score": 205544.68794453196 }, { "content": "fn handle_return_statement(ret: &ReturnStatement, changes: &mut Vec<Change>) {\n\n handle_expression(&ret.expr, changes)\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 45, "score": 205497.9668570325 }, { "content": "/// We want compile errors and warnings to retain their ordering, since typically\n\n/// they are grouped by relevance. 
However, we want to deduplicate them.\n\n/// Stdlib dedup in Rust assumes sorted data for efficiency, but we don't want that.\n\n/// A hash set would also mess up the order, so this is just a brute force way of doing it\n\n/// with a vector.\n\nfn dedup_unsorted<T: PartialEq + std::hash::Hash>(mut data: Vec<T>) -> Vec<T> {\n\n use smallvec::SmallVec;\n\n use std::collections::hash_map::{DefaultHasher, Entry};\n\n use std::hash::Hasher;\n\n\n\n let mut write_index = 0;\n\n let mut indexes: HashMap<u64, SmallVec<[usize; 1]>> = HashMap::with_capacity(data.len());\n\n for read_index in 0..data.len() {\n\n let hash = {\n\n let mut hasher = DefaultHasher::new();\n\n data[read_index].hash(&mut hasher);\n\n hasher.finish()\n\n };\n\n let index_vec = match indexes.entry(hash) {\n\n Entry::Occupied(oe) => {\n\n if oe\n\n .get()\n\n .iter()\n\n .any(|index| data[*index] == data[read_index])\n\n {\n", "file_path": "sway-core/src/lib.rs", "rank": 46, "score": 204956.60227198148 }, { "content": "pub fn get_current_dependency_version(dep_dir: &Path) -> Result<VersionedDependencyDirectory> {\n\n let mut entries =\n\n fs::read_dir(dep_dir).context(format!(\"couldn't read directory {}\", dep_dir.display()))?;\n\n let entry = match entries.next() {\n\n Some(entry) => entry,\n\n None => bail!(\"Dependency directory is empty. 
Run `forc build` to install dependencies.\"),\n\n };\n\n\n\n let path = entry?.path();\n\n if !path.is_dir() {\n\n bail!(\"{} isn't a directory.\", dep_dir.display())\n\n }\n\n\n\n let file_name = path.file_name().unwrap();\n\n // Dependencies directories are named as \"$repo_owner-$repo-$concatenated_hash\"\n\n let hash = file_name\n\n .to_str()\n\n .with_context(|| format!(\"Invalid utf8 in dependency name: {}\", path.display()))?\n\n .split('-')\n\n .last()\n", "file_path": "forc/src/utils/dependency.rs", "rank": 47, "score": 204863.98408315345 }, { "content": "fn traverse_ast_node(ast_node: &AstNode, changes: &mut Vec<Change>) {\n\n match &ast_node.content {\n\n AstNodeContent::Declaration(dec) => handle_declaration(dec, ast_node, changes),\n\n\n\n AstNodeContent::ReturnStatement(ret) => handle_return_statement(ret, changes),\n\n\n\n AstNodeContent::Expression(expr) => handle_expression(expr, changes),\n\n\n\n AstNodeContent::ImplicitReturnExpression(expr) => {\n\n handle_implicit_return_expression(expr, changes)\n\n }\n\n\n\n AstNodeContent::UseStatement(_) => {\n\n changes.push(Change::new(&ast_node.span, ChangeType::UseStatement));\n\n }\n\n\n\n AstNodeContent::IncludeStatement(_) => {\n\n changes.push(Change::new(&ast_node.span, ChangeType::IncludeStatement))\n\n }\n\n\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 48, "score": 203735.97845938057 }, { "content": "pub fn download_tarball(url: &str, out_dir: &Path) -> Result<String> {\n\n let mut data = Vec::new();\n\n let mut handle = Easy::new();\n\n\n\n // Download the tarball.\n\n handle.url(url).context(\"failed to configure tarball URL\")?;\n\n handle\n\n .follow_location(true)\n\n .context(\"failed to configure follow location\")?;\n\n\n\n handle\n\n .useragent(\"forc-builder\")\n\n .context(\"failed to configure User-Agent\")?;\n\n {\n\n let mut transfer = handle.transfer();\n\n transfer\n\n .write_function(|new_data| {\n\n data.extend_from_slice(new_data);\n\n 
Ok(new_data.len())\n\n })\n", "file_path": "forc/src/utils/dependency.rs", "rank": 49, "score": 202494.13417698885 }, { "content": "pub fn get_struct_details(struct_dec: &StructDeclaration) -> StructDetails {\n\n StructDetails {\n\n visibility: struct_dec.visibility,\n\n }\n\n}\n\n\n", "file_path": "sway-server/src/core/token_type.rs", "rank": 50, "score": 200755.47379773352 }, { "content": "pub fn build(command: BuildCommand) -> Result<Vec<u8>, String> {\n\n // find manifest directory, even if in subdirectory\n\n let this_dir = if let Some(ref path) = command.path {\n\n PathBuf::from(path)\n\n } else {\n\n std::env::current_dir().map_err(|e| format!(\"{:?}\", e))?\n\n };\n\n\n\n let BuildCommand {\n\n binary_outfile,\n\n print_finalized_asm,\n\n print_intermediate_asm,\n\n offline_mode,\n\n silent_mode,\n\n ..\n\n } = command;\n\n let manifest_dir = match find_manifest_dir(&this_dir) {\n\n Some(dir) => dir,\n\n None => {\n\n return Err(format!(\n", "file_path": "forc/src/ops/forc_build.rs", "rank": 51, "score": 200562.2376580679 }, { "content": "// if it's a string just keep pushing the characters\n\npub fn handle_string_case(code_line: &mut CodeLine, current_char: char) {\n\n code_line.push_char(current_char);\n\n if current_char == '\"' {\n\n let previous_char = code_line.text.chars().last();\n\n // end of the string\n\n if previous_char != Some('\\\\') {\n\n code_line.become_default();\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 52, "score": 197465.78046627 }, { "content": "// Helper to get only detailed dependencies (`Dependency::Detailed`).\n\npub fn get_detailed_dependencies(manifest: &mut Manifest) -> HashMap<String, &DependencyDetails> {\n\n let mut dependencies: HashMap<String, &DependencyDetails> = HashMap::new();\n\n\n\n if let Some(ref mut deps) = manifest.dependencies {\n\n for (dep_name, dependency_details) in deps.iter_mut() {\n\n match dependency_details {\n\n Dependency::Simple(..) 
=> continue,\n\n Dependency::Detailed(dep_details) => {\n\n dependencies.insert(dep_name.to_owned(), dep_details)\n\n }\n\n };\n\n }\n\n }\n\n\n\n dependencies\n\n}\n\n\n\npub async fn get_github_repo_releases(dependency_url: &str) -> Result<Vec<String>> {\n\n // Quick protection against `git` dependency URL ending with `/`.\n\n let dependency_url = dependency_url.trim_end_matches('/');\n", "file_path": "forc/src/utils/dependency.rs", "rank": 53, "score": 197161.74345194193 }, { "content": "pub fn to_symbol_information(tokens: &[Token], url: Url) -> Vec<SymbolInformation> {\n\n let mut symbols: Vec<SymbolInformation> = vec![];\n\n\n\n for token in tokens {\n\n let symbol = create_symbol_info(token, url.clone());\n\n symbols.push(symbol)\n\n }\n\n\n\n symbols\n\n}\n\n\n\n#[allow(warnings)]\n", "file_path": "sway-server/src/capabilities/document_symbol.rs", "rank": 54, "score": 197089.76715856648 }, { "content": "pub fn handle_watched_files(_session: Arc<Session>, events: Vec<FileEvent>) {\n\n for _event in events {\n\n // TODO FileChangeType::DELETED wants fully-qualified type, but that doesn't work\n\n // if let FileChangeType::DELETED {} = event.typ {\n\n // let _ = session.remove_document(&event.uri);\n\n // }\n\n }\n\n}\n", "file_path": "sway-server/src/capabilities/file_sync.rs", "rank": 55, "score": 195419.17223745328 }, { "content": "fn handle_declaration(dec: &Declaration, ast_node: &AstNode, changes: &mut Vec<Change>) {\n\n match &dec {\n\n Declaration::VariableDeclaration(var_dec) => handle_expression(&var_dec.body, changes),\n\n\n\n Declaration::StructDeclaration(_) => {\n\n changes.push(Change::new(&ast_node.span, ChangeType::Struct))\n\n }\n\n\n\n Declaration::EnumDeclaration(_) => {\n\n changes.push(Change::new(&ast_node.span, ChangeType::Enum))\n\n }\n\n\n\n Declaration::FunctionDeclaration(func) => {\n\n for content in &func.body.contents {\n\n traverse_ast_node(content, changes);\n\n }\n\n }\n\n\n\n Declaration::ImplSelf(impl_self) => {\n\n for func 
in &impl_self.functions {\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 56, "score": 195070.78631681955 }, { "content": "pub fn format_use_statement(line: &str) -> String {\n\n let use_keyword = extract_keyword(line, Rule::use_keyword).unwrap();\n\n let (_, right) = line.split_once(&use_keyword).unwrap();\n\n let right: String = right.chars().filter(|c| !c.is_whitespace()).collect();\n\n format!(\n\n \"{}{} {}\",\n\n ALREADY_FORMATTED_LINE_PATTERN, use_keyword, right\n\n )\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 57, "score": 188515.89803842577 }, { "content": "pub fn format_delineated_path(line: &str) -> String {\n\n // currently just clean up extra unwanted whitespace\n\n line.chars().filter(|c| !c.is_whitespace()).collect()\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 58, "score": 188417.54963345657 }, { "content": "pub fn get_main_file(manifest_of_dep: &Manifest, manifest_dir: &Path) -> Result<Arc<str>, String> {\n\n let main_path = {\n\n let mut code_dir = PathBuf::from(manifest_dir);\n\n code_dir.push(constants::SRC_DIR);\n\n code_dir.push(&manifest_of_dep.project.entry);\n\n code_dir\n\n };\n\n\n\n // some hackery to get around lifetimes for now, until the AST returns a non-lifetime-bound AST\n\n let main_file = std::fs::read_to_string(&main_path).map_err(|e| e.to_string())?;\n\n let main_file = Arc::from(main_file);\n\n Ok(main_file)\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 59, "score": 187734.72183873056 }, { "content": "pub fn get_format_text_edits(text: Arc<str>, options: FormattingOptions) -> Option<Vec<TextEdit>> {\n\n // we only format if code is correct\n\n match get_formatted_data(text.clone(), options.tab_size) {\n\n Ok((num_of_lines, formatted_text)) => {\n\n let text_lines_count = text.split('\\n').count();\n\n let line_end = std::cmp::max(num_of_lines, text_lines_count) as u32;\n\n\n\n let main_edit = TextEdit {\n\n range: Range::new(Position::new(0, 0), 
Position::new(line_end as u32, 0)),\n\n new_text: formatted_text,\n\n };\n\n\n\n Some(vec![main_edit])\n\n }\n\n _ => None,\n\n }\n\n}\n", "file_path": "sway-server/src/capabilities/formatting.rs", "rank": 60, "score": 184482.17547868256 }, { "content": "pub fn format(command: FormatCommand) -> Result<(), FormatError> {\n\n let build_command = BuildCommand {\n\n path: None,\n\n print_finalized_asm: false,\n\n print_intermediate_asm: false,\n\n binary_outfile: None,\n\n offline_mode: false,\n\n silent_mode: false,\n\n };\n\n\n\n match forc_build::build(build_command) {\n\n // build is successful, continue to formatting\n\n Ok(_) => format_after_build(command),\n\n\n\n // forc_build will print all the errors/warnings\n\n Err(err) => Err(err.into()),\n\n }\n\n}\n\n\n", "file_path": "forc/src/ops/forc_fmt.rs", "rank": 61, "score": 183556.6082353936 }, { "content": "fn get_range(warning_or_error: &WarningOrError<'_>) -> Range {\n\n let (start, end) = match warning_or_error {\n\n WarningOrError::Error(error) => error.line_col(),\n\n WarningOrError::Warning(warning) => warning.line_col(),\n\n };\n\n\n\n let start_line = start.line as u32 - 1;\n\n let start_character = start.col as u32;\n\n\n\n let end_line = end.line as u32 - 1;\n\n let end_character = end.col as u32;\n\n\n\n Range {\n\n start: Position::new(start_line, start_character),\n\n end: Position::new(end_line, end_character),\n\n }\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/diagnostic.rs", "rank": 62, "score": 183512.33526568828 }, { "content": "struct MatchedBranch {\n\n result: Expression,\n\n match_req_map: Vec<(Expression, Expression)>,\n\n match_impl_map: Vec<(Ident, Expression)>,\n\n branch_span: Span,\n\n}\n\n\n", "file_path": "sway-core/src/parse_tree/expression/mod.rs", "rank": 63, "score": 182422.76200285798 }, { "content": "/// returns number of lines and formatted text\n\npub fn get_formatted_data(file: Arc<str>, tab_size: u32) -> Result<(usize, String), Vec<String>> {\n\n let parsed_res 
= sway_core::parse(file.clone(), None);\n\n match parsed_res.value {\n\n Some(parse_tree) => {\n\n let changes = traverse_for_changes(&parse_tree);\n\n let mut rope_file = Rope::from_str(&file);\n\n\n\n let mut offset: i32 = 0;\n\n for change in changes {\n\n let (new_offset, start, end) = calculate_offset(offset, &change);\n\n offset = new_offset;\n\n\n\n rope_file.remove(start..end);\n\n rope_file.insert(start, &change.text);\n\n }\n\n\n\n let mut code_builder = CodeBuilder::new(tab_size);\n\n\n\n let file = rope_file.to_string();\n\n let lines: Vec<&str> = file.split('\\n').collect();\n", "file_path": "sway-fmt/src/fmt.rs", "rank": 64, "score": 180940.4539927672 }, { "content": "#[test]\n\nfn test_struct_memory_layout() {\n\n use crate::span::Span;\n\n let first_field_name = Ident::new_with_override(\n\n \"foo\",\n\n Span {\n\n span: pest::Span::new(\" \".into(), 0, 0).unwrap(),\n\n path: None,\n\n },\n\n );\n\n let second_field_name = Ident::new_with_override(\n\n \"bar\",\n\n Span {\n\n span: pest::Span::new(\" \".into(), 0, 0).unwrap(),\n\n path: None,\n\n },\n\n );\n\n\n\n let numbers = ContiguousMemoryLayoutDescriptor {\n\n fields: vec![\n\n FieldMemoryLayoutDescriptor {\n", "file_path": "sway-core/src/asm_generation/expression/structs.rs", "rank": 65, "score": 180372.4329829318 }, { "content": "fn parse_str_type(raw: &str, span: Span) -> CompileResult<TypeInfo> {\n\n if raw.starts_with(\"str[\") {\n\n let mut rest = raw.split_at(\"str[\".len()).1.chars().collect::<Vec<_>>();\n\n if let Some(']') = rest.pop() {\n\n if let Ok(num) = String::from_iter(rest).parse() {\n\n return ok(TypeInfo::Str(num), vec![], vec![]);\n\n }\n\n }\n\n return err(\n\n vec![],\n\n vec![CompileError::InvalidStrType {\n\n raw: raw.to_string(),\n\n span,\n\n }],\n\n );\n\n }\n\n err(vec![], vec![CompileError::UnknownType { span }])\n\n}\n\n\n", "file_path": "sway-core/src/type_engine.rs", "rank": 66, "score": 179313.38936494477 }, { "content": "fn format_sway_file(file: &Path, 
formatted_content: &str) -> Result<(), FormatError> {\n\n fs::write(file, formatted_content)?;\n\n\n\n Ok(())\n\n}\n\n\n\npub struct FormatError {\n\n pub message: String,\n\n}\n\n\n\nimpl fmt::Display for FormatError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"{}\", self)\n\n }\n\n}\n\n\n\nimpl From<&str> for FormatError {\n\n fn from(s: &str) -> Self {\n\n FormatError {\n\n message: s.to_string(),\n", "file_path": "forc/src/ops/forc_fmt.rs", "rank": 67, "score": 177115.30379301804 }, { "content": "fn match_literal(\n\n exp: &Expression,\n\n scrutinee: &Literal,\n\n scrutinee_span: &Span,\n\n) -> CompileResult<MatcherResult> {\n\n let match_req_map = vec![(\n\n exp.to_owned(),\n\n Expression::Literal {\n\n value: scrutinee.clone(),\n\n span: scrutinee_span.clone(),\n\n },\n\n )];\n\n let match_impl_map = vec![];\n\n ok(Some((match_req_map, match_impl_map)), vec![], vec![])\n\n}\n\n\n", "file_path": "sway-core/src/parse_tree/expression/matcher.rs", "rank": 68, "score": 173788.14138675435 }, { "content": "fn match_enum(\n\n exp: &Expression,\n\n call_path: &CallPath,\n\n args: &[Scrutinee],\n\n span: &Span,\n\n) -> CompileResult<MatcherResult> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut match_req_map = vec![];\n\n let mut match_impl_map = vec![];\n\n for (pos, arg) in args.iter().enumerate() {\n\n let delayed_resolution_exp = Expression::DelayedMatchTypeResolution {\n\n variant: DelayedResolutionVariant::EnumVariant(DelayedEnumVariantResolution {\n\n exp: Box::new(exp.clone()),\n\n call_path: call_path.to_owned(),\n\n arg_num: pos,\n\n }),\n\n span: span.clone(),\n\n };\n\n let new_matches = check!(\n", "file_path": "sway-core/src/parse_tree/expression/matcher.rs", "rank": 69, "score": 173788.14138675435 }, { "content": "fn match_variable(\n\n exp: &Expression,\n\n scrutinee_name: &Ident,\n\n _span: &Span,\n\n) -> CompileResult<MatcherResult> {\n\n let match_req_map = vec![];\n\n let 
match_impl_map = vec![(scrutinee_name.to_owned(), exp.to_owned())];\n\n ok(Some((match_req_map, match_impl_map)), vec![], vec![])\n\n}\n\n\n", "file_path": "sway-core/src/parse_tree/expression/matcher.rs", "rank": 70, "score": 173788.14138675435 }, { "content": "fn get_range_from_span(span: &Span) -> Range {\n\n let start = span.start_pos().line_col();\n\n let end = span.end_pos().line_col();\n\n\n\n let start_line = start.0 as u32 - 1;\n\n let start_character = start.1 as u32 - 1;\n\n\n\n let end_line = end.0 as u32 - 1;\n\n let end_character = end.1 as u32 - 2;\n\n\n\n Range {\n\n start: Position::new(start_line, start_character),\n\n end: Position::new(end_line, end_character),\n\n }\n\n}\n", "file_path": "sway-server/src/core/token.rs", "rank": 71, "score": 172942.56358084703 }, { "content": "fn parse_subfield_path(\n\n item: Pair<Rule>,\n\n config: Option<&BuildConfig>,\n\n) -> CompileResult<Expression> {\n\n let warnings = vec![];\n\n let mut errors = vec![];\n\n let path = config.map(|c| c.path());\n\n let item = item.into_inner().next().expect(\"guarenteed by grammar\");\n\n match item.as_rule() {\n\n Rule::call_item => parse_call_item(item, config),\n\n Rule::array_index => parse_array_index(item, config),\n\n a => {\n\n eprintln!(\n\n \"Unimplemented subfield path: {:?} ({:?}) ({:?})\",\n\n a,\n\n item.as_str(),\n\n item.as_rule()\n\n );\n\n errors.push(CompileError::UnimplementedRule(\n\n a,\n", "file_path": "sway-core/src/parse_tree/expression/mod.rs", "rank": 72, "score": 171735.94984441897 }, { "content": "/// Implementations of traits are top-level things that are not conditional, so\n\n/// we insert an edge from the function's starting point to the declaration to show\n\n/// that the declaration was indeed at some point implemented.\n\n/// Additionally, we insert the trait's methods into the method namespace in order to\n\n/// track which exact methods are dead code.\n\nfn connect_impl_trait(\n\n trait_name: &CallPath,\n\n graph: &mut 
ControlFlowGraph,\n\n methods: &[TypedFunctionDeclaration],\n\n entry_node: NodeIndex,\n\n) {\n\n let mut methods_and_indexes = vec![];\n\n // insert method declarations into the graph\n\n for fn_decl in methods {\n\n let fn_decl_entry_node = graph.add_node(ControlFlowGraphNode::MethodDeclaration {\n\n span: fn_decl.span.clone(),\n\n method_name: fn_decl.name.clone(),\n\n });\n\n graph.add_edge(entry_node, fn_decl_entry_node, \"\".into());\n\n // connect the impl declaration node to the functions themselves, as all trait functions are\n\n // public if the trait is in scope\n\n connect_typed_fn_decl(fn_decl, graph, fn_decl_entry_node, fn_decl.span.clone());\n\n methods_and_indexes.push((fn_decl.name.clone(), fn_decl_entry_node));\n\n }\n\n // Now, insert the methods into the trait method namespace.\n", "file_path": "sway-core/src/control_flow_analysis/analyze_return_paths.rs", "rank": 74, "score": 169226.39087003696 }, { "content": "fn print_inner_types(name: String, inner_types: impl Iterator<Item = TypeId>) -> String {\n\n format!(\n\n \"{}<{}>\",\n\n name,\n\n inner_types\n\n .map(|x| x.friendly_type_str())\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n )\n\n}\n", "file_path": "sway-core/src/type_engine/type_info.rs", "rank": 75, "score": 166487.51770437995 }, { "content": "/// Checks for disallowed opcodes in non-contract code.\n\n/// i.e., if this is a script or predicate, we can't use certain contract opcodes.\n\n/// See https://github.com/FuelLabs/sway/issues/350 for details.\n\npub fn check_invalid_opcodes(asm: &FinalizedAsm) -> CompileResult<()> {\n\n match asm {\n\n FinalizedAsm::ContractAbi { .. 
} | FinalizedAsm::Library => ok((), vec![], vec![]),\n\n FinalizedAsm::ScriptMain {\n\n program_section, ..\n\n } => check_for_contract_opcodes(&program_section.ops[..]),\n\n FinalizedAsm::PredicateMain {\n\n program_section, ..\n\n } => check_for_contract_opcodes(&program_section.ops[..]),\n\n }\n\n}\n\n\n", "file_path": "sway-core/src/asm_generation/checks.rs", "rank": 76, "score": 166067.34194068023 }, { "content": "pub fn main() {\n\n let k = Story::Pain;\n\n}\n\n\n", "file_path": "sway-fmt/src/fmt.rs", "rank": 77, "score": 160976.35459540668 }, { "content": "pub fn compile_to_ast(\n\n input: Arc<str>,\n\n initial_namespace: &Namespace,\n\n build_config: &BuildConfig,\n\n dependency_graph: &mut HashMap<String, HashSet<String>>,\n\n) -> CompileAstResult {\n\n let mut warnings = Vec::new();\n\n let mut errors = Vec::new();\n\n let parse_tree = check!(\n\n parse(input, Some(build_config)),\n\n return CompileAstResult::Failure { errors, warnings },\n\n warnings,\n\n errors\n\n );\n\n let mut dead_code_graph = ControlFlowGraph {\n\n graph: Graph::new(),\n\n entry_points: vec![],\n\n namespace: Default::default(),\n\n };\n\n\n", "file_path": "sway-core/src/lib.rs", "rank": 78, "score": 159521.50346858244 }, { "content": "pub fn print_on_success(\n\n silent_mode: bool,\n\n proj_name: &str,\n\n warnings: Vec<CompileWarning>,\n\n tree_type: TreeType,\n\n) {\n\n let type_str = match tree_type {\n\n TreeType::Script {} => \"script\",\n\n TreeType::Contract {} => \"contract\",\n\n TreeType::Predicate {} => \"predicate\",\n\n TreeType::Library { .. 
} => \"library\",\n\n };\n\n\n\n if !silent_mode {\n\n warnings.iter().for_each(format_warning);\n\n }\n\n\n\n if warnings.is_empty() {\n\n let _ = println_green_err(&format!(\" Compiled {} {:?}.\", type_str, proj_name));\n\n } else {\n", "file_path": "forc/src/utils/helpers.rs", "rank": 79, "score": 159521.50346858244 }, { "content": "/// Given input Sway source code, compile to a [BytecodeCompilationResult] which contains the asm in\n\n/// bytecode form.\n\npub fn compile_to_bytecode(\n\n input: Arc<str>,\n\n initial_namespace: &Namespace,\n\n build_config: BuildConfig,\n\n dependency_graph: &mut HashMap<String, HashSet<String>>,\n\n) -> BytecodeCompilationResult {\n\n match compile_to_asm(input, initial_namespace, build_config, dependency_graph) {\n\n CompilationResult::Success {\n\n mut asm,\n\n mut warnings,\n\n } => {\n\n let mut asm_res = asm.to_bytecode_mut();\n\n warnings.append(&mut asm_res.warnings);\n\n if asm_res.value.is_none() || !asm_res.errors.is_empty() {\n\n BytecodeCompilationResult::Failure {\n\n warnings,\n\n errors: asm_res.errors,\n\n }\n\n } else {\n\n // asm_res is confirmed to be Some(bytes).\n", "file_path": "sway-core/src/lib.rs", "rank": 80, "score": 159521.50346858244 }, { "content": "/// Given input Sway source code, compile to a [CompilationResult] which contains the asm in opcode\n\n/// form (not raw bytes/bytecode).\n\npub fn compile_to_asm(\n\n input: Arc<str>,\n\n initial_namespace: &Namespace,\n\n build_config: BuildConfig,\n\n dependency_graph: &mut HashMap<String, HashSet<String>>,\n\n) -> CompilationResult {\n\n match compile_to_ast(input, initial_namespace, &build_config, dependency_graph) {\n\n CompileAstResult::Failure { warnings, errors } => {\n\n CompilationResult::Failure { warnings, errors }\n\n }\n\n CompileAstResult::Success {\n\n parse_tree,\n\n tree_type,\n\n mut warnings,\n\n } => {\n\n let mut errors = vec![];\n\n match tree_type {\n\n TreeType::Contract | TreeType::Script | TreeType::Predicate => {\n\n let asm 
= check!(\n\n compile_ast_to_asm(*parse_tree, &build_config),\n", "file_path": "sway-core/src/lib.rs", "rank": 81, "score": 159521.50346858244 }, { "content": "pub fn print_on_failure(\n\n silent_mode: bool,\n\n warnings: Vec<CompileWarning>,\n\n errors: Vec<CompileError>,\n\n) {\n\n let e_len = errors.len();\n\n\n\n if !silent_mode {\n\n warnings.iter().for_each(format_warning);\n\n errors.into_iter().for_each(|error| format_err(&error));\n\n }\n\n\n\n println_red_err(&format!(\n\n \" Aborting due to {} {}.\",\n\n e_len,\n\n if e_len > 1 { \"errors\" } else { \"error\" }\n\n ))\n\n .unwrap();\n\n}\n\n\n", "file_path": "forc/src/utils/helpers.rs", "rank": 82, "score": 159521.50346858244 }, { "content": "/// Takes a parse failure as input and returns either the index of the positional pest parse error, or the start position of the span of text that the error occurs.\n\nfn get_start(err: &pest::error::Error<Rule>) -> usize {\n\n match err.location {\n\n pest::error::InputLocation::Pos(num) => num,\n\n pest::error::InputLocation::Span((start, _)) => start,\n\n }\n\n}\n\n\n", "file_path": "sway-core/src/lib.rs", "rank": 83, "score": 158384.24954116548 }, { "content": "/// Takes a parse failure as input and returns either the index of the positional pest parse error, or the end position of the span of text that the error occurs.\n\nfn get_end(err: &pest::error::Error<Rule>) -> usize {\n\n match err.location {\n\n pest::error::InputLocation::Pos(num) => num,\n\n pest::error::InputLocation::Span((_, end)) => end,\n\n }\n\n}\n\n\n\n/// This struct represents the compilation of an internal dependency\n\n/// defined through an include statement (the `dep` keyword).\n\npub(crate) struct InnerDependencyCompileResult {\n\n name: Ident,\n\n namespace: Namespace,\n\n}\n\n/// For internal compiler use.\n\n/// Compiles an included file and returns its control flow and dead code graphs.\n\n/// These graphs are merged into the parent program's graphs for accurate 
analysis.\n\n///\n\n/// TODO -- there is _so_ much duplicated code and messiness in this file around the\n\n/// different types of compilation and stuff. After we get to a good state with the MVP,\n\n/// clean up the types here with the power of hindsight\n", "file_path": "sway-core/src/lib.rs", "rank": 84, "score": 158384.24954116548 }, { "content": "pub fn format_document(\n\n session: Arc<Session>,\n\n params: DocumentFormattingParams,\n\n) -> Option<Vec<TextEdit>> {\n\n let options: FormattingOptions = params.options;\n\n let text_document: TextDocumentIdentifier = params.text_document;\n\n let url = text_document.uri;\n\n\n\n session.format_text(&url, options)\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/formatting.rs", "rank": 85, "score": 158108.01141024163 }, { "content": "pub fn get_highlights(\n\n session: Arc<Session>,\n\n params: DocumentHighlightParams,\n\n) -> Option<Vec<DocumentHighlight>> {\n\n let url = params.text_document_position_params.text_document.uri;\n\n let position = params.text_document_position_params.position;\n\n\n\n session.get_token_ranges(&url, position).map(|ranges| {\n\n ranges\n\n .into_iter()\n\n .map(|range| DocumentHighlight { range, kind: None })\n\n .collect()\n\n })\n\n}\n", "file_path": "sway-server/src/capabilities/highlight.rs", "rank": 86, "score": 158108.01141024163 }, { "content": "/// Downloads a non-local dependency that's hosted on GitHub.\n\n/// By default, it stores the dependency in `~/.forc/`.\n\n/// A given dependency `dep` is stored under `~/.forc/dep/default/$owner-$repo-$hash`.\n\n/// If no hash (nor any other type of reference) is provided, Forc\n\n/// will download the default branch at the latest commit.\n\n/// If a branch is specified, it will go in `~/.forc/dep/$branch/$owner-$repo-$hash.\n\n/// If a version is specified, it will go in `~/.forc/dep/$version/$owner-$repo-$hash.\n\n/// Version takes precedence over branch reference.\n\npub fn download_github_dep(\n\n dep_name: &str,\n\n 
repo_base_url: &str,\n\n branch: &Option<String>,\n\n version: &Option<String>,\n\n offline_mode: OfflineMode,\n\n) -> Result<String> {\n\n let home_dir = match home_dir() {\n\n None => return Err(anyhow!(\"Couldn't find home directory (`~/`)\")),\n\n Some(p) => p.to_str().unwrap().to_owned(),\n\n };\n\n\n\n // Version tag takes precedence over branch reference.\n\n let out_dir = match &version {\n\n Some(v) => PathBuf::from(format!(\n\n \"{}/{}/{}/{}\",\n\n home_dir,\n\n constants::FORC_DEPENDENCIES_DIRECTORY,\n\n dep_name,\n\n v\n", "file_path": "forc/src/utils/dependency.rs", "rank": 87, "score": 158108.01141024163 }, { "content": "pub fn get_completion(\n\n session: Arc<Session>,\n\n params: CompletionParams,\n\n) -> Option<CompletionResponse> {\n\n let url = params.text_document_position.text_document.uri;\n\n\n\n session\n\n .get_completion_items(&url)\n\n .map(CompletionResponse::Array)\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/completion.rs", "rank": 88, "score": 158108.01141024163 }, { "content": "pub fn replace_dep_version(\n\n target_directory: &Path,\n\n git: &str,\n\n dep: &DependencyDetails,\n\n) -> Result<()> {\n\n let current = get_current_dependency_version(target_directory)?;\n\n\n\n let api_url = build_github_repo_api_url(git, &dep.branch, &dep.version);\n\n download_tarball(&api_url, target_directory)?;\n\n\n\n // Delete old one\n\n match fs::remove_dir_all(current.path) {\n\n Ok(_) => Ok(()),\n\n Err(e) => {\n\n return Err(anyhow!(\n\n \"failed to remove old version of the dependency ({}): {}\",\n\n git,\n\n e\n\n ))\n\n }\n\n }\n\n}\n\n\n", "file_path": "forc/src/utils/dependency.rs", "rank": 89, "score": 158108.01141024163 }, { "content": "pub fn prepare_rename(\n\n session: Arc<Session>,\n\n params: lsp::TextDocumentPositionParams,\n\n) -> Option<lsp::PrepareRenameResponse> {\n\n let url = params.text_document.uri;\n\n\n\n match session.documents.get(url.path()) {\n\n Some(ref document) => {\n\n if let Some(token) = 
document.get_token_at_position(params.position) {\n\n match token.token_type {\n\n TokenType::Library | TokenType::Reassignment => None,\n\n _ => Some(lsp::PrepareRenameResponse::RangeWithPlaceholder {\n\n range: token.range,\n\n placeholder: token.name.clone(),\n\n }),\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/rename.rs", "rank": 90, "score": 158108.01141024163 }, { "content": "pub fn get_diagnostics(\n\n warnings: Vec<CompileWarning>,\n\n errors: Vec<CompileError>,\n\n) -> Vec<Diagnostic> {\n\n let errors: Vec<Diagnostic> = errors\n\n .iter()\n\n .map(|error| {\n\n let range = get_range(&WarningOrError::Error(error));\n\n Diagnostic {\n\n range,\n\n severity: Some(DiagnosticSeverity::ERROR),\n\n message: error.to_friendly_error_string(),\n\n ..Default::default()\n\n }\n\n })\n\n .collect();\n\n\n\n let warnings: Vec<Diagnostic> = warnings\n\n .iter()\n\n .map(|warning| {\n", "file_path": "sway-server/src/capabilities/diagnostic.rs", "rank": 91, "score": 158108.01141024163 }, { "content": "pub fn go_to_definition(\n\n session: Arc<Session>,\n\n params: GotoDefinitionParams,\n\n) -> Option<GotoDefinitionResponse> {\n\n let url = params.text_document_position_params.text_document.uri;\n\n let position = params.text_document_position_params.position;\n\n\n\n session.get_token_definition_response(url, position)\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/go_to.rs", "rank": 92, "score": 158108.01141024163 }, { "content": "pub fn unify_with_self(\n\n a: TypeId,\n\n b: TypeId,\n\n self_type: TypeId,\n\n span: &Span,\n\n) -> Result<Vec<CompileWarning>, TypeError> {\n\n TYPE_ENGINE.unify_with_self(a, b, self_type, span)\n\n}\n\n\n", "file_path": "sway-core/src/type_engine/engine.rs", "rank": 93, "score": 156734.13948070214 }, { "content": "/// Builds a proper URL that's used to call GitHub's API.\n\n/// The dependency is specified as `https://github.com/:owner/:project`\n\n/// And the 
API URL must be like `https://api.github.com/repos/:owner/:project/tarball`\n\n/// Adding a `:ref` at the end makes it download a branch/tag based repo.\n\n/// Omitting it makes it download the default branch at latest commit.\n\npub fn build_github_repo_api_url(\n\n dependency_url: &str,\n\n branch: &Option<String>,\n\n version: &Option<String>,\n\n) -> String {\n\n let dependency_url = dependency_url.trim_end_matches('/');\n\n let mut pieces = dependency_url.rsplit('/');\n\n\n\n let project_name: &str = match pieces.next() {\n\n Some(p) => p,\n\n None => dependency_url,\n\n };\n\n\n\n let owner_name: &str = match pieces.next() {\n\n Some(p) => p,\n\n None => dependency_url,\n\n };\n\n\n\n // Version tag takes precedence over branch reference.\n\n match version {\n", "file_path": "forc/src/utils/dependency.rs", "rank": 94, "score": 155402.51569604428 }, { "content": "pub fn handle_change_file(\n\n session: Arc<Session>,\n\n params: DidChangeTextDocumentParams,\n\n) -> Result<(), DocumentError> {\n\n session.update_text_document(&params.text_document.uri, params.content_changes)\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/text_sync.rs", "rank": 95, "score": 155398.2448777438 }, { "content": "pub fn handle_open_file(\n\n session: Arc<Session>,\n\n params: &DidOpenTextDocumentParams,\n\n) -> Vec<Diagnostic> {\n\n let path = params.text_document.uri.path();\n\n\n\n if !session.contains_sway_file(&params.text_document.uri) {\n\n if let Ok(text_document) = TextDocument::build_from_path(path) {\n\n let _ = session.store_document(text_document);\n\n }\n\n }\n\n\n\n match session.parse_document(path) {\n\n Ok(diagnostics) => diagnostics,\n\n Err(DocumentError::FailedToParse(diagnostics)) => diagnostics,\n\n _ => vec![],\n\n }\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/text_sync.rs", "rank": 96, "score": 155398.2448777438 }, { "content": "pub fn handle_save_file(\n\n session: Arc<Session>,\n\n params: &DidSaveTextDocumentParams,\n\n) -> 
Option<Vec<Diagnostic>> {\n\n let path = params.text_document.uri.path();\n\n\n\n match session.parse_document(path) {\n\n Ok(diagnostics) => {\n\n if diagnostics.is_empty() {\n\n None\n\n } else {\n\n Some(diagnostics)\n\n }\n\n }\n\n Err(DocumentError::FailedToParse(diagnostics)) => Some(diagnostics),\n\n _ => None,\n\n }\n\n}\n", "file_path": "sway-server/src/capabilities/text_sync.rs", "rank": 97, "score": 155398.2448777438 }, { "content": "// https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71\n\npub fn get_semantic_tokens_full(\n\n session: Arc<Session>,\n\n params: SemanticTokensParams,\n\n) -> Option<SemanticTokensResult> {\n\n let url = params.text_document.uri;\n\n\n\n match session.get_semantic_tokens(&url) {\n\n Some(semantic_tokens) => {\n\n if semantic_tokens.is_empty() {\n\n return None;\n\n }\n\n\n\n Some(SemanticTokensResult::Tokens(SemanticTokens {\n\n result_id: None,\n\n data: semantic_tokens,\n\n }))\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "sway-server/src/capabilities/semantic_tokens.rs", "rank": 98, "score": 154098.77438346925 }, { "content": "pub fn handle_multiline_comment_case(\n\n code_line: &mut CodeLine,\n\n current_char: char,\n\n iter: &mut Peekable<Enumerate<Chars>>,\n\n) {\n\n code_line.push_char(current_char);\n\n\n\n if current_char == '*' {\n\n // end multiline comment and reset to default type\n\n if let Some((_, '/')) = iter.peek() {\n\n code_line.push_char('/');\n\n iter.next();\n\n code_line.become_default();\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 99, "score": 154098.77438346925 } ]
Rust
src/interchange/src/avro/encode.rs
jiangyuzhao/materialize
2f8bf7a64197cff27e43132f251d5908f2f9e520
use std::fmt; use byteorder::{NetworkEndian, WriteBytesExt}; use chrono::Timelike; use itertools::Itertools; use lazy_static::lazy_static; use mz_avro::types::AvroMap; use repr::adt::jsonb::JsonbRef; use repr::adt::numeric::{self, NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION}; use repr::{ColumnName, ColumnType, Datum, RelationDesc, Row, ScalarType}; use serde_json::json; use crate::encode::{column_names_and_types, Encode, TypedDatum}; use crate::json::build_row_schema_json; use mz_avro::types::{DecimalValue, Value}; use mz_avro::Schema; lazy_static! { static ref DEBEZIUM_TRANSACTION_SCHEMA: Schema = Schema::parse(&json!({ "type": "record", "name": "envelope", "fields": [ { "name": "id", "type": "string" }, { "name": "status", "type": "string" }, { "name": "event_count", "type": [ "null", "long" ] }, { "name": "data_collections", "type": [ "null", { "type": "array", "items": { "type": "record", "name": "data_collection", "fields": [ { "name": "data_collection", "type": "string" }, { "name": "event_count", "type": "long" }, ] } } ], "default": null, }, ] })).expect("valid schema constructed"); } fn build_schema(columns: &[(ColumnName, ColumnType)]) -> Schema { let row_schema = build_row_schema_json(&columns, "envelope"); Schema::parse(&row_schema).expect("valid schema constructed") } fn encode_avro_header(buf: &mut Vec<u8>, schema_id: i32) { buf.write_u8(0).expect("writing to vec cannot fail"); buf.write_i32::<NetworkEndian>(schema_id) .expect("writing to vec cannot fail"); } struct KeyInfo { columns: Vec<(ColumnName, ColumnType)>, schema: Schema, } fn encode_message_unchecked( schema_id: i32, row: Row, schema: &Schema, columns: &[(ColumnName, ColumnType)], ) -> Vec<u8> { let mut buf = vec![]; encode_avro_header(&mut buf, schema_id); let value = encode_datums_as_avro(row.iter(), columns); mz_avro::encode_unchecked(&value, schema, &mut buf); buf } pub struct AvroSchemaGenerator { value_columns: Vec<(ColumnName, ColumnType)>, key_info: Option<KeyInfo>, 
writer_schema: Schema, } impl AvroSchemaGenerator { pub fn new( key_desc: Option<RelationDesc>, value_desc: RelationDesc, include_transaction: bool, ) -> Self { let mut value_columns = column_names_and_types(value_desc); if include_transaction { value_columns.push(( "transaction".into(), ColumnType { nullable: false, scalar_type: ScalarType::Record { fields: vec![( "id".into(), ColumnType { scalar_type: ScalarType::String, nullable: false, }, )], custom_oid: None, custom_name: Some("transaction".to_string()), }, }, )); } let writer_schema = build_schema(&value_columns); let key_info = key_desc.map(|key_desc| { let columns = column_names_and_types(key_desc); let row_schema = build_row_schema_json(&columns, "row"); KeyInfo { schema: Schema::parse(&row_schema).expect("valid schema constructed"), columns, } }); AvroSchemaGenerator { value_columns, key_info, writer_schema, } } pub fn value_writer_schema(&self) -> &Schema { &self.writer_schema } pub fn value_columns(&self) -> &[(ColumnName, ColumnType)] { &self.value_columns } pub fn key_writer_schema(&self) -> Option<&Schema> { self.key_info.as_ref().map(|KeyInfo { schema, .. }| schema) } pub fn key_columns(&self) -> Option<&[(ColumnName, ColumnType)]> { self.key_info .as_ref() .map(|KeyInfo { columns, .. 
}| columns.as_slice()) } } impl fmt::Debug for AvroSchemaGenerator { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SchemaGenerator") .field("writer_schema", &self.writer_schema) .finish() } } pub struct AvroEncoder { schema_generator: AvroSchemaGenerator, key_schema_id: Option<i32>, value_schema_id: i32, } impl fmt::Debug for AvroEncoder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("AvroEncoder") .field("writer_schema", &self.schema_generator.writer_schema) .finish() } } impl AvroEncoder { pub fn new( schema_generator: AvroSchemaGenerator, key_schema_id: Option<i32>, value_schema_id: i32, ) -> Self { AvroEncoder { schema_generator, key_schema_id, value_schema_id, } } pub fn encode_key_unchecked(&self, schema_id: i32, row: Row) -> Vec<u8> { let schema = self.schema_generator.key_writer_schema().unwrap(); let columns = self.schema_generator.key_columns().unwrap(); encode_message_unchecked(schema_id, row, schema, columns) } pub fn encode_value_unchecked(&self, schema_id: i32, row: Row) -> Vec<u8> { let schema = self.schema_generator.value_writer_schema(); let columns = self.schema_generator.value_columns(); encode_message_unchecked(schema_id, row, schema, columns) } } impl Encode for AvroEncoder { fn get_format_name(&self) -> &str { "avro" } fn encode_key_unchecked(&self, row: Row) -> Vec<u8> { self.encode_key_unchecked(self.key_schema_id.unwrap(), row) } fn encode_value_unchecked(&self, row: Row) -> Vec<u8> { self.encode_value_unchecked(self.value_schema_id, row) } } pub fn encode_datums_as_avro<'a, I>(datums: I, names_types: &[(ColumnName, ColumnType)]) -> Value where I: IntoIterator<Item = Datum<'a>>, { let value_fields: Vec<(String, Value)> = names_types .iter() .zip_eq(datums) .map(|((name, typ), datum)| { let name = name.as_str().to_owned(); use mz_avro::types::ToAvro; (name, TypedDatum::new(datum, typ.clone()).avro()) }) .collect(); let v = Value::Record(value_fields); v } impl<'a> mz_avro::types::ToAvro for 
TypedDatum<'a> { fn avro(self) -> Value { let TypedDatum { datum, typ } = self; if typ.nullable && datum.is_null() { Value::Union { index: 0, inner: Box::new(Value::Null), n_variants: 2, null_variant: Some(0), } } else { let mut val = match &typ.scalar_type { ScalarType::Bool => Value::Boolean(datum.unwrap_bool()), ScalarType::Int16 => Value::Int(i32::from(datum.unwrap_int16())), ScalarType::Int32 | ScalarType::Oid => Value::Int(datum.unwrap_int32()), ScalarType::Int64 => Value::Long(datum.unwrap_int64()), ScalarType::Float32 => Value::Float(datum.unwrap_float32()), ScalarType::Float64 => Value::Double(datum.unwrap_float64()), ScalarType::Numeric { scale } => { let mut d = datum.unwrap_numeric().0; let (unscaled, precision, scale) = match scale { Some(scale) => { numeric::rescale(&mut d, *scale).unwrap(); ( numeric::numeric_to_twos_complement_be(d).to_vec(), NUMERIC_DATUM_MAX_PRECISION, usize::from(*scale), ) } None => ( numeric::numeric_to_twos_complement_wide(d).to_vec(), NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION, ), }; Value::Decimal(DecimalValue { unscaled, precision, scale, }) } ScalarType::Date => Value::Date(datum.unwrap_date()), ScalarType::Time => Value::Long({ let time = datum.unwrap_time(); (time.num_seconds_from_midnight() * 1_000_000) as i64 + (time.nanosecond() as i64) / 1_000 }), ScalarType::Timestamp => Value::Timestamp(datum.unwrap_timestamp()), ScalarType::TimestampTz => Value::Timestamp(datum.unwrap_timestamptz().naive_utc()), ScalarType::Interval => Value::Fixed(20, { let iv = datum.unwrap_interval(); let mut buf = Vec::with_capacity(24); buf.extend(&iv.months.to_le_bytes()); buf.extend(&iv.duration.to_le_bytes()); debug_assert_eq!(buf.len(), 20); buf }), ScalarType::Bytes => Value::Bytes(Vec::from(datum.unwrap_bytes())), ScalarType::String => Value::String(datum.unwrap_str().to_owned()), ScalarType::Jsonb => Value::Json(JsonbRef::from_datum(datum).to_serde_json()), ScalarType::Uuid => Value::Uuid(datum.unwrap_uuid()), 
ScalarType::Array(element_type) | ScalarType::List { element_type, .. } => { let list = match typ.scalar_type { ScalarType::Array(_) => datum.unwrap_array().elements(), ScalarType::List { .. } => datum.unwrap_list(), _ => unreachable!(), }; let values = list .into_iter() .map(|datum| { let datum = TypedDatum::new( datum, ColumnType { nullable: true, scalar_type: (**element_type).clone(), }, ); datum.avro() }) .collect(); Value::Array(values) } ScalarType::Map { value_type, .. } => { let map = datum.unwrap_map(); let elements = map .into_iter() .map(|(key, datum)| { let datum = TypedDatum::new( datum, ColumnType { nullable: true, scalar_type: (**value_type).clone(), }, ); let value = datum.avro(); (key.to_string(), value) }) .collect(); Value::Map(AvroMap(elements)) } ScalarType::Record { fields, .. } => { let list = datum.unwrap_list(); let fields = fields .iter() .zip(list.into_iter()) .map(|((name, typ), datum)| { let name = name.to_string(); let datum = TypedDatum::new(datum, typ.clone()); let value = datum.avro(); (name, value) }) .collect(); Value::Record(fields) } }; if typ.nullable { val = Value::Union { index: 1, inner: Box::new(val), n_variants: 2, null_variant: Some(0), }; } val } } } pub fn get_debezium_transaction_schema() -> &'static Schema { &DEBEZIUM_TRANSACTION_SCHEMA } pub fn encode_debezium_transaction_unchecked( schema_id: i32, collection: &str, id: &str, status: &str, message_count: Option<i64>, ) -> Vec<u8> { let mut buf = Vec::new(); encode_avro_header(&mut buf, schema_id); let transaction_id = Value::String(id.to_owned()); let status = Value::String(status.to_owned()); let event_count = match message_count { None => Value::Union { index: 0, inner: Box::new(Value::Null), n_variants: 2, null_variant: Some(0), }, Some(count) => Value::Union { index: 1, inner: Box::new(Value::Long(count)), n_variants: 2, null_variant: Some(0), }, }; let data_collections = if let Some(message_count) = message_count { let collection = Value::Record(vec![ 
("data_collection".into(), Value::String(collection.into())), ("event_count".into(), Value::Long(message_count)), ]); Value::Union { index: 1, inner: Box::new(Value::Array(vec![collection])), n_variants: 2, null_variant: Some(0), } } else { Value::Union { index: 0, inner: Box::new(Value::Null), n_variants: 2, null_variant: Some(0), } }; let record_contents = vec![ ("id".into(), transaction_id), ("status".into(), status), ("event_count".into(), event_count), ("data_collections".into(), data_collections), ]; let avro = Value::Record(record_contents); debug_assert!(avro.validate(DEBEZIUM_TRANSACTION_SCHEMA.top_node())); mz_avro::encode_unchecked(&avro, &DEBEZIUM_TRANSACTION_SCHEMA, &mut buf); buf }
use std::fmt; use byteorder::{NetworkEndian, WriteBytesExt}; use chrono::Timelike; use itertools::Itertools; use lazy_static::lazy_static; use mz_avro::types::AvroMap; use repr::adt::jsonb::JsonbRef; use repr::adt::numeric::{self, NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION}; use repr::{ColumnName, ColumnType, Datum, RelationDesc, Row, ScalarType}; use serde_json::json; use crate::encode::{column_names_and_types, Encode, TypedDatum}; use crate::json::build_row_schema_json; use mz_avro::types::{DecimalValue, Value}; use mz_avro::Schema; lazy_static! { static ref DEBEZIUM_TRANSACTION_SCHEMA: Schema = Schema::parse(&json!({ "type": "record", "name": "envelope", "fields": [ { "name": "id", "type": "string" }, { "name": "status", "type": "string" }, { "name": "event_count", "type": [ "null", "long" ] }, { "name": "data_collections", "type": [ "null", { "type": "array", "items": { "type": "record", "name": "data_collection", "fields": [ { "name": "data_collection", "type": "string" }, { "name": "event_count", "type": "long" }, ] } } ], "default": null, }, ] })).expect("valid schema constructed"); } fn build_schema(columns: &[(ColumnName, ColumnType)]) -> Schema { let row_schema = build_row_schema_json(&columns, "envelope"); Schema::parse(&row_schema).expect("valid schema constructed") } fn encode_avro_header(buf: &mut Vec<u8>, schema_id: i32) { buf.write_u8(0).expect("writing to vec cannot fail"); buf.write_i32::<NetworkEndian>(schema_id) .expect("writing to vec cannot fail"); } struct KeyInfo { columns: Vec<(ColumnName, ColumnType)>, schema: Schema, } fn encode_message_unchecked( schema_id: i32, row: Row, schema: &Schema, columns: &[(ColumnName, ColumnType)], ) -> Vec<u8> { let mut buf = vec![]; encode_avro_header(&mut buf, schema_id); let value = encode_datums_as_avro(row.iter(), columns); mz_avro::encode_unchecked(&value, schema, &mut buf); buf } pub struct AvroSchemaGenerator { value_columns: Vec<(ColumnName, ColumnType)>, key_info: Option<KeyInfo>, 
writer_schema: Schema, } impl AvroSchemaGenerator { pub fn new( key_desc: Option<RelationDesc>, value_desc: RelationDesc, include_transaction: bool, ) -> Self { let mut value_columns = column_names_and_types(value_desc); if include_transaction { value_columns.push(( "transaction".into(), ColumnType { nullable: false, scalar_type: ScalarType::Record { fields: vec![( "id".into(), ColumnType { scalar_type: ScalarType::String, nullable: false, }, )], custom_oid: None, custom_name: Some("transaction".to_string()), }, }, )); } let writer_schema = build_schema(&value_columns); let key_info = key_desc.map(|key_desc| { let columns = column_names_and_types(key_desc); let row_schema = build_row_schema_json(&columns, "row"); KeyInfo { schema: Schema::parse(&row_schema).expect("valid schema constructed"), columns, } }); AvroSchemaGenerator { value_columns, key_info, writer_schema, } } pub fn value_writer_schema(&self) -> &Schema { &self.writer_schema } pub fn value_columns(&self) -> &[(ColumnName, ColumnType)] { &self.value_columns } pub fn key_writer_schema(&self) -> Option<&Schema> { self.key_info.as_ref().map(|KeyInfo { schema, .. }| schema) } pub fn key_columns(&self) -> Option<&[(ColumnName, ColumnType)]> { self.key_info .as_ref() .map(|KeyInfo { columns, .. 
}| columns.as_slice()) } } impl fmt::Debug for AvroSchemaGenerator { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SchemaGenerator") .field("writer_schema", &self.writer_schema) .finish() } } pub struct AvroEncoder { schema_generator: AvroSchemaGenerator, key_schema_id: Option<i32>, value_schema_id: i32, } impl fmt::Debug for AvroEncoder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("AvroEncoder") .field("writer_schema", &self.schema_generator.writer_schema) .finish() } } impl AvroEncoder { pub fn new( schema_generator: AvroSchemaGenerator, key_schema_id: Option<i32>, value_schema_id: i32, ) -> Self { AvroEncoder { schema_generator, key_schema_id, value_schema_id, } } pub fn encode_key_unchecked(&self, schema_id: i32, row: Row) -> Vec<u8> { let schema = self.schema_generator.key_writer_schema().unwrap(); let columns = self.schema_generator.key_columns().unwrap(); encode_message_unchecked(schema_id, row, schema, columns) } pub fn encode_value_unchecked(&self, schema_id: i32, row: Row) -> Vec<u8> { let schema = self.schema_generator.value_writer_schema(); let columns = self.schema_generator.value_columns(); encode_message_unchecked(schema_id, row, schema, columns) } } impl Encode for AvroEncoder { fn get_format_name(&self) -> &str { "avro" } fn encode_key_unchecked(&self, row: Row) -> Vec<u8> { self.encode_key_unchecked(self.key_schema_id.unwrap(), row) } fn encode_value_unchecked(&self, row: Row) -> Vec<u8> { self.encode_value_unchecked(self.value_schema_id, row) } } pub fn encode_datums_as_avro<'a, I>(datums: I, names_types: &[(ColumnName, ColumnType)]) -> Value where
impl<'a> mz_avro::types::ToAvro for TypedDatum<'a> { fn avro(self) -> Value { let TypedDatum { datum, typ } = self; if typ.nullable && datum.is_null() { Value::Union { index: 0, inner: Box::new(Value::Null), n_variants: 2, null_variant: Some(0), } } else { let mut val = match &typ.scalar_type { ScalarType::Bool => Value::Boolean(datum.unwrap_bool()), ScalarType::Int16 => Value::Int(i32::from(datum.unwrap_int16())), ScalarType::Int32 | ScalarType::Oid => Value::Int(datum.unwrap_int32()), ScalarType::Int64 => Value::Long(datum.unwrap_int64()), ScalarType::Float32 => Value::Float(datum.unwrap_float32()), ScalarType::Float64 => Value::Double(datum.unwrap_float64()), ScalarType::Numeric { scale } => { let mut d = datum.unwrap_numeric().0; let (unscaled, precision, scale) = match scale { Some(scale) => { numeric::rescale(&mut d, *scale).unwrap(); ( numeric::numeric_to_twos_complement_be(d).to_vec(), NUMERIC_DATUM_MAX_PRECISION, usize::from(*scale), ) } None => ( numeric::numeric_to_twos_complement_wide(d).to_vec(), NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION, ), }; Value::Decimal(DecimalValue { unscaled, precision, scale, }) } ScalarType::Date => Value::Date(datum.unwrap_date()), ScalarType::Time => Value::Long({ let time = datum.unwrap_time(); (time.num_seconds_from_midnight() * 1_000_000) as i64 + (time.nanosecond() as i64) / 1_000 }), ScalarType::Timestamp => Value::Timestamp(datum.unwrap_timestamp()), ScalarType::TimestampTz => Value::Timestamp(datum.unwrap_timestamptz().naive_utc()), ScalarType::Interval => Value::Fixed(20, { let iv = datum.unwrap_interval(); let mut buf = Vec::with_capacity(24); buf.extend(&iv.months.to_le_bytes()); buf.extend(&iv.duration.to_le_bytes()); debug_assert_eq!(buf.len(), 20); buf }), ScalarType::Bytes => Value::Bytes(Vec::from(datum.unwrap_bytes())), ScalarType::String => Value::String(datum.unwrap_str().to_owned()), ScalarType::Jsonb => Value::Json(JsonbRef::from_datum(datum).to_serde_json()), ScalarType::Uuid => 
Value::Uuid(datum.unwrap_uuid()), ScalarType::Array(element_type) | ScalarType::List { element_type, .. } => { let list = match typ.scalar_type { ScalarType::Array(_) => datum.unwrap_array().elements(), ScalarType::List { .. } => datum.unwrap_list(), _ => unreachable!(), }; let values = list .into_iter() .map(|datum| { let datum = TypedDatum::new( datum, ColumnType { nullable: true, scalar_type: (**element_type).clone(), }, ); datum.avro() }) .collect(); Value::Array(values) } ScalarType::Map { value_type, .. } => { let map = datum.unwrap_map(); let elements = map .into_iter() .map(|(key, datum)| { let datum = TypedDatum::new( datum, ColumnType { nullable: true, scalar_type: (**value_type).clone(), }, ); let value = datum.avro(); (key.to_string(), value) }) .collect(); Value::Map(AvroMap(elements)) } ScalarType::Record { fields, .. } => { let list = datum.unwrap_list(); let fields = fields .iter() .zip(list.into_iter()) .map(|((name, typ), datum)| { let name = name.to_string(); let datum = TypedDatum::new(datum, typ.clone()); let value = datum.avro(); (name, value) }) .collect(); Value::Record(fields) } }; if typ.nullable { val = Value::Union { index: 1, inner: Box::new(val), n_variants: 2, null_variant: Some(0), }; } val } } } pub fn get_debezium_transaction_schema() -> &'static Schema { &DEBEZIUM_TRANSACTION_SCHEMA } pub fn encode_debezium_transaction_unchecked( schema_id: i32, collection: &str, id: &str, status: &str, message_count: Option<i64>, ) -> Vec<u8> { let mut buf = Vec::new(); encode_avro_header(&mut buf, schema_id); let transaction_id = Value::String(id.to_owned()); let status = Value::String(status.to_owned()); let event_count = match message_count { None => Value::Union { index: 0, inner: Box::new(Value::Null), n_variants: 2, null_variant: Some(0), }, Some(count) => Value::Union { index: 1, inner: Box::new(Value::Long(count)), n_variants: 2, null_variant: Some(0), }, }; let data_collections = if let Some(message_count) = message_count { let 
collection = Value::Record(vec![ ("data_collection".into(), Value::String(collection.into())), ("event_count".into(), Value::Long(message_count)), ]); Value::Union { index: 1, inner: Box::new(Value::Array(vec![collection])), n_variants: 2, null_variant: Some(0), } } else { Value::Union { index: 0, inner: Box::new(Value::Null), n_variants: 2, null_variant: Some(0), } }; let record_contents = vec![ ("id".into(), transaction_id), ("status".into(), status), ("event_count".into(), event_count), ("data_collections".into(), data_collections), ]; let avro = Value::Record(record_contents); debug_assert!(avro.validate(DEBEZIUM_TRANSACTION_SCHEMA.top_node())); mz_avro::encode_unchecked(&avro, &DEBEZIUM_TRANSACTION_SCHEMA, &mut buf); buf }
I: IntoIterator<Item = Datum<'a>>, { let value_fields: Vec<(String, Value)> = names_types .iter() .zip_eq(datums) .map(|((name, typ), datum)| { let name = name.as_str().to_owned(); use mz_avro::types::ToAvro; (name, TypedDatum::new(datum, typ.clone()).avro()) }) .collect(); let v = Value::Record(value_fields); v }
function_block-function_prefix_line
[ { "content": "/// Encode a `Value` into avro format.\n\n///\n\n/// **NOTE** This will not perform schema validation. The value is assumed to\n\n/// be valid with regards to the schema. Schema are needed only to guide the\n\n/// encoding for complex type values.\n\npub fn encode_ref(value: &Value, schema: SchemaNode, buffer: &mut Vec<u8>) {\n\n match value {\n\n Value::Null => (),\n\n Value::Boolean(b) => buffer.push(if *b { 1u8 } else { 0u8 }),\n\n Value::Int(i) => encode_int(*i, buffer),\n\n Value::Long(i) => encode_long(*i, buffer),\n\n Value::Float(x) => buffer.extend_from_slice(&unsafe { transmute::<f32, [u8; 4]>(*x) }),\n\n Value::Date(d) => {\n\n let span = (*d) - chrono::NaiveDate::from_ymd(1970, 1, 1);\n\n encode_int(\n\n span.num_days()\n\n .try_into()\n\n .expect(\"Num days is too large to encode as i32\"),\n\n buffer,\n\n )\n\n }\n\n Value::Timestamp(d) => {\n\n let mult = match schema.inner {\n\n SchemaPiece::TimestampMilli => 1_000,\n\n SchemaPiece::TimestampMicro => 1_000_000,\n", "file_path": "src/avro/src/encode.rs", "rank": 1, "score": 578765.2427224701 }, { "content": "/// Encode a `Value` into avro format.\n\n///\n\n/// **NOTE** This will not perform schema validation. The value is assumed to\n\n/// be valid with regards to the schema. 
Schema are needed only to guide the\n\n/// encoding for complex type values.\n\npub fn encode(value: &Value, schema: &Schema, buffer: &mut Vec<u8>) {\n\n encode_ref(&value, schema.top_node(), buffer)\n\n}\n\n\n", "file_path": "src/avro/src/encode.rs", "rank": 2, "score": 565444.8370667836 }, { "content": "pub fn format_string<F>(buf: &mut F, s: &str) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_str(s);\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 3, "score": 538109.4133585393 }, { "content": "pub fn encode_to_vec(value: &Value, schema: &Schema) -> Vec<u8> {\n\n let mut buffer = Vec::new();\n\n encode(&value, schema, &mut buffer);\n\n buffer\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::collections::HashMap;\n\n\n\n #[test]\n\n fn test_encode_empty_array() {\n\n let mut buf = Vec::new();\n\n let empty: Vec<Value> = Vec::new();\n\n encode(\n\n &Value::Array(empty),\n\n &r#\"{\"type\": \"array\", \"items\": \"int\"}\"#.parse().unwrap(),\n\n &mut buf,\n\n );\n", "file_path": "src/avro/src/encode.rs", "rank": 4, "score": 524023.948810263 }, { "content": "/// Constructs a null datum of the specified type.\n\npub fn null_datum(ty: &Type) -> (Datum<'static>, ScalarType) {\n\n let ty = match ty {\n\n Type::Array(t) => {\n\n let (_, elem_type) = null_datum(t);\n\n ScalarType::Array(Box::new(elem_type))\n\n }\n\n Type::Bool => ScalarType::Bool,\n\n Type::Bytea => ScalarType::Bytes,\n\n Type::Date => ScalarType::Date,\n\n Type::Float4 => ScalarType::Float32,\n\n Type::Float8 => ScalarType::Float64,\n\n Type::Int2 => ScalarType::Int16,\n\n Type::Int4 => ScalarType::Int32,\n\n Type::Int8 => ScalarType::Int64,\n\n Type::Interval => ScalarType::Interval,\n\n Type::Jsonb => ScalarType::Jsonb,\n\n Type::List(t) => {\n\n let (_, elem_type) = null_datum(t);\n\n ScalarType::List {\n\n element_type: Box::new(elem_type),\n", "file_path": "src/pgrepr/src/value.rs", "rank": 5, "score": 522768.93059652194 
}, { "content": "/// Writes a boolean value into `buf`.\n\n///\n\n/// `true` is encoded as the char `'t'` and `false` is encoded as the char\n\n/// `'f'`.\n\npub fn format_bool<F>(buf: &mut F, b: bool) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_str(format_bool_static(b));\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 7, "score": 508712.2682055901 }, { "content": "/// Writes an [`i32`] to `buf`.\n\npub fn format_int32<F>(buf: &mut F, i: i32) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", i);\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 8, "score": 493294.46142471477 }, { "content": "/// Extracts deduplicated column names and types from a relation description.\n\npub fn column_names_and_types(desc: RelationDesc) -> Vec<(ColumnName, ColumnType)> {\n\n // Invent names for columns that don't have a name.\n\n let mut columns: Vec<_> = desc\n\n .into_iter()\n\n .enumerate()\n\n .map(|(i, (name, ty))| match name {\n\n None => (ColumnName::from(format!(\"column{}\", i + 1)), ty),\n\n Some(name) => (name, ty),\n\n })\n\n .collect();\n\n\n\n // Deduplicate names.\n\n let mut seen = HashSet::new();\n\n for (name, _ty) in &mut columns {\n\n let stem_len = name.as_str().len();\n\n let mut i = 1;\n\n while seen.contains(name) {\n\n name.as_mut_str().truncate(stem_len);\n\n if name.as_str().ends_with(|c: char| c.is_ascii_digit()) {\n\n name.as_mut_str().push('_');\n\n }\n\n name.as_mut_str().push_str(&i.to_string());\n\n i += 1;\n\n }\n\n seen.insert(name);\n\n }\n\n columns\n\n}\n", "file_path": "src/interchange/src/encode.rs", "rank": 9, "score": 490011.96065804746 }, { "content": "/// Gets the names and the types of the fields of an enum variant or struct.\n\nfn get_field_names_types(f: &syn::Fields) -> (Vec<String>, Vec<String>) {\n\n match f {\n\n Fields::Named(named_fields) => {\n\n let (names, types): (Vec<_>, Vec<_>) = named_fields\n\n .named\n\n .iter()\n\n 
.map(|n| {\n\n (\n\n n.ident.as_ref().unwrap().to_string(),\n\n get_type_as_string(&n.ty),\n\n )\n\n })\n\n .unzip();\n\n (names, types)\n\n }\n\n Fields::Unnamed(unnamed_fields) => {\n\n let types = unnamed_fields\n\n .unnamed\n\n .iter()\n\n .map(|u| get_type_as_string(&u.ty))\n\n .collect::<Vec<_>>();\n\n (Vec::new(), types)\n\n }\n\n Fields::Unit => (Vec::new(), Vec::new()),\n\n }\n\n}\n\n\n", "file_path": "src/lowertest-derive/src/lib.rs", "rank": 10, "score": 466163.2074030257 }, { "content": "/// Construct the schema for the CDC V2 protocol.\n\npub fn build_schema(row_schema: serde_json::Value) -> Schema {\n\n let updates_schema = json!({\n\n \"type\": \"array\",\n\n \"items\": {\n\n \"name\" : \"update\",\n\n \"type\" : \"record\",\n\n \"fields\" : [\n\n {\n\n \"name\": \"data\",\n\n \"type\": row_schema,\n\n },\n\n {\n\n \"name\" : \"time\",\n\n \"type\" : \"long\",\n\n },\n\n {\n\n \"name\" : \"diff\",\n\n \"type\" : \"long\",\n\n },\n\n ],\n", "file_path": "src/interchange/src/avro/envelope_cdc_v2.rs", "rank": 11, "score": 465570.1694339299 }, { "content": "fn build_row_schema_field<F: FnMut() -> String>(\n\n namer: &mut F,\n\n names_seen: &mut HashSet<String>,\n\n typ: &ColumnType,\n\n) -> serde_json::value::Value {\n\n let mut field_type = match &typ.scalar_type {\n\n ScalarType::Bool => json!(\"boolean\"),\n\n ScalarType::Int16 | ScalarType::Int32 | ScalarType::Oid => json!(\"int\"),\n\n ScalarType::Int64 => json!(\"long\"),\n\n ScalarType::Float32 => json!(\"float\"),\n\n ScalarType::Float64 => json!(\"double\"),\n\n ScalarType::Date => json!({\n\n \"type\": \"int\",\n\n \"logicalType\": \"date\",\n\n }),\n\n ScalarType::Time => json!({\n\n \"type\": \"long\",\n\n \"logicalType\": \"time-micros\",\n\n }),\n\n ScalarType::Timestamp | ScalarType::TimestampTz => json!({\n", "file_path": "src/interchange/src/json.rs", "rank": 12, "score": 461677.4036701776 }, { "content": "pub fn zig_i32(n: i32, buffer: &mut Vec<u8>) {\n\n zig_i64(n as i64, 
buffer)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 13, "score": 460441.1255341398 }, { "content": "/// Converts serialized JSON to the syntax that [to_json] handles.\n\n///\n\n/// `json` is assumed to have been produced by serializing an object of type\n\n/// `type_name`.\n\n/// `ctx` is responsible for converting serialized JSON to any syntax\n\n/// extensions or overrides.\n\npub fn from_json<C>(json: &Value, type_name: &str, rti: &ReflectedTypeInfo, ctx: &mut C) -> String\n\nwhere\n\n C: TestDeserializeContext,\n\n{\n\n let type_name = normalize_type_name(type_name);\n\n if let Some(result) = ctx.reverse_syntax_override(json, &type_name, rti) {\n\n return result;\n\n }\n\n if let Some((names, types)) = rti.struct_dict.get(&type_name[..]) {\n\n format!(\"({})\", from_json_fields(json, names, types, rti, ctx))\n\n } else if let Some(enum_dict) = rti.enum_dict.get(&type_name[..]) {\n\n match json {\n\n // A unit enum in JSON is `\"variant\"`. In the spec it is `variant`.\n\n Value::String(s) => unquote(s),\n\n // An enum with fields is `{\"variant\": <fields>}` in JSON. In the\n\n // spec it is `(variant field1 .. 
fieldn).\n\n Value::Object(map) => {\n\n // Each enum instance only belongs to one variant.\n\n assert_eq!(\n\n map.len(),\n", "file_path": "src/lowertest/src/lib.rs", "rank": 14, "score": 460277.56192300085 }, { "content": "fn write_value_ref(schema: &Schema, value: &Value, buffer: &mut Vec<u8>) -> Result<(), Error> {\n\n if !value.validate(schema.top_node()) {\n\n return Err(ValidationError::new(\"value does not match schema\").into());\n\n }\n\n encode_ref(value, schema.top_node(), buffer);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/avro/src/writer.rs", "rank": 15, "score": 458417.38598385267 }, { "content": "fn bench_indexed_snapshots<U, L, F>(c: &mut Criterion, name: &str, mut new_fn: F)\n\nwhere\n\n U: Buffer,\n\n L: Blob,\n\n F: FnMut(usize) -> Result<Indexed<String, String, U, L>, Error>,\n\n{\n\n let data_len = 100_000;\n\n let data: Vec<_> = (0..data_len)\n\n .map(|i| ((format!(\"key{}\", i), format!(\"val{}\", i)), i as u64, 1))\n\n .collect();\n\n\n\n let mut i = new_fn(1).expect(\"creating index cannot fail\");\n\n let id = i.register(\"0\").expect(\"registration succeeds\");\n\n\n\n // Write the data out to the index's buffer.\n\n i.write_sync(vec![(id, data)])\n\n .expect(\"writing to index cannot fail\");\n\n c.bench_function(&format!(\"{}_buffer_snapshot\", name), |b| {\n\n bench_snapshot(&i, id, data_len, b)\n\n });\n", "file_path": "src/persist/benches/snapshot.rs", "rank": 16, "score": 457398.0254104234 }, { "content": "pub fn format_jsonb_pretty<F>(buf: &mut F, jsonb: JsonbRef)\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{:#}\", jsonb)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 18, "score": 455301.1913797263 }, { "content": "/// Like `format_bool`, but returns a string with a static lifetime.\n\n///\n\n/// This function should be preferred to `format_bool` when applicable, as it\n\n/// avoids an allocation.\n\npub fn format_bool_static(b: bool) -> &'static str {\n\n match b {\n\n true => \"t\",\n\n false => \"f\",\n\n 
}\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 19, "score": 453305.3275628304 }, { "content": "/// Encode a compatible value (implementing the `ToAvro` trait) into Avro format, also\n\n/// performing schema validation.\n\n///\n\n/// **NOTE** This function has a quite small niche of usage and does NOT generate headers and sync\n\n/// markers; use [`Writer`](struct.Writer.html) to be fully Avro-compatible if you don't know what\n\n/// you are doing, instead.\n\npub fn to_avro_datum<T: ToAvro>(schema: &Schema, value: T) -> Result<Vec<u8>, Error> {\n\n let mut buffer = Vec::new();\n\n write_avro_datum(schema, value, &mut buffer)?;\n\n Ok(buffer)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Cursor;\n\n use std::str::FromStr;\n\n\n\n use serde::{Deserialize, Serialize};\n\n\n\n use super::*;\n\n use crate::types::Record;\n\n use crate::util::zig_i64;\n\n use crate::Reader;\n\n\n\n static SCHEMA: &str = r#\"\n\n {\n", "file_path": "src/avro/src/writer.rs", "rank": 20, "score": 452824.95816977764 }, { "content": "pub fn format_jsonb<F>(buf: &mut F, jsonb: JsonbRef) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", jsonb);\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 21, "score": 447806.70888271776 }, { "content": "/// Merges streams together, yielding items as they become available.\n\n///\n\n/// Like [`stream::select_all()`], except that ready items from earlier streams\n\n/// are preferred to later streams. For example, all ready items from the first\n\n/// stream in `streams` will be yielded before moving on to the second stream in\n\n/// `streams. 
This can cause starvation, so use with care.\n\npub fn select_all_biased<S>(mut streams: Vec<S>) -> impl Stream<Item = S::Item>\n\nwhere\n\n S: Stream + Unpin,\n\n{\n\n stream::poll_fn(move |cx| {\n\n let mut i = 0;\n\n while i < streams.len() {\n\n match streams[i].poll_next_unpin(cx) {\n\n Poll::Ready(Some(v)) => return Poll::Ready(Some(v)),\n\n Poll::Ready(None) => {\n\n streams.remove(i);\n\n }\n\n Poll::Pending => i += 1,\n\n }\n\n }\n\n if streams.is_empty() {\n\n Poll::Ready(None)\n\n } else {\n\n Poll::Pending\n\n }\n", "file_path": "src/ore/src/future.rs", "rank": 22, "score": 446717.5633917679 }, { "content": "/// Converts a Materialize row into a vector of PostgreSQL values.\n\n///\n\n/// Calling this function is equivalent to mapping [`Value::from_datum`] over\n\n/// every datum in `row`.\n\npub fn values_from_row(row: Row, typ: &RelationType) -> Vec<Option<Value>> {\n\n row.iter()\n\n .zip(typ.column_types.iter())\n\n .map(|(col, typ)| Value::from_datum(col, &typ.scalar_type))\n\n .collect()\n\n}\n", "file_path": "src/pgrepr/src/value.rs", "rank": 23, "score": 446522.202198956 }, { "content": "/// Decode a `Value` encoded in Avro format given its `Schema` and anything implementing `io::Read`\n\n/// to read from.\n\n///\n\n/// In case a reader `Schema` is provided, schema resolution will also be performed.\n\n///\n\n/// **NOTE** This function has a quite small niche of usage and does NOT take care of reading the\n\n/// header and consecutive data blocks; use [`Reader`](struct.Reader.html) if you don't know what\n\n/// you are doing, instead.\n\npub fn from_avro_datum<R: AvroRead>(schema: &Schema, reader: &mut R) -> Result<Value, AvroError> {\n\n let value = decode(schema.top_node(), reader)?;\n\n Ok(value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types::{Record, ToAvro};\n\n use crate::Reader;\n\n\n\n use std::io::Cursor;\n\n\n\n static SCHEMA: &str = r#\"\n\n {\n\n \"type\": \"record\",\n\n \"name\": \"test\",\n\n 
\"fields\": [\n\n {\"name\": \"a\", \"type\": \"long\", \"default\": 42},\n\n {\"name\": \"b\", \"type\": \"string\"}\n", "file_path": "src/avro/src/reader.rs", "rank": 24, "score": 446025.5656683709 }, { "content": "fn encode_int(i: i32, buffer: &mut Vec<u8>) {\n\n zig_i32(i, buffer)\n\n}\n\n\n", "file_path": "src/avro/src/encode.rs", "rank": 25, "score": 440261.86870143475 }, { "content": "fn validate_schema_1(schema: SchemaNode) -> anyhow::Result<Vec<(ColumnName, ColumnType)>> {\n\n match schema.inner {\n\n SchemaPiece::Record { fields, .. } => {\n\n let mut columns = vec![];\n\n let mut seen_avro_nodes = Default::default();\n\n for f in fields {\n\n columns.extend(get_named_columns(\n\n &mut seen_avro_nodes,\n\n schema.step(&f.schema),\n\n &f.name,\n\n )?);\n\n }\n\n Ok(columns)\n\n }\n\n _ => bail!(\"row schemas must be records, got: {:?}\", schema.inner),\n\n }\n\n}\n\n\n", "file_path": "src/interchange/src/avro/schema.rs", "rank": 26, "score": 432452.57090964715 }, { "content": "fn is_null(schema: &SchemaPieceOrNamed) -> bool {\n\n matches!(schema, SchemaPieceOrNamed::Piece(SchemaPiece::Null))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use anyhow::Context;\n\n use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\n use ordered_float::OrderedFloat;\n\n use serde::Deserialize;\n\n use std::fs::File;\n\n\n\n use mz_avro::types::{DecimalValue, Value};\n\n use repr::adt::numeric;\n\n use repr::{ColumnName, ColumnType, Datum, RelationDesc, ScalarType};\n\n\n\n use super::*;\n\n\n\n #[derive(Deserialize)]\n\n struct TestCase {\n", "file_path": "src/interchange/src/avro.rs", "rank": 28, "score": 429761.66651671194 }, { "content": "/// Writes an `f64` to `buf`.\n\npub fn format_float64<F>(buf: &mut F, f: f64) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n format_float(buf, f)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 29, "score": 422155.37952546764 }, { "content": "/// Writes an `f32` to `buf`.\n\npub fn format_float32<F>(buf: 
&mut F, f: f32) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n format_float(buf, f)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 30, "score": 422155.3795254676 }, { "content": "fn unnest_array<'a>(a: Datum<'a>) -> impl Iterator<Item = (Row, Diff)> + 'a {\n\n a.unwrap_array()\n\n .elements()\n\n .iter()\n\n .map(move |e| (Row::pack_slice(&[e]), 1))\n\n}\n\n\n", "file_path": "src/expr/src/relation/func.rs", "rank": 31, "score": 418858.9756795197 }, { "content": "fn bench_sort_datums(rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n b.iter_with_setup(|| rows.clone(), |mut rows| rows.sort())\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 32, "score": 418792.22035028477 }, { "content": "fn write(schema: &Schema, records: &[Value]) -> Vec<u8> {\n\n let mut writer = Writer::new(&schema, Vec::new());\n\n writer.extend_from_slice(records).unwrap();\n\n writer.into_inner()\n\n}\n\n\n", "file_path": "src/avro/benches/serde.rs", "rank": 33, "score": 417321.2494820145 }, { "content": "/// Writes an `i64` to `buf`.\n\npub fn format_int64<F>(buf: &mut F, i: i64) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", i);\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 34, "score": 414952.7466441118 }, { "content": "/// Writes an [`i16`] to `buf`.\n\npub fn format_int16<F>(buf: &mut F, i: i16) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", i);\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 35, "score": 414952.7466441117 }, { "content": "/// Constructs a `Datum` from `litval` and `littyp`.\n\n///\n\n/// See [get_scalar_type_or_default] for creating a `ScalarType`.\n\n///\n\n/// Because Datums do not own their strings, if `littyp` is\n\n/// `ScalarType::String`, make sure that `litval` has already\n\n/// been unquoted by [unquote_string].\n\n///\n\n/// Generally, `litval` can be parsed into a Datum in the manner you would\n\n/// imagine. 
Exceptions:\n\n/// * A Timestamp should be in the format `\"\\\"%Y-%m-%d %H:%M:%S%.f\\\"\"` or\n\n/// `\"\\\"%Y-%m-%d %H:%M:%S\\\"\"`\n\npub fn get_datum_from_str<'a>(litval: &'a str, littyp: &ScalarType) -> Result<Datum<'a>, String> {\n\n if litval == \"null\" {\n\n return Ok(Datum::Null);\n\n }\n\n match littyp {\n\n ScalarType::Bool => Ok(Datum::from(parse_litval::<bool>(litval, \"bool\")?)),\n\n ScalarType::Numeric { .. } => Ok(Datum::from(parse_litval::<Numeric>(litval, \"Numeric\")?)),\n\n ScalarType::Int16 => Ok(Datum::from(parse_litval::<i16>(litval, \"i16\")?)),\n\n ScalarType::Int32 => Ok(Datum::from(parse_litval::<i32>(litval, \"i32\")?)),\n\n ScalarType::Int64 => Ok(Datum::from(parse_litval::<i64>(litval, \"i64\")?)),\n\n ScalarType::Float32 => Ok(Datum::from(parse_litval::<f32>(litval, \"f32\")?)),\n\n ScalarType::Float64 => Ok(Datum::from(parse_litval::<f64>(litval, \"f64\")?)),\n\n ScalarType::String => Ok(Datum::from(litval)),\n\n ScalarType::Timestamp => {\n\n let datetime = if litval.contains('.') {\n\n NaiveDateTime::parse_from_str(litval, \"\\\"%Y-%m-%d %H:%M:%S%.f\\\"\")\n\n } else {\n\n NaiveDateTime::parse_from_str(litval, \"\\\"%Y-%m-%d %H:%M:%S\\\"\")\n\n };\n\n Ok(Datum::from(datetime.map_err(|e| {\n\n format!(\"Error while parsing NaiveDateTime: {}\", e)\n\n })?))\n\n }\n\n _ => Err(format!(\"Unsupported literal type {:?}\", littyp)),\n\n }\n\n}\n\n\n", "file_path": "src/repr-test-util/src/lib.rs", "rank": 36, "score": 413992.32473907596 }, { "content": "#[derive(Debug, PartialEq, Clone)]\n\nstruct PropertizedDict(Row, Vec<(String, PropertizedDatum)>);\n\n\n", "file_path": "src/repr/tests/rows.rs", "rank": 37, "score": 412819.2731326405 }, { "content": "fn format_row(row: &Row, types: &[Type], mode: Mode, sort: &Sort) -> Vec<String> {\n\n let mut formatted: Vec<String> = vec![];\n\n for i in 0..row.len() {\n\n let t: Option<Slt> = row.get::<usize, Option<Slt>>(i);\n\n let t: Option<String> = t.map(|d| format_datum(d, &types[i], mode, 
i));\n\n formatted.push(match t {\n\n Some(t) => t,\n\n None => \"NULL\".into(),\n\n });\n\n }\n\n if mode == Mode::Cockroach && sort.yes() {\n\n formatted\n\n .iter()\n\n .flat_map(|s| {\n\n crate::parser::split_cols(&s, types.len())\n\n .into_iter()\n\n .map(ToString::to_string)\n\n .collect::<Vec<_>>()\n\n })\n\n .collect()\n", "file_path": "src/sqllogictest/src/runner.rs", "rank": 38, "score": 411501.6194776986 }, { "content": "fn bench_sort_row(rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n let rows = rows.into_iter().map(Row::pack).collect::<Vec<_>>();\n\n b.iter_with_setup(|| rows.clone(), |mut rows| rows.sort())\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 39, "score": 411182.44184246316 }, { "content": "// This function is derived from code in the avro_rs project. Update the license\n\n// header on this file accordingly if you move it to a new home.\n\npub fn from_json(json: &JsonValue, schema: SchemaNode) -> Result<Value, String> {\n\n match (json, schema.inner) {\n\n (JsonValue::Null, SchemaPiece::Null) => Ok(Value::Null),\n\n (JsonValue::Bool(b), SchemaPiece::Boolean) => Ok(Value::Boolean(*b)),\n\n (JsonValue::Number(ref n), SchemaPiece::Int) => Ok(Value::Int(\n\n n.as_i64().unwrap().try_into().map_err_to_string()?,\n\n )),\n\n (JsonValue::Number(ref n), SchemaPiece::Long) => Ok(Value::Long(n.as_i64().unwrap())),\n\n (JsonValue::Number(ref n), SchemaPiece::Float) => {\n\n Ok(Value::Float(n.as_f64().unwrap() as f32))\n\n }\n\n (JsonValue::Number(ref n), SchemaPiece::Double) => Ok(Value::Double(n.as_f64().unwrap())),\n\n (JsonValue::Number(ref n), SchemaPiece::Date) => Ok(Value::Date(\n\n chrono::NaiveDate::from_ymd(1970, 1, 1) + chrono::Duration::days(n.as_i64().unwrap()),\n\n )),\n\n (JsonValue::Number(ref n), SchemaPiece::TimestampMilli) => {\n\n let ts = n.as_i64().unwrap();\n\n Ok(Value::Timestamp(chrono::NaiveDateTime::from_timestamp(\n\n ts / 1_000,\n\n ((ts % 1_000) * 1_000_000) as u32,\n", "file_path": 
"src/testdrive/src/format/avro.rs", "rank": 40, "score": 410940.7172841738 }, { "content": "fn benchmark(schema: &Schema, record: &Value, s: &str, count: usize, runs: usize) {\n\n let mut records = Vec::new();\n\n for __ in 0..count {\n\n records.push(record.clone());\n\n }\n\n\n\n let mut durations = Vec::with_capacity(runs);\n\n\n\n let mut bytes = None;\n\n for _ in 0..runs {\n\n let records = records.clone();\n\n\n\n let start = Instant::now();\n\n let mut writer = Writer::new(schema.clone(), Vec::new());\n\n writer.extend(records.into_iter()).unwrap();\n\n\n\n let duration = Instant::now().duration_since(start);\n\n durations.push(duration);\n\n\n\n bytes = Some(writer.into_inner());\n", "file_path": "src/avro/examples/benchmark.rs", "rank": 41, "score": 410811.61670353694 }, { "content": "fn bench_filter_packed(filter: Datum, rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n let filter = Row::pack_slice(&[filter]);\n\n let rows = rows.into_iter().map(Row::pack).collect::<Vec<_>>();\n\n b.iter_with_setup(\n\n || rows.clone(),\n\n |mut rows| rows.retain(|row| row.unpack()[0] == filter.unpack_first()),\n\n )\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 42, "score": 410646.26568044815 }, { "content": "fn bench_filter_unpacked(filter: Datum, rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n let rows = rows.into_iter().map(Row::pack).collect::<Vec<_>>();\n\n b.iter_with_setup(\n\n || rows.clone(),\n\n |mut rows| rows.retain(|row| row.unpack()[0] == filter),\n\n )\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 43, "score": 410646.26568044815 }, { "content": "/// Writes a [`NaiveDate`] to `buf`.\n\npub fn format_date<F>(buf: &mut F, d: NaiveDate) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", d);\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 44, "score": 409741.94249304815 }, { "content": "/// Writes a [`NaiveDateTime`] timestamp to `buf`.\n\npub fn format_time<F>(buf: &mut F, t: 
NaiveTime) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", t.format(\"%H:%M:%S\"));\n\n format_nanos_to_micros(buf, t.nanosecond());\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 45, "score": 409741.839311398 }, { "content": "pub fn format_uuid<F>(buf: &mut F, uuid: Uuid) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", uuid);\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 46, "score": 409735.21383931907 }, { "content": "pub fn format_bytes<F>(buf: &mut F, bytes: &[u8]) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"\\\\x{}\", hex::encode(bytes));\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 47, "score": 409735.21383931907 }, { "content": "pub fn format_interval<F>(buf: &mut F, iv: Interval) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", iv);\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 48, "score": 409735.21383931907 }, { "content": "/// Flatten the provided Value using keys prefixed with the given String. 
New column names and\n\n/// their Postgres types will be appended to the provided vector.\n\nfn flatten_single_value(flattened: &mut Vec<(String, PostgresType)>, key: String, value: Value) {\n\n match value {\n\n Value::Array(v) => {\n\n v.into_iter().enumerate().for_each(|(i, v)| {\n\n let new_key = format!(\"{}__{}\", key, i);\n\n flatten_single_value(flattened, new_key, v);\n\n });\n\n }\n\n Value::Bool(_) => flattened.push((key, PostgresType::Other)),\n\n Value::Number(_) => flattened.push((key, PostgresType::Other)),\n\n Value::String(_) => flattened.push((key, PostgresType::Text)),\n\n Value::Object(v) => {\n\n v.into_iter().for_each(|(k, v)| {\n\n let new_key = format!(\"{}__{}\", key, k);\n\n flatten_single_value(flattened, new_key, v);\n\n });\n\n }\n\n Value::Null => {}\n\n }\n\n}\n\n\n\n/// Generates SQL commands to create views on json sources, flattened for easier\n\n/// access. String fields will be converted to text to avoid extra quotes.\n", "file_path": "play/json-flattened-view-gen/src/main.rs", "rank": 49, "score": 407669.7087953032 }, { "content": "pub fn parse_schema(schema: &str) -> anyhow::Result<Schema> {\n\n let schema = serde_json::from_str(schema)?;\n\n Ok(Schema::parse(&schema)?)\n\n}\n\n\n", "file_path": "src/interchange/src/avro/schema.rs", "rank": 50, "score": 406464.0414049594 }, { "content": "fn write_fn_name(out: &mut String, s: &str) {\n\n // Simplify associated type names so that e.g. 
`T::FooBar` becomes\n\n // `visit_foo_bar`.\n\n let s = s.splitn(2, \"::\").last().unwrap();\n\n for c in s.chars() {\n\n if c.is_ascii_uppercase() {\n\n out.push('_');\n\n out.push(c.to_ascii_lowercase());\n\n } else {\n\n out.push(c);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 51, "score": 406032.5628229175 }, { "content": "fn encode_long(i: i64, buffer: &mut Vec<u8>) {\n\n zig_i64(i, buffer)\n\n}\n\n\n", "file_path": "src/avro/src/encode.rs", "rank": 52, "score": 403816.75684823876 }, { "content": "fn bench_pack_pack(rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n b.iter(|| rows.iter().map(Row::pack).collect::<Vec<_>>())\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 53, "score": 403395.0992333249 }, { "content": "fn bench_sort_unpack(rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n let rows = rows.into_iter().map(Row::pack).collect::<Vec<_>>();\n\n b.iter_with_setup(\n\n || rows.clone(),\n\n |mut rows| {\n\n #[allow(clippy::unnecessary_sort_by)]\n\n rows.sort_by(move |a, b| a.unpack().cmp(&b.unpack()));\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 54, "score": 403395.0992333249 }, { "content": "fn bench_sort_iter(rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n let rows = rows.into_iter().map(Row::pack).collect::<Vec<_>>();\n\n b.iter_with_setup(\n\n || rows.clone(),\n\n |mut rows| {\n\n rows.sort_by(move |a, b| {\n\n for (a, b) in a.iter().zip(b.iter()) {\n\n match a.cmp(&b) {\n\n Ordering::Equal => (),\n\n non_equal => return non_equal,\n\n }\n\n }\n\n Ordering::Equal\n\n });\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 55, "score": 403395.0992333249 }, { "content": "fn bench_sort_unpacked(rows: Vec<Vec<Datum>>, b: &mut Bencher) {\n\n let arity = rows[0].len();\n\n let rows = rows.into_iter().map(Row::pack).collect::<Vec<_>>();\n\n b.iter_with_setup(\n\n || rows.clone(),\n\n |rows| {\n\n let mut unpacked = vec![];\n\n for row in &rows {\n\n unpacked.extend(row);\n\n 
}\n\n let mut slices = unpacked.chunks(arity).collect::<Vec<_>>();\n\n slices.sort();\n\n slices.into_iter().map(Row::pack).collect::<Vec<_>>()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/repr/benches/row.rs", "rank": 56, "score": 403395.0992333249 }, { "content": "/// Writes a [`NaiveDateTime`] timestamp to `buf`.\n\npub fn format_timestamp<F>(buf: &mut F, ts: NaiveDateTime) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", ts.format(\"%Y-%m-%d %H:%M:%S\"));\n\n format_nanos_to_micros(buf, ts.timestamp_subsec_nanos());\n\n // This always needs escaping because of the whitespace\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 57, "score": 399984.656200396 }, { "content": "fn gen_fold_element(buf: &mut CodegenBuf, binding: &str, ty: &Type) {\n\n match ty {\n\n Type::Primitive => buf.write(binding),\n\n Type::Abstract(ty) => {\n\n let fn_name = fold_fn_name(ty);\n\n buf.write(f!(\"folder.{fn_name}({binding})\"));\n\n }\n\n Type::Option(ty) => {\n\n buf.write(f!(\"{binding}.map(|v| \"));\n\n gen_fold_element(buf, \"v\", ty);\n\n buf.write(\")\")\n\n }\n\n Type::Vec(ty) => {\n\n buf.write(f!(\"{binding}.into_iter().map(|v| \"));\n\n gen_fold_element(buf, \"v\", ty);\n\n buf.write(\").collect()\");\n\n }\n\n Type::Box(ty) => {\n\n buf.write(\"Box::new(\");\n\n gen_fold_element(buf, &f!(\"*{binding}\"), ty);\n\n buf.write(\")\");\n\n }\n\n Type::Local(s) => {\n\n let fn_name = fold_fn_name(s);\n\n buf.write(f!(\"folder.{fn_name}({binding})\"));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 58, "score": 399775.1884431113 }, { "content": "pub fn format_numeric<F>(buf: &mut F, n: &OrderedDecimal<Numeric>) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", n.0.to_standard_notation_string());\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 59, "score": 397859.33297809283 }, { "content": "fn lex_to_adjacent_string(buf: &mut LexBuf) -> 
bool {\n\n // Adjacent string literals that are separated by whitespace are\n\n // concatenated if and only if that whitespace contains at least one newline\n\n // character. This bizzare rule matches PostgreSQL and the SQL standard.\n\n let whitespace = buf.take_while(|ch| ch.is_ascii_whitespace());\n\n whitespace.contains(&['\\n', '\\r'][..]) && buf.consume('\\'')\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 60, "score": 397528.67239665834 }, { "content": "fn pcf_string(s: &str) -> String {\n\n format!(\"\\\"{}\\\"\", s)\n\n}\n\n\n", "file_path": "src/avro/src/schema.rs", "rank": 61, "score": 393507.6328111688 }, { "content": "/// Writes a [`DateTime<Utc>`] timestamp to `buf`.\n\npub fn format_timestamptz<F>(buf: &mut F, ts: DateTime<Utc>) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", ts.format(\"%Y-%m-%d %H:%M:%S\"));\n\n format_nanos_to_micros(buf, ts.timestamp_subsec_nanos());\n\n write!(buf, \"+00\");\n\n // This always needs escaping because of the whitespace\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 62, "score": 393093.195653611 }, { "content": "fn jsonb_type(d: Datum<'_>) -> &'static str {\n\n match d {\n\n Datum::JsonNull => \"null\",\n\n Datum::False | Datum::True => \"boolean\",\n\n Datum::String(_) => \"string\",\n\n Datum::Int64(_) | Datum::Float64(_) => \"numeric\",\n\n Datum::List(_) => \"array\",\n\n Datum::Map(_) => \"object\",\n\n _ => unreachable!(\"jsonb_type called on invalid datum {:?}\", d),\n\n }\n\n}\n\n\n", "file_path": "src/expr/src/scalar/func.rs", "rank": 63, "score": 391107.87140064675 }, { "content": "fn normalize_type_name(type_name: &str) -> String {\n\n // Normalize the type name by stripping whitespace.\n\n let type_name = type_name.replace(\" \", \"\");\n\n // Eliminate outer `Option<>` and `Box<>` from type names because they are\n\n // inconsequential when it comes to creating a correctly deserializable JSON\n\n // string.\n\n let 
type_name = if type_name.starts_with(\"Option<\") && type_name.ends_with('>') {\n\n &type_name[7..(type_name.len() - 1)]\n\n } else if type_name.starts_with(\"Box<\") && type_name.ends_with('>') {\n\n &type_name[4..(type_name.len() - 1)]\n\n } else {\n\n &type_name\n\n };\n\n type_name.to_string()\n\n}\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 64, "score": 390208.62874085957 }, { "content": "fn generate_series_int64(start: Datum, stop: Datum) -> impl Iterator<Item = (Row, Diff)> {\n\n let start = start.unwrap_int64();\n\n let stop = stop.unwrap_int64();\n\n (start..=stop).map(move |i| (Row::pack_slice(&[Datum::Int64(i)]), 1))\n\n}\n\n\n", "file_path": "src/expr/src/relation/func.rs", "rank": 65, "score": 389081.7043157476 }, { "content": "fn generate_series_int32(start: Datum, stop: Datum) -> impl Iterator<Item = (Row, Diff)> {\n\n let start = start.unwrap_int32();\n\n let stop = stop.unwrap_int32();\n\n (start..=stop).map(move |i| (Row::pack_slice(&[Datum::Int32(i)]), 1))\n\n}\n\n\n", "file_path": "src/expr/src/relation/func.rs", "rank": 66, "score": 389081.7043157476 }, { "content": "pub fn dbz_format(rp: &mut Row, dp: DiffPair<Row>) -> Row {\n\n if let Some(before) = dp.before {\n\n rp.push_list_with(|rp| rp.extend_by_row(&before));\n\n } else {\n\n rp.push(Datum::Null);\n\n }\n\n if let Some(after) = dp.after {\n\n rp.push_list_with(|rp| rp.extend_by_row(&after));\n\n } else {\n\n rp.push(Datum::Null);\n\n }\n\n rp.finish_and_reuse()\n\n}\n\n\n", "file_path": "src/interchange/src/envelopes.rs", "rank": 67, "score": 386805.0176290097 }, { "content": "fn bench_write(b: &mut test::Bencher, make_record: &Fn() -> (Schema, Value)) {\n\n let (schema, record) = make_record();\n\n b.iter(|| to_avro_datum(&schema, record.clone()));\n\n}\n\n\n", "file_path": "src/avro/benches/single.rs", "rank": 68, "score": 386582.5555646837 }, { "content": "fn encode_element(buf: &mut BytesMut, elem: Option<&Value>, ty: &Type) -> Result<(), io::Error> {\n\n match elem 
{\n\n None => buf.put_i32(-1),\n\n Some(elem) => {\n\n let base = buf.len();\n\n buf.put_i32(0);\n\n elem.encode_binary(ty, buf)?;\n\n let len = pg_len(\"encoded element\", buf.len() - base - 4)?;\n\n buf[base..base + 4].copy_from_slice(&len.to_be_bytes());\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/pgrepr/src/value.rs", "rank": 69, "score": 386123.6568969224 }, { "content": "#[derive(Debug, PartialEq, Clone)]\n\nstruct PropertizedArray(Row, Vec<PropertizedDatum>);\n\n\n", "file_path": "src/repr/tests/rows.rs", "rank": 70, "score": 385647.39897707454 }, { "content": "/// Normalizes an identifier that represents a column name.\n\npub fn column_name(id: Ident) -> ColumnName {\n\n ColumnName::from(ident(id))\n\n}\n\n\n", "file_path": "src/sql/src/normalize.rs", "rank": 71, "score": 382923.96585193573 }, { "content": "pub fn upsert_format(dps: Vec<DiffPair<Row>>) -> Option<Row> {\n\n let dp = dps.expect_element(\n\n \"primary key error: expected at most one update \\\n\n per key and timestamp. 
This can happen when the configured sink key is \\\n\n not a primary key of the sinked relation.\",\n\n );\n\n dp.after\n\n}\n", "file_path": "src/interchange/src/envelopes.rs", "rank": 72, "score": 382779.6973934618 }, { "content": "fn bench_read(b: &mut test::Bencher, make_record: &Fn() -> (Schema, Value), n_records: usize) {\n\n let (schema, record) = make_record();\n\n let records = make_records(&record, n_records);\n\n let bytes = write(&schema, &records);\n\n println!(\"bytes.len() = {}\", bytes.len());\n\n println!(\"records.len() = {}\", records.len());\n\n b.iter(|| read(&schema, &bytes));\n\n}\n\n\n", "file_path": "src/avro/benches/serde.rs", "rank": 73, "score": 382644.4874454774 }, { "content": "fn bench_write(b: &mut test::Bencher, make_record: &Fn() -> (Schema, Value), n_records: usize) {\n\n let (schema, record) = make_record();\n\n let records = make_records(&record, n_records);\n\n b.iter(|| write(&schema, &records));\n\n}\n\n\n", "file_path": "src/avro/benches/serde.rs", "rank": 74, "score": 382644.4874454774 }, { "content": "fn is_reserved_name(name: &str) -> bool {\n\n name.starts_with(\"mz_\") || name.starts_with(\"pg_\")\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Op {\n\n CreateDatabase {\n\n name: String,\n\n oid: u32,\n\n },\n\n CreateSchema {\n\n database_name: DatabaseSpecifier,\n\n schema_name: String,\n\n oid: u32,\n\n },\n\n CreateRole {\n\n name: String,\n\n oid: u32,\n\n },\n\n CreateItem {\n", "file_path": "src/coord/src/catalog.rs", "rank": 75, "score": 380507.4590706517 }, { "content": "// TODO: Convert to an `impl Iterator` return value.\n\npub fn csv_extract(a: Datum, n_cols: usize) -> Vec<(Row, Diff)> {\n\n let bytes = a.unwrap_str().as_bytes();\n\n let mut row = Row::default();\n\n let mut csv_reader = csv::ReaderBuilder::new()\n\n .has_headers(false)\n\n .from_reader(bytes);\n\n csv_reader\n\n .records()\n\n .filter_map(|res| match res {\n\n Ok(sr) if sr.len() == n_cols => {\n\n row.extend(sr.iter().map(|s| 
Datum::String(s)));\n\n Some((row.finish_and_reuse(), 1))\n\n }\n\n _ => None,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/expr/src/relation/func.rs", "rank": 76, "score": 380493.87642691453 }, { "content": "/// Changes the `name` used in an item's `CREATE` statement. To complete a\n\n/// rename operation, you must also call `create_stmt_rename_refs` on all dependent\n\n/// items.\n\npub fn create_stmt_rename(create_stmt: &mut Statement<Raw>, to_item_name: String) {\n\n // TODO(sploiselle): Support renaming schemas and databases.\n\n match create_stmt {\n\n Statement::CreateIndex(CreateIndexStatement { name, .. }) => {\n\n *name = Some(Ident::new(to_item_name));\n\n }\n\n Statement::CreateSink(CreateSinkStatement { name, .. })\n\n | Statement::CreateSource(CreateSourceStatement { name, .. })\n\n | Statement::CreateView(CreateViewStatement {\n\n definition: ViewDefinition { name, .. },\n\n ..\n\n })\n\n | Statement::CreateTable(CreateTableStatement { name, .. }) => {\n\n // The last name in an ObjectName is the item name. 
The item name\n\n // does not have a fixed index.\n\n // TODO: https://github.com/MaterializeInc/materialize/issues/5591\n\n let object_name_len = name.0.len() - 1;\n\n name.0[object_name_len] = Ident::new(to_item_name);\n\n }\n\n _ => unreachable!(\"Internal error: only catalog items can be renamed\"),\n\n }\n\n}\n\n\n", "file_path": "src/sql/src/ast/transform.rs", "rank": 77, "score": 380179.5436959614 }, { "content": "fn lex_embedded_element<'a>(buf: &mut LexBuf<'a>) -> Result<Cow<'a, str>, String> {\n\n let pos = buf.pos();\n\n assert!(matches!(buf.next(), Some('{')));\n\n let mut depth = 1;\n\n let mut in_escape = false;\n\n while depth > 0 {\n\n match buf.next() {\n\n Some('\\\\') => {\n\n buf.next(); // Next character is escaped, so ignore it\n\n }\n\n Some('\"') => in_escape = !in_escape, // Begin or end escape\n\n Some('{') if !in_escape => depth += 1,\n\n Some('}') if !in_escape => depth -= 1,\n\n Some(_) => (),\n\n None => bail!(\"unterminated embedded element\"),\n\n }\n\n }\n\n let s = &buf.inner()[pos..buf.pos()];\n\n Ok(Cow::Borrowed(s))\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 78, "score": 378801.7656220215 }, { "content": "fn lex_quoted_element<'a>(buf: &mut LexBuf<'a>) -> Result<Cow<'a, str>, String> {\n\n assert!(buf.consume('\"'));\n\n let s = buf.take_while(|ch| !matches!(ch, '\"' | '\\\\'));\n\n\n\n // `Cow::Borrowed` optimization for quoted strings without escapes\n\n if let Some('\"') = buf.peek() {\n\n buf.next();\n\n return Ok(s.into());\n\n }\n\n\n\n let mut s = s.to_string();\n\n loop {\n\n match buf.next() {\n\n Some('\\\\') => match buf.next() {\n\n Some(c) => s.push(c),\n\n None => bail!(\"unterminated quoted string\"),\n\n },\n\n Some('\"') => break,\n\n Some(c) => s.push(c),\n\n None => bail!(\"unterminated quoted string\"),\n\n }\n\n }\n\n Ok(s.into())\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 79, "score": 378801.7656220215 }, { "content": "fn gen_visit_element(c: &VisitConfig, buf: &mut 
CodegenBuf, binding: &str, ty: &Type) {\n\n match ty {\n\n Type::Primitive => (),\n\n Type::Abstract(ty) => {\n\n let fn_name = visit_fn_name(c, ty);\n\n buf.writeln(f!(\"visitor.{fn_name}({binding});\"));\n\n }\n\n Type::Option(ty) => {\n\n buf.start_block(f!(\"if let Some(v) = {binding}\"));\n\n gen_visit_element(c, buf, \"v\", ty);\n\n buf.end_block();\n\n }\n\n Type::Vec(ty) => {\n\n buf.start_block(f!(\"for v in {binding}\"));\n\n gen_visit_element(c, buf, \"v\", ty);\n\n buf.end_block();\n\n }\n\n Type::Box(ty) => {\n\n let binding = match c.mutable {\n\n true => format!(\"&mut *{}\", binding),\n", "file_path": "src/walkabout/src/gen.rs", "rank": 80, "score": 375921.9604893639 }, { "content": "pub fn extract_data_columns<'a>(schema: &'a Schema) -> anyhow::Result<SchemaNode<'a>> {\n\n let data_name = FullName::from_parts(\"data\", Some(\"com.materialize.cdc\"), \"\");\n\n let data_schema = &schema\n\n .try_lookup_name(&data_name)\n\n .ok_or_else(|| anyhow!(\"record not found: {}\", data_name))?\n\n .piece;\n\n Ok(SchemaNode {\n\n root: &schema,\n\n inner: data_schema,\n\n name: None,\n\n })\n\n}\n\n\n\n/// Collected state to encode update batches and progress statements.\n\n#[derive(Debug)]\n\npub struct Encoder {\n\n columns: Vec<(ColumnName, ColumnType)>,\n\n schema: Schema,\n\n}\n\n\n", "file_path": "src/interchange/src/avro/envelope_cdc_v2.rs", "rank": 81, "score": 374382.8573909754 }, { "content": "fn pcf_map(schema: &Map<String, serde_json::Value>) -> String {\n\n // Look for the namespace variant up front.\n\n let ns = schema.get(\"namespace\").and_then(|v| v.as_str());\n\n let mut fields = Vec::new();\n\n for (k, v) in schema {\n\n // Reduce primitive types to their simple form. 
([PRIMITIVE] rule)\n\n if schema.len() == 1 && k == \"type\" {\n\n // Invariant: function is only callable from a valid schema, so this is acceptable.\n\n if let serde_json::Value::String(s) = v {\n\n return pcf_string(s);\n\n }\n\n }\n\n\n\n // Strip out unused fields ([STRIP] rule)\n\n if field_ordering_position(k).is_none() {\n\n continue;\n\n }\n\n\n\n // Fully qualify the name, if it isn't already ([FULLNAMES] rule).\n\n if k == \"name\" {\n", "file_path": "src/avro/src/schema.rs", "rank": 82, "score": 374011.2856853364 }, { "content": "fn item_generics(item: &Item, suffix: &str) -> String {\n\n if item.generics().is_empty() {\n\n \"\".into()\n\n } else {\n\n let generics = item\n\n .generics()\n\n .iter()\n\n .map(|g| f!(\"{g.name}{suffix}\"))\n\n .join(\", \");\n\n format!(\"<{}>\", generics)\n\n }\n\n}\n", "file_path": "src/walkabout/src/gen.rs", "rank": 83, "score": 373899.165783843 }, { "content": "fn unnest_list<'a>(a: Datum<'a>) -> impl Iterator<Item = (Row, Diff)> + 'a {\n\n a.unwrap_list()\n\n .iter()\n\n .map(move |e| (Row::pack_slice(&[e]), 1))\n\n}\n\n\n\nimpl fmt::Display for AggregateFunc {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n AggregateFunc::MaxNumeric => f.write_str(\"max\"),\n\n AggregateFunc::MaxInt16 => f.write_str(\"max\"),\n\n AggregateFunc::MaxInt32 => f.write_str(\"max\"),\n\n AggregateFunc::MaxInt64 => f.write_str(\"max\"),\n\n AggregateFunc::MaxFloat32 => f.write_str(\"max\"),\n\n AggregateFunc::MaxFloat64 => f.write_str(\"max\"),\n\n AggregateFunc::MaxBool => f.write_str(\"max\"),\n\n AggregateFunc::MaxString => f.write_str(\"max\"),\n\n AggregateFunc::MaxDate => f.write_str(\"max\"),\n\n AggregateFunc::MaxTimestamp => f.write_str(\"max\"),\n\n AggregateFunc::MaxTimestampTz => f.write_str(\"max\"),\n", "file_path": "src/expr/src/relation/func.rs", "rank": 84, "score": 373733.28756567644 }, { "content": "fn tokenize_timezone(value: &str) -> Result<Vec<TimeStrToken>, String> {\n\n let mut toks: 
Vec<TimeStrToken> = vec![];\n\n let mut num_buf = String::with_capacity(4);\n\n // If the timezone string has a colon, we need to parse all numbers naively.\n\n // Otherwise we need to parse long sequences of digits as [..hhhhmm]\n\n let split_nums: bool = !value.contains(':');\n\n\n\n // Takes a string and tries to parse it as a number token and insert it into\n\n // the token list\n\n fn parse_num(\n\n toks: &mut Vec<TimeStrToken>,\n\n n: &str,\n\n split_nums: bool,\n\n idx: usize,\n\n ) -> Result<(), String> {\n\n if n.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let (first, second) = if n.len() > 2 && split_nums {\n", "file_path": "src/repr/src/adt/datetime.rs", "rank": 85, "score": 371869.24887307326 }, { "content": "/// Convert a Datum to a String such that [get_datum_from_str] can convert the\n\n/// String back into the same Datum.\n\n///\n\n/// Currently supports only Datums supported by [get_datum_from_str].\n\npub fn datum_to_test_spec(datum: Datum) -> String {\n\n let result = format!(\"{}\", datum);\n\n match datum {\n\n Datum::Timestamp(_) => result.quoted().to_string(),\n\n _ => result,\n\n }\n\n}\n\n\n", "file_path": "src/repr-test-util/src/lib.rs", "rank": 86, "score": 371078.64936066006 }, { "content": "/// Changes `\"\\\"foo\\\"\"` to `\"foo\"` if scalar type is String\n\npub fn unquote_string(litval: &str, littyp: &ScalarType) -> String {\n\n if littyp == &ScalarType::String {\n\n lowertest::unquote(litval)\n\n } else {\n\n litval.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/repr-test-util/src/lib.rs", "rank": 87, "score": 370805.85568272707 }, { "content": "fn jsonb_object_keys<'a>(a: Datum<'a>) -> impl Iterator<Item = (Row, Diff)> + 'a {\n\n let map = match a {\n\n Datum::Map(dict) => dict,\n\n _ => repr::DatumMap::empty(),\n\n };\n\n\n\n map.iter()\n\n .map(move |(k, _)| (Row::pack_slice(&[Datum::String(k)]), 1))\n\n}\n\n\n", "file_path": "src/expr/src/relation/func.rs", "rank": 88, "score": 368422.0655292766 }, { "content": "// Used to 
define the ordering and inclusion of fields.\n\nfn field_ordering_position(field: &str) -> Option<usize> {\n\n let v = match field {\n\n \"name\" => 1,\n\n \"type\" => 2,\n\n \"fields\" => 3,\n\n \"symbols\" => 4,\n\n \"items\" => 5,\n\n \"values\" => 6,\n\n \"size\" => 7,\n\n _ => return None,\n\n };\n\n\n\n Some(v)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use types::Record;\n\n use types::ToAvro;\n\n\n", "file_path": "src/avro/src/schema.rs", "rank": 89, "score": 368261.4341716933 }, { "content": "fn decode_row(row: Row, context: Context) -> Result<Vec<String>, String> {\n\n enum ArrayElement<T> {\n\n Null,\n\n NonNull(T),\n\n }\n\n\n\n impl<T> fmt::Display for ArrayElement<T>\n\n where\n\n T: fmt::Display,\n\n {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n ArrayElement::Null => f.write_str(\"NULL\"),\n\n ArrayElement::NonNull(t) => t.fmt(f),\n\n }\n\n }\n\n }\n\n\n\n impl<'a, T> FromSql<'a> for ArrayElement<T>\n\n where\n", "file_path": "src/testdrive/src/action/sql.rs", "rank": 90, "score": 366608.8842659856 }, { "content": "fn make_records(record: &Value, count: usize) -> Vec<Value> {\n\n let mut records = Vec::new();\n\n for _ in 0..count {\n\n records.push(record.clone());\n\n }\n\n records\n\n}\n\n\n", "file_path": "src/avro/benches/serde.rs", "rank": 91, "score": 366493.74193360034 }, { "content": "pub fn zig_i64(n: i64, buffer: &mut Vec<u8>) {\n\n encode_variable(((n << 1) ^ (n >> 63)) as u64, buffer)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 92, "score": 364104.6219524365 }, { "content": "pub fn zag_i32<R: Read>(reader: &mut R) -> Result<i32, AvroError> {\n\n let i = zag_i64(reader)?;\n\n if i < i64::from(i32::min_value()) || i > i64::from(i32::max_value()) {\n\n Err(AvroError::Decode(DecodeError::I32OutOfRange(i)))\n\n } else {\n\n Ok(i as i32)\n\n }\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 93, "score": 363280.0415995479 }, { "content": "/// Parses a **valid** avro schema into the 
Parsing Canonical Form.\n\n/// <https://avro.apache.org/docs/1.8.2/spec.html#Parsing+Canonical+Form+for+Schemas>\n\nfn parsing_canonical_form(schema: &serde_json::Value) -> String {\n\n match schema {\n\n serde_json::Value::Object(map) => pcf_map(map),\n\n serde_json::Value::String(s) => pcf_string(s),\n\n serde_json::Value::Array(v) => pcf_array(v),\n\n serde_json::Value::Number(n) => n.to_string(),\n\n _ => unreachable!(\"{:?} cannot yet be printed in canonical form\", schema),\n\n }\n\n}\n\n\n", "file_path": "src/avro/src/schema.rs", "rank": 94, "score": 362381.1635597861 }, { "content": "/// Construct a Batch that depends on `state`\n\n///\n\n/// In particular this will have somewhat sensible values for all fields, and\n\n/// will be the next time slice after `state.last_time`, incrementing `last_time` to now\n\npub fn random_batch(rng: &mut impl Rng, state: &mut RecordState) -> Batch {\n\n let id = Uuid::new_v4();\n\n\n\n let dur_val = rng.gen_range(15..1_000);\n\n let dur = chrono::Duration::seconds(dur_val);\n\n let interval_start_time = state.last_time.clone();\n\n let interval_start = protobuf_timestamp(state.last_time);\n\n state.last_time = state.last_time.checked_add_signed(dur).unwrap();\n\n let interval_end = protobuf_timestamp(state.last_time);\n\n\n\n let mut records = RepeatedField::<Record>::new();\n\n\n\n for _ in 0..rng.gen_range(1..50) {\n\n records.push(random_record(rng, interval_start_time, dur_val));\n\n }\n\n\n\n let mut batch = Batch::new();\n\n batch.set_id(id.to_string());\n\n batch.set_interval_start(interval_start);\n\n batch.set_interval_end(interval_end);\n\n batch.set_records(records);\n\n\n\n batch\n\n}\n\n\n", "file_path": "demo/billing/src/randomizer.rs", "rank": 95, "score": 361294.75284688344 }, { "content": "fn encode_bytes<B: AsRef<[u8]> + ?Sized>(s: &B, buffer: &mut Vec<u8>) {\n\n let bytes = s.as_ref();\n\n encode(\n\n &Value::Long(bytes.len() as i64),\n\n &Schema {\n\n named: vec![],\n\n indices: Default::default(),\n\n 
top: SchemaPiece::Long.into(),\n\n },\n\n buffer,\n\n );\n\n buffer.extend_from_slice(bytes);\n\n}\n\n\n", "file_path": "src/avro/src/encode.rs", "rank": 96, "score": 360832.23009756557 }, { "content": "/// Changes `\"\\\"foo\\\"\"` to `\"foo\"`\n\npub fn unquote(s: &str) -> String {\n\n if s.starts_with('\"') && s.ends_with('\"') {\n\n s[1..(s.len() - 1)].replace(\"\\\\\\\"\", \"\\\"\")\n\n } else {\n\n s.to_string()\n\n }\n\n}\n\n\n\n/* #endregion */\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 97, "score": 360567.9090768887 }, { "content": "fn pcf_array(arr: &[serde_json::Value]) -> String {\n\n let inter = arr\n\n .iter()\n\n .map(parsing_canonical_form)\n\n .collect::<Vec<String>>()\n\n .join(\",\");\n\n format!(\"[{}]\", inter)\n\n}\n\n\n", "file_path": "src/avro/src/schema.rs", "rank": 98, "score": 359978.4431022927 }, { "content": "fn encode_variable(mut z: u64, buffer: &mut Vec<u8>) {\n\n loop {\n\n if z <= 0x7F {\n\n buffer.push((z & 0x7F) as u8);\n\n break;\n\n } else {\n\n buffer.push((0x80 | (z & 0x7F)) as u8);\n\n z >>= 7;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 99, "score": 358909.14916485816 } ]
Rust
src/comglue/glue.rs
estokes/netidx-excel
959d7b8f02188970a26b8130a4072767b22124bb
use crate::{ comglue::{ dispatch::IRTDUpdateEventWrap, interface::{IDispatch, IRTDServer, IRTDUpdateEvent}, variant::{string_from_wstr, SafeArray, Variant}, }, server::{Server, TopicId}, }; use anyhow::{bail, Result}; use com::sys::{HRESULT, IID, NOERROR}; use log::{debug, error}; use netidx::{ path::Path, subscriber::{Event, Value}, }; use windows::Win32::System::Com::{ ITypeInfo, DISPPARAMS, EXCEPINFO, SAFEARRAY, SAFEARRAYBOUND, VARIANT, }; struct Params(*mut DISPPARAMS); impl Drop for Params { fn drop(&mut self) { unsafe { for i in 0..self.len() { if let Ok(v) = self.get_mut(i) { *v = Variant::new(); } } } } } impl Params { fn new(ptr: *mut DISPPARAMS) -> Result<Self> { if ptr.is_null() { bail!("invalid params") } Ok(Params(ptr)) } unsafe fn len(&self) -> usize { (*self.0).cArgs as usize } unsafe fn get(&self, i: usize) -> Result<&Variant> { if i < self.len() { Ok(Variant::ref_from_raw((*self.0).rgvarg.offset(i as isize))) } else { bail!("no param at index: {}", i) } } unsafe fn get_mut(&self, i: usize) -> Result<&mut Variant> { if i < self.len() { Ok(Variant::ref_from_raw_mut((*self.0).rgvarg.offset(i as isize))) } else { bail!("no param at index: {}", i) } } } unsafe fn dispatch_server_start(server: &Server, params: Params) -> Result<()> { server.server_start(IRTDUpdateEventWrap::new(params.get(0)?.try_into()?)?); Ok(()) } unsafe fn dispatch_connect_data(server: &Server, params: Params) -> Result<()> { let topic_id = TopicId(params.get(2)?.try_into()?); let topics: &SafeArray = params.get(1)?.try_into()?; let topics = topics.read()?; let path = match topics.iter()?.next() { None => bail!("not enough topics"), Some(v) => { let path: String = v.try_into()?; Path::from(path) } }; Ok(server.connect_data(topic_id, path)?) 
} fn variant_of_value(v: &Value) -> Variant { match v { Value::I32(v) | Value::Z32(v) => Variant::from(*v), Value::U32(v) | Value::V32(v) => Variant::from(*v), Value::I64(v) | Value::Z64(v) => Variant::from(*v), Value::U64(v) | Value::V64(v) => Variant::from(*v), Value::F32(v) => Variant::from(*v), Value::F64(v) => Variant::from(*v), Value::True => Variant::from(true), Value::False => Variant::from(false), Value::String(s) => Variant::from(&**s), Value::Bytes(_) => Variant::from("#BIN"), Value::Null => Variant::null(), Value::Ok => Variant::from("OK"), Value::Error(e) => Variant::from(&format!("#ERR {}", &**e)), Value::DateTime(d) => Variant::from(&d.to_string()), Value::Duration(d) => Variant::from(&format!("{}s", d.as_secs_f64())), Value::Array(_) => Variant::from(&format!("{}", v)), /* sadly this doesn't work { let mut res = SafeArray::new(&[ SAFEARRAYBOUND { lLbound: 0, cElements: a.len() as u32 }, ]); { let mut res = res.write().unwrap(); for (i, v) in a.iter().enumerate() { *res.get_mut(&[i as i32]).unwrap() = variant_of_value(v); } } Variant::from(res) }*/ } } fn variant_of_event(e: &Event) -> Variant { match e { Event::Unsubscribed => Variant::from("#SUB"), Event::Update(v) => variant_of_value(v), } } unsafe fn dispatch_refresh_data( server: &Server, params: Params, result: &mut Variant, ) -> Result<()> { let ntopics = params.get_mut(0)?; let ntopics: &mut i32 = ntopics.try_into()?; let mut updates = server.refresh_data(); let len = updates.len(); *ntopics = len as i32; let mut array = SafeArray::new(&[ SAFEARRAYBOUND { lLbound: 0, cElements: 2 }, SAFEARRAYBOUND { lLbound: 0, cElements: len as u32 }, ]); { let mut wh = array.write()?; for (i, (TopicId(tid), e)) in updates.drain().enumerate() { *wh.get_mut(&[0, i as i32])? = Variant::from(tid); *wh.get_mut(&[1, i as i32])? 
= variant_of_event(&e); } } *result = Variant::from(array); Ok(()) } unsafe fn dispatch_disconnect_data(server: &Server, params: Params) -> Result<()> { let topic_id = TopicId(params.get(0)?.try_into()?); Ok(server.disconnect_data(topic_id)) } com::class! { #[derive(Debug)] pub class NetidxRTD: IRTDServer(IDispatch) { server: Server, } impl IDispatch for NetidxRTD { fn get_type_info_count(&self, info: *mut u32) -> HRESULT { debug!("get_type_info_count(info: {})", unsafe { *info }); if !info.is_null() { unsafe { *info = 0; } } NOERROR } fn get_type_info(&self, _lcid: u32, _type_info: *mut *mut ITypeInfo) -> HRESULT { NOERROR } pub fn get_ids_of_names( &self, riid: *const IID, names: *const *mut u16, names_len: u32, lcid: u32, ids: *mut i32 ) -> HRESULT { debug!("get_ids_of_names(riid: {:?}, names: {:?}, names_len: {}, lcid: {}, ids: {:?})", riid, names, names_len, lcid, ids); if !ids.is_null() && !names.is_null() { for i in 0..names_len { let name = unsafe { string_from_wstr(*names.offset(i as isize)) }; let name = name.to_string_lossy(); debug!("name: {}", name); match &*name { "ServerStart" => unsafe { *ids.offset(i as isize) = 0; }, "ServerTerminate" => unsafe { *ids.offset(i as isize) = 1; } "ConnectData" => unsafe { *ids.offset(i as isize) = 2; } "RefreshData" => unsafe { *ids.offset(i as isize) = 3; } "DisconnectData" => unsafe { *ids.offset(i as isize) = 4; } "Heartbeat" => unsafe { *ids.offset(i as isize) = 5; } _ => debug!("unknown method: {}", name) } } } NOERROR } unsafe fn invoke( &self, id: i32, iid: *const IID, lcid: u32, flags: u16, params: *mut DISPPARAMS, result: *mut VARIANT, exception: *mut EXCEPINFO, arg_error: *mut u32 ) -> HRESULT { debug!( "invoke(id: {}, iid: {:?}, lcid: {}, flags: {}, params: {:?}, result: {:?}, exception: {:?}, arg_error: {:?})", id, iid, lcid, flags, params, result, exception, arg_error ); assert!(!params.is_null()); let result = Variant::ref_from_raw_mut(result); let params = match Params::new(params) { Ok(p) => p, Err(e) 
=> { error!("failed to wrap params {}", e); *result = Variant::error(); return NOERROR; } }; match id { 0 => { debug!("ServerStart"); match dispatch_server_start(&self.server, params) { Ok(()) => { *result = Variant::from(1); }, Err(e) => { error!("server_start invalid arg {}", e); *result = Variant::error(); } } }, 1 => { debug!("ServerTerminate"); self.server.server_terminate(); *result = Variant::from(1); }, 2 => { debug!("ConnectData"); match dispatch_connect_data(&self.server, params) { Ok(()) => { *result = Variant::from(1); }, Err(e) => { error!("connect_data invalid arg {}", e); *result = Variant::error(); } } }, 3 => { debug!("RefreshData"); match dispatch_refresh_data(&self.server, params, result) { Ok(()) => (), Err(e) => { error!("refresh_data failed {}", e); *result = Variant::error(); } } }, 4 => { debug!("DisconnectData"); match dispatch_disconnect_data(&self.server, params) { Ok(()) => { *result = Variant::from(1); } Err(e) => { error!("disconnect_data invalid arg {}", e); *result = Variant::error() } } }, 5 => { debug!("Heartbeat"); *result = Variant::from(1); }, _ => { debug!("unknown method {} called", id) }, } NOERROR } } impl IRTDServer for NetidxRTD { fn server_start(&self, _cb: *const IRTDUpdateEvent, _res: *mut i32) -> HRESULT { debug!("ServerStart called directly"); NOERROR } fn connect_data(&self, _topic_id: i32, _topic: *const SAFEARRAY, _get_new_values: *mut VARIANT, _res: *mut VARIANT) -> HRESULT { debug!("ConnectData called directly"); NOERROR } fn refresh_data(&self, _topic_count: *mut i32, _data: *mut SAFEARRAY) -> HRESULT { debug!("RefreshData called directly"); NOERROR } fn disconnect_data(&self, _topic_id: i32) -> HRESULT { debug!("DisconnectData called directly"); NOERROR } fn heartbeat(&self, _res: *mut i32) -> HRESULT { debug!("Heartbeat called directly"); NOERROR } fn server_terminate(&self) -> HRESULT { debug!("ServerTerminate called directly"); NOERROR } } }
use crate::{ comglue::{ dispatch::IRTDUpdateEventWrap, interface::{IDispatch, IRTDServer, IRTDUpdateEvent}, variant::{string_from_wstr, SafeArray, Variant}, }, server::{Server, TopicId}, }; use anyhow::{bail, Result}; use com::sys::{HRESULT, IID, NOERROR}; use log::{debug, error}; use netidx::{ path::Path, subscriber::{Event, Value}, }; use windows::Win32::System::Com::{ ITypeInfo, DISPPARAMS, EXCEPINFO, SAFEARRAY, SAFEARRAYBOUND, VARIANT, }; struct Params(*mut DISPPARAMS); impl Drop for Params { fn drop(&mut self) { unsafe { for i in 0..self.len() { if let Ok(v) = self.get_mut(i) { *v = Variant::new(); } } } } } impl Params { fn new(ptr: *mut DISPPARAMS) -> Result<Self> { if ptr.is_null() { bail!("invalid params") } Ok(Params(ptr)) } unsafe fn len(&self) -> usize { (*self.0).cArgs as usize } unsafe fn get(&self, i: usize) -> Result<&Variant> { if i < self.len() { Ok(Variant::ref_from_raw((*self.0).rgvarg.offset(i as isize))) } else { bail!("no param at index: {}", i) } } unsafe fn get_mut(&self, i: usize) -> Result<&mut Variant> { if i < self.len() { Ok(Variant::ref_from_raw_mut((*self.0).rgvarg.offset(i as isize))) } else { bail!("no param at index: {}", i) } } } unsafe fn dispatch_server_start(server: &Server, params: Params) -> Result<()> { server.server_start(IRTDUpdateEventWrap::new(params.get(0)?.try_into()?)?); Ok(()) } unsafe fn dispatch_connect_data(server: &Server, params: Params) -> Result<()> { let topic_id = TopicId(params.get(2)?.try_into()?); let topics: &SafeArray = params.get(1)?.try_into()?; let topics = topics.read()?; let path = match topics.iter()?.next() { None => bail!("not enough topics"), Some(v) => { let path: String = v.try_into()?; Path::from(path) } }; Ok(server.connect_data(topic_id, path)?) 
} fn variant_of_value(v: &Value) -> Variant { match v { Value::I32(v) | Value::Z32(v) => Variant::from(*v), Value::U32(v) | Value::V32(v) => Variant::from(*v), Value::I64(v) | Value::Z64(v) => Variant::from(*v), Value::U64(v) | Value::V64(v) => Variant::from(*v), Value::F32(v) => Variant::from(*v), Value::F64(v) => Variant::from(*v), Value::True => Variant::from(true), Value::False => Variant::from(false), Value::String(s) => Variant::from(&**s), Value::Bytes(_) => Variant::from("#BIN"), Value::Null => Variant::null(), Value::Ok => Variant::from("OK"), Value::Error(e) => Variant::from(&format!("#ERR {}", &**e)), Value::DateTime(d) => Variant::from(&d.to_string()), Value::Duration(d) => Variant::from(&format!("{}s", d.as_secs_f64())), Value::Array(_) => Variant::from(&format!("{}", v)), /* sadly this doesn't work { let mut res = SafeArray::new(&[ SAFEARRAYBOUND { lLbound: 0, cElements: a.len() as u32 }, ]); { let mut res = res.write().unwrap(); for (i, v) in a.iter().enumerate() { *res.get_mut(&[i as i32]).unwrap() = variant_of_value(v); } } Variant::from(res) }*/ } } fn variant_of_event(e: &Event) -> Variant { match e { Event::Unsubscribed => Variant::from("#SUB"), Event::Update(v) => variant_of_value(v), } } unsafe fn dispatch_refresh_data( server: &Server, params: Params, result: &mut Variant, ) -> Result<()> { let ntopics = params.get_mut(0)?; let ntopics: &mut i32 = ntopics.try_into()?; let mut updates = server.refresh_data(); let len = updates.len(); *ntopics = len as i32; let mut array = SafeArray::new(&[ SAFEARRAYBOUND { lLbound: 0, cElements: 2 }, SAFEARRAYBOUND { lLbound: 0, cElements: len as u32 }, ]); { let mut wh = array.write()?; for (i, (TopicId(tid), e)) in updates.drain().enumerate()
}, } NOERROR } } impl IRTDServer for NetidxRTD { fn server_start(&self, _cb: *const IRTDUpdateEvent, _res: *mut i32) -> HRESULT { debug!("ServerStart called directly"); NOERROR } fn connect_data(&self, _topic_id: i32, _topic: *const SAFEARRAY, _get_new_values: *mut VARIANT, _res: *mut VARIANT) -> HRESULT { debug!("ConnectData called directly"); NOERROR } fn refresh_data(&self, _topic_count: *mut i32, _data: *mut SAFEARRAY) -> HRESULT { debug!("RefreshData called directly"); NOERROR } fn disconnect_data(&self, _topic_id: i32) -> HRESULT { debug!("DisconnectData called directly"); NOERROR } fn heartbeat(&self, _res: *mut i32) -> HRESULT { debug!("Heartbeat called directly"); NOERROR } fn server_terminate(&self) -> HRESULT { debug!("ServerTerminate called directly"); NOERROR } } }
{ *wh.get_mut(&[0, i as i32])? = Variant::from(tid); *wh.get_mut(&[1, i as i32])? = variant_of_event(&e); } } *result = Variant::from(array); Ok(()) } unsafe fn dispatch_disconnect_data(server: &Server, params: Params) -> Result<()> { let topic_id = TopicId(params.get(0)?.try_into()?); Ok(server.disconnect_data(topic_id)) } com::class! { #[derive(Debug)] pub class NetidxRTD: IRTDServer(IDispatch) { server: Server, } impl IDispatch for NetidxRTD { fn get_type_info_count(&self, info: *mut u32) -> HRESULT { debug!("get_type_info_count(info: {})", unsafe { *info }); if !info.is_null() { unsafe { *info = 0; } } NOERROR } fn get_type_info(&self, _lcid: u32, _type_info: *mut *mut ITypeInfo) -> HRESULT { NOERROR } pub fn get_ids_of_names( &self, riid: *const IID, names: *const *mut u16, names_len: u32, lcid: u32, ids: *mut i32 ) -> HRESULT { debug!("get_ids_of_names(riid: {:?}, names: {:?}, names_len: {}, lcid: {}, ids: {:?})", riid, names, names_len, lcid, ids); if !ids.is_null() && !names.is_null() { for i in 0..names_len { let name = unsafe { string_from_wstr(*names.offset(i as isize)) }; let name = name.to_string_lossy(); debug!("name: {}", name); match &*name { "ServerStart" => unsafe { *ids.offset(i as isize) = 0; }, "ServerTerminate" => unsafe { *ids.offset(i as isize) = 1; } "ConnectData" => unsafe { *ids.offset(i as isize) = 2; } "RefreshData" => unsafe { *ids.offset(i as isize) = 3; } "DisconnectData" => unsafe { *ids.offset(i as isize) = 4; } "Heartbeat" => unsafe { *ids.offset(i as isize) = 5; } _ => debug!("unknown method: {}", name) } } } NOERROR } unsafe fn invoke( &self, id: i32, iid: *const IID, lcid: u32, flags: u16, params: *mut DISPPARAMS, result: *mut VARIANT, exception: *mut EXCEPINFO, arg_error: *mut u32 ) -> HRESULT { debug!( "invoke(id: {}, iid: {:?}, lcid: {}, flags: {}, params: {:?}, result: {:?}, exception: {:?}, arg_error: {:?})", id, iid, lcid, flags, params, result, exception, arg_error ); assert!(!params.is_null()); let result = 
Variant::ref_from_raw_mut(result); let params = match Params::new(params) { Ok(p) => p, Err(e) => { error!("failed to wrap params {}", e); *result = Variant::error(); return NOERROR; } }; match id { 0 => { debug!("ServerStart"); match dispatch_server_start(&self.server, params) { Ok(()) => { *result = Variant::from(1); }, Err(e) => { error!("server_start invalid arg {}", e); *result = Variant::error(); } } }, 1 => { debug!("ServerTerminate"); self.server.server_terminate(); *result = Variant::from(1); }, 2 => { debug!("ConnectData"); match dispatch_connect_data(&self.server, params) { Ok(()) => { *result = Variant::from(1); }, Err(e) => { error!("connect_data invalid arg {}", e); *result = Variant::error(); } } }, 3 => { debug!("RefreshData"); match dispatch_refresh_data(&self.server, params, result) { Ok(()) => (), Err(e) => { error!("refresh_data failed {}", e); *result = Variant::error(); } } }, 4 => { debug!("DisconnectData"); match dispatch_disconnect_data(&self.server, params) { Ok(()) => { *result = Variant::from(1); } Err(e) => { error!("disconnect_data invalid arg {}", e); *result = Variant::error() } } }, 5 => { debug!("Heartbeat"); *result = Variant::from(1); }, _ => { debug!("unknown method {} called", id)
random
[ { "content": "fn next_index(bounds: &[SAFEARRAYBOUND], idx: &mut [i32]) -> bool {\n\n let mut i = 0;\n\n while i < bounds.len() {\n\n if idx[i] < (bounds[i].lLbound + bounds[i].cElements as i32) {\n\n idx[i] += 1;\n\n for j in 0..i {\n\n idx[j] = bounds[j].lLbound;\n\n }\n\n break;\n\n }\n\n i += 1;\n\n }\n\n i < bounds.len()\n\n}\n\n\n\npub struct SafeArrayIterMut<'a> {\n\n array: &'a mut SafeArray,\n\n bounds: Vec<SAFEARRAYBOUND>,\n\n idx: Vec<i32>,\n\n end: bool,\n", "file_path": "src/comglue/variant.rs", "rank": 1, "score": 117221.24649027009 }, { "content": "fn dll_register_server() -> Result<()> {\n\n let hkcr = RegKey::predef(HKEY_CLASSES_ROOT);\n\n let (by_name, _) = hkcr.create_subkey(\"NetidxRTD\\\\CLSID\")?;\n\n let clsid = clsid(CLSID);\n\n by_name.set_value(\"\", &clsid)?;\n\n if mem::size_of::<usize>() == 8 {\n\n register_clsid(&hkcr, &clsid)?;\n\n } else if mem::size_of::<usize>() == 4 {\n\n let wow = hkcr.open_subkey(\"WOW6432Node\")?;\n\n register_clsid(&wow, &clsid)?;\n\n } else {\n\n bail!(\"can't figure out the word size\")\n\n }\n\n Ok(())\n\n}\n\n\n\n#[no_mangle]\n\nextern \"system\" fn DllRegisterServer() -> HRESULT {\n\n match dll_register_server() {\n\n Err(_) => SELFREG_E_CLASS,\n\n Ok(()) => NOERROR,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 75999.50676625583 }, { "content": "fn dll_unregister_server() -> Result<()> {\n\n let hkcr = RegKey::predef(HKEY_CLASSES_ROOT);\n\n let clsid = clsid(CLSID);\n\n hkcr.delete_subkey_all(\"NetidxRTD\")?;\n\n assert!(clsid.len() > 0);\n\n hkcr.delete_subkey_all(&format!(\"CLSID\\\\{}\", clsid))?;\n\n hkcr.delete_subkey_all(&format!(\"WOW6432Node\\\\CLSID\\\\{}\", clsid))?;\n\n Ok(())\n\n}\n\n\n\n#[no_mangle]\n\nextern \"system\" fn DllUnregisterServer() -> HRESULT {\n\n match dll_unregister_server() {\n\n Err(_) => SELFREG_E_CLASS,\n\n Ok(()) => NOERROR,\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 75999.50676625583 }, { "content": "// IRTDUpdateEvent is single 
apartment threaded, and that means we need to ask COM\n\n// to make a proxy for us in order to run it in another thread. Since we MUST run in\n\n// another thread to be async, this is mandatory. We have to marshal the interface when\n\n// we receive it, and then unmarshal it in the update thread, which is then able to\n\n// call into it.\n\nstruct IRTDUpdateEventThreadArgs {\n\n stream: IStream,\n\n rx: mpsc::Receiver<()>,\n\n}\n\n\n\nstatic IDISPATCH_GUID: GUID = GUID {\n\n data1: IID_IDISPATCH.data1,\n\n data2: IID_IDISPATCH.data2,\n\n data3: IID_IDISPATCH.data3,\n\n data4: IID_IDISPATCH.data4,\n\n};\n\n\n\nunsafe fn irtd_update_event_loop(\n\n update_notify: i32,\n\n rx: mpsc::Receiver<()>,\n\n idp: Com::IDispatch,\n\n) {\n\n while let Ok(()) = rx.recv() {\n\n while let Ok(()) = rx.try_recv() {}\n\n loop {\n", "file_path": "src/comglue/dispatch.rs", "rank": 6, "score": 73911.22437685492 }, { "content": "fn register_clsid(root: &RegKey, clsid: &String) -> Result<()> {\n\n let (by_id, _) = root.create_subkey(&format!(\"CLSID\\\\{}\", &clsid))?;\n\n let (by_id_inproc, _) = by_id.create_subkey(\"InprocServer32\")?;\n\n by_id.set_value(&\"\", &\"NetidxRTD\")?;\n\n by_id_inproc.set_value(\"\", &unsafe { get_dll_file_path(_HMODULE) })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 64302.34180686583 }, { "content": "struct ServerInner {\n\n runtime: Runtime,\n\n update: Option<IRTDUpdateEventWrap>,\n\n subscriber: Subscriber,\n\n updates: mpsc::Sender<Pooled<Vec<(SubId, Event)>>>,\n\n by_id: FxHashMap<SubId, FxHashSet<TopicId>>,\n\n by_topic: FxHashMap<TopicId, Dval>,\n\n pending: Pooled<FxHashMap<TopicId, Event>>,\n\n}\n\n\n\nimpl ServerInner {\n\n fn clear(&mut self) {\n\n self.update = None;\n\n self.by_id.clear();\n\n self.by_topic.clear();\n\n self.pending.clear();\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "src/server.rs", "rank": 8, "score": 61839.83296185627 }, { "content": "fn load_config_and_init_log() -> Result<Config> {\n\n 
let path = match dirs::config_dir() {\n\n Some(d) => d,\n\n None => match dirs::home_dir() {\n\n Some(d) => d,\n\n None => PathBuf::from(\"\\\\\"),\n\n },\n\n };\n\n let base = path.join(\"netidx-excel\");\n\n fs::create_dir_all(base.clone())?;\n\n let config_file = base.join(\"config.json\");\n\n let log_file = base.join(\"log.txt\");\n\n if !config_file.exists() {\n\n fs::write(&*config_file, &serde_json::to_string_pretty(&Config::default())?)?;\n\n }\n\n let config: Config = serde_json::from_str(&fs::read_to_string(config_file.clone())?)?;\n\n let log = File::create(log_file)?;\n\n simplelog::WriteLogger::init(config.log_level, simplelog::Config::default(), log)?;\n\n Ok(config)\n\n}\n\n\n\npub static CONFIG: Lazy<Config> = Lazy::new(|| match load_config_and_init_log() {\n\n Ok(c) => c,\n\n Err(_) => Config::default(),\n\n});\n", "file_path": "src/comglue/mod.rs", "rank": 9, "score": 60428.391443581626 }, { "content": "fn clsid(id: CLSID) -> String {\n\n format!(\"{{{}}}\", id)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 52955.08071898249 }, { "content": "pub fn str_to_wstr(s: &str) -> Vec<u16> {\n\n let mut v = OsString::from(s).encode_wide().collect::<Vec<_>>();\n\n v.push(0);\n\n v\n\n}\n\n\n\n#[repr(transparent)]\n\npub struct Variant(VARIANT);\n\n\n\nimpl Default for Variant {\n\n fn default() -> Self {\n\n Variant(unsafe {\n\n let mut v = mem::zeroed();\n\n VariantInit(&mut v);\n\n v\n\n })\n\n }\n\n}\n\n\n\nimpl Drop for Variant {\n", "file_path": "src/comglue/variant.rs", "rank": 11, "score": 45576.9358633399 }, { "content": "\n\n fn try_into(self) -> Result<&'a SafeArray, Self::Error> {\n\n if self.typ() != VARENUM(VT_ARRAY.0 | VT_VARIANT.0) {\n\n bail!(\"not a variant safearray\")\n\n } else {\n\n Ok(unsafe {\n\n mem::transmute::<&*mut SAFEARRAY, &SafeArray>(&self.val().parray)\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> TryInto<&'a mut SafeArray> for &'a mut Variant {\n\n type Error = Error;\n\n\n\n fn try_into(self) -> Result<&'a mut 
SafeArray, Self::Error> {\n\n if self.typ() != VARENUM(VT_ARRAY.0 | VT_VARIANT.0) {\n\n bail!(\"not a variant safearray\")\n\n } else {\n\n Ok(unsafe {\n", "file_path": "src/comglue/variant.rs", "rank": 12, "score": 34174.11959924664 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<'a> TryInto<i32> for &'a Variant {\n\n type Error = Error;\n\n\n\n fn try_into(self) -> Result<i32, Self::Error> {\n\n if self.typ() != VT_I4 {\n\n bail!(\"not an i32\")\n\n } else {\n\n unsafe { Ok(self.val().lVal) }\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> TryInto<&'a mut i32> for &'a mut Variant {\n\n type Error = Error;\n\n\n\n fn try_into(self) -> Result<&'a mut i32, Self::Error> {\n", "file_path": "src/comglue/variant.rs", "rank": 13, "score": 34172.951001795605 }, { "content": " fn drop(&mut self) {\n\n unsafe {\n\n let _ = SafeArrayDestroy(self.0);\n\n }\n\n }\n\n}\n\n\n\nimpl SafeArray {\n\n pub fn new(bounds: &[SAFEARRAYBOUND]) -> SafeArray {\n\n let t = unsafe {\n\n SafeArrayCreate(VT_VARIANT.0 as u16, bounds.len() as u32, bounds.as_ptr())\n\n };\n\n SafeArray(t)\n\n }\n\n\n\n unsafe fn check_pointer(p: *const SAFEARRAY) -> Result<()> {\n\n let typ = SafeArrayGetVartype(p)\n\n .map_err(|e| anyhow!(\"couldn't get safearray type {}\", e.to_string()))?;\n\n if typ != VT_VARIANT.0 as u16 {\n\n bail!(\"not a variant array\")\n", "file_path": "src/comglue/variant.rs", "rank": 14, "score": 34172.38930074669 }, { "content": " })\n\n }\n\n }\n\n\n\n fn bounds(&self) -> Result<Vec<SAFEARRAYBOUND>> {\n\n let dims = self.dims();\n\n let mut res = Vec::with_capacity(dims as usize);\n\n for i in 1..=dims {\n\n let bound = self.bound(i)?;\n\n res.push(bound)\n\n }\n\n Ok(res)\n\n }\n\n\n\n fn get(&self, idx: &[i32]) -> Result<&Variant> {\n\n unsafe {\n\n let mut vp: *mut VARIANT = ptr::null_mut();\n\n match SafeArrayPtrOfIndex(\n\n self.0,\n\n idx.as_ptr(),\n", "file_path": "src/comglue/variant.rs", "rank": 15, "score": 34171.44404992187 }, { "content": " if self.typ() != VARENUM(VT_I4.0 | VT_BYREF.0) 
{\n\n bail!(\"not a byref i32\")\n\n } else {\n\n Ok(unsafe { &mut *self.val().plVal })\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> TryInto<String> for &'a Variant {\n\n type Error = Error;\n\n\n\n fn try_into(self) -> Result<String, Self::Error> {\n\n if self.typ() != VT_BSTR {\n\n bail!(\"not a string\")\n\n } else {\n\n unsafe {\n\n let s = &*self.val().bstrVal;\n\n Ok(string_from_wstr(s.0).to_string_lossy().to_string())\n\n }\n\n }\n", "file_path": "src/comglue/variant.rs", "rank": 16, "score": 34171.03097218327 }, { "content": " v\n\n }\n\n }\n\n}\n\n\n\nimpl From<&str> for Variant {\n\n fn from(s: &str) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_BSTR);\n\n let mut s = str_to_wstr(s);\n\n let bs = SysAllocStringLen(PWSTR(s.as_mut_ptr()), s.len() as u32);\n\n v.val_mut().bstrVal = mem::ManuallyDrop::new(bs);\n\n v\n\n }\n\n }\n\n}\n\n\n\nimpl From<&String> for Variant {\n\n fn from(s: &String) -> Self {\n", "file_path": "src/comglue/variant.rs", "rank": 17, "score": 34170.896059545674 }, { "content": " fn drop(&mut self) {\n\n let _ = unsafe { VariantClear(&mut self.0) };\n\n }\n\n}\n\n\n\nimpl<'a> TryInto<bool> for &'a Variant {\n\n type Error = Error;\n\n\n\n fn try_into(self) -> Result<bool, Self::Error> {\n\n if self.typ() != VT_BOOL {\n\n bail!(\"not a bool\")\n\n } else {\n\n unsafe {\n\n let v = self.val().boolVal;\n\n if v == -1 {\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n }\n", "file_path": "src/comglue/variant.rs", "rank": 18, "score": 34170.87093311681 }, { "content": " pub fn iter_mut(&mut self) -> Result<SafeArrayIterMut> {\n\n let bounds = self.bounds()?;\n\n let idx =\n\n (0..bounds.len()).into_iter().map(|i| bounds[i].lLbound).collect::<Vec<_>>();\n\n Ok(SafeArrayIterMut { array: self.0, bounds, idx, end: false })\n\n }\n\n\n\n pub fn get(&self, idx: &[i32]) -> Result<&Variant> {\n\n self.0.get(idx)\n\n }\n\n\n\n pub fn get_mut(&mut self, idx: &[i32]) -> Result<&mut Variant> {\n\n self.0.get_mut(idx)\n\n 
}\n\n}\n\n\n\n#[repr(transparent)]\n\npub struct SafeArray(*mut SAFEARRAY);\n\n\n\nimpl Drop for SafeArray {\n", "file_path": "src/comglue/variant.rs", "rank": 19, "score": 34169.71075475871 }, { "content": " }\n\n}\n\n\n\nimpl<'a> TryInto<IDispatch> for &'a Variant {\n\n type Error = Error;\n\n\n\n fn try_into(self) -> Result<IDispatch, Self::Error> {\n\n if self.typ() != VT_DISPATCH {\n\n bail!(\"not an IDispatch interface\")\n\n } else {\n\n unsafe {\n\n Ok(IDispatch::from_abi(self.val().pdispVal)\n\n .map_err(|e| anyhow!(e.to_string()))?)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> TryInto<&'a SafeArray> for &'a Variant {\n\n type Error = Error;\n", "file_path": "src/comglue/variant.rs", "rank": 20, "score": 34169.61508862046 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n pub unsafe fn from_raw<'a>(p: *mut SAFEARRAY) -> Result<Self> {\n\n Self::check_pointer(p)?;\n\n Ok(mem::transmute::<*mut SAFEARRAY, SafeArray>(p))\n\n }\n\n\n\n pub fn write<'a>(&'a mut self) -> Result<SafeArrayWriteGuard<'a>> {\n\n unsafe {\n\n SafeArrayLock(self.0)\n\n .map_err(|e| anyhow!(\"failed to lock safearray {}\", e.to_string()))?;\n\n Ok(SafeArrayWriteGuard(self))\n\n }\n\n }\n\n\n\n pub fn read<'a>(&'a self) -> Result<SafeArrayReadGuard<'a>> {\n\n unsafe {\n\n SafeArrayLock(self.0)\n", "file_path": "src/comglue/variant.rs", "rank": 21, "score": 34169.552591389445 }, { "content": " let idx =\n\n (0..bounds.len()).into_iter().map(|i| bounds[i].lLbound).collect::<Vec<_>>();\n\n Ok(SafeArrayIter { array: self.0, bounds, idx, end: false })\n\n }\n\n\n\n pub fn get(&self, idx: &[i32]) -> Result<&Variant> {\n\n self.0.get(idx)\n\n }\n\n}\n\n\n\npub struct SafeArrayWriteGuard<'a>(&'a mut SafeArray);\n\n\n\nimpl<'a> Drop for SafeArrayWriteGuard<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n let _ = SafeArrayUnlock(self.0 .0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/comglue/variant.rs", "rank": 22, "score": 34169.19357761709 }, { "content": " &mut vp as *mut *mut VARIANT as *mut *mut 
c_void,\n\n ) {\n\n Ok(()) => Ok(Variant::ref_from_raw(vp)),\n\n Err(e) => bail!(\"could not access idx: {:?}, {}\", idx, e.to_string()),\n\n }\n\n }\n\n }\n\n\n\n fn get_mut(&mut self, idx: &[i32]) -> Result<&mut Variant> {\n\n unsafe {\n\n let mut vp: *mut VARIANT = ptr::null_mut();\n\n match SafeArrayPtrOfIndex(\n\n self.0,\n\n idx.as_ptr(),\n\n &mut vp as *mut *mut VARIANT as *mut *mut c_void,\n\n ) {\n\n Ok(()) => Ok(Variant::ref_from_raw_mut(vp)),\n\n Err(e) => bail!(\"could not access idx: {:?}, {}\", idx, e.to_string()),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/comglue/variant.rs", "rank": 23, "score": 34169.1050010302 }, { "content": "}\n\n\n\nimpl<'a> Iterator for SafeArrayIterMut<'a> {\n\n type Item = &'a mut Variant;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.end {\n\n None\n\n } else {\n\n let res = unsafe {\n\n let mut vp: *mut VARIANT = ptr::null_mut();\n\n SafeArrayPtrOfIndex(\n\n self.array.0,\n\n self.idx.as_ptr(),\n\n &mut vp as *mut *mut VARIANT as *mut *mut c_void,\n\n )\n\n .ok()?;\n\n Some(Variant::ref_from_raw_mut(vp))\n\n };\n\n self.end = next_index(&self.bounds, &mut self.idx);\n", "file_path": "src/comglue/variant.rs", "rank": 24, "score": 34168.806583360805 }, { "content": " res\n\n }\n\n }\n\n}\n\n\n\npub struct SafeArrayIter<'a> {\n\n array: &'a SafeArray,\n\n bounds: Vec<SAFEARRAYBOUND>,\n\n idx: Vec<i32>,\n\n end: bool,\n\n}\n\n\n\nimpl<'a> Iterator for SafeArrayIter<'a> {\n\n type Item = &'a Variant;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.end {\n\n None\n\n } else {\n\n let res = unsafe {\n", "file_path": "src/comglue/variant.rs", "rank": 25, "score": 34168.77233919992 }, { "content": " mem::transmute::<&mut *mut SAFEARRAY, &mut SafeArray>(\n\n &mut self.val_mut().parray,\n\n )\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl From<bool> for Variant {\n\n fn from(b: bool) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_BOOL);\n\n v.val_mut().boolVal = if b { -1 } else { 0 
};\n\n v\n\n }\n\n }\n\n}\n\n\n\nimpl From<i32> for Variant {\n", "file_path": "src/comglue/variant.rs", "rank": 26, "score": 34168.31362353389 }, { "content": " let mut vp: *mut VARIANT = ptr::null_mut();\n\n SafeArrayPtrOfIndex(\n\n self.array.0,\n\n self.idx.as_ptr(),\n\n &mut vp as *mut *mut VARIANT as *mut *mut c_void,\n\n )\n\n .ok()?;\n\n Some(Variant::ref_from_raw(vp))\n\n };\n\n self.end = next_index(&self.bounds, &mut self.idx);\n\n res\n\n }\n\n }\n\n}\n\n\n\npub struct SafeArrayReadGuard<'a>(&'a SafeArray);\n\n\n\nimpl<'a> Drop for SafeArrayReadGuard<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n", "file_path": "src/comglue/variant.rs", "rank": 27, "score": 34168.15108243636 }, { "content": "use anyhow::{anyhow, bail, Error, Result};\n\nuse std::{\n\n convert::{From, TryInto},\n\n default::Default,\n\n ffi::{c_void, OsString},\n\n iter::Iterator,\n\n mem,\n\n ops::Drop,\n\n os::windows::ffi::{OsStrExt, OsStringExt},\n\n ptr,\n\n};\n\nuse windows::{\n\n core::Abi,\n\n Win32::{\n\n Foundation::{SysAllocStringLen, PWSTR},\n\n Globalization::lstrlenW,\n\n System::{\n\n Com::{IDispatch, SAFEARRAY, SAFEARRAYBOUND, VARIANT, VARIANT_0_0_0},\n\n Ole::{\n\n SafeArrayCreate, SafeArrayDestroy, SafeArrayGetDim, SafeArrayGetLBound,\n", "file_path": "src/comglue/variant.rs", "rank": 28, "score": 34168.1308682528 }, { "content": " Variant::from(s.as_str())\n\n }\n\n}\n\n\n\nimpl From<String> for Variant {\n\n fn from(s: String) -> Self {\n\n Variant::from(s.as_str())\n\n }\n\n}\n\n\n\nimpl From<SafeArray> for Variant {\n\n fn from(a: SafeArray) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VARENUM(VT_ARRAY.0 | VT_VARIANT.0));\n\n v.val_mut().parray = a.0;\n\n // the variant is now responsible for deallocating the safe array\n\n mem::forget(a);\n\n v\n\n }\n", "file_path": "src/comglue/variant.rs", "rank": 29, "score": 34167.84927441966 }, { "content": " .map_err(|e| anyhow!(\"failed to lock safearray {}\", e.to_string()))?;\n\n 
Ok(SafeArrayReadGuard(self))\n\n }\n\n }\n\n\n\n fn dims(&self) -> u32 {\n\n unsafe { SafeArrayGetDim(self.0) }\n\n }\n\n\n\n fn bound(&self, dim: u32) -> Result<SAFEARRAYBOUND> {\n\n unsafe {\n\n let lbound = SafeArrayGetLBound(self.0, dim).map_err(|e| {\n\n anyhow!(\"couldn't get safe array lower bound {}\", e.to_string())\n\n })?;\n\n let ubound = SafeArrayGetUBound(self.0, dim).map_err(|e| {\n\n anyhow!(\"couldn't get safe array upper bound {}\", e.to_string())\n\n })?;\n\n Ok(SAFEARRAYBOUND {\n\n cElements: (1 + ubound - lbound) as u32,\n\n lLbound: lbound,\n", "file_path": "src/comglue/variant.rs", "rank": 30, "score": 34167.07219191263 }, { "content": " fn from(i: i32) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_I4);\n\n v.val_mut().lVal = i;\n\n v\n\n }\n\n }\n\n}\n\n\n\nimpl From<u32> for Variant {\n\n fn from(i: u32) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_UI4);\n\n v.val_mut().ulVal = i;\n\n v\n\n }\n\n }\n\n}\n", "file_path": "src/comglue/variant.rs", "rank": 31, "score": 34167.03147634799 }, { "content": " }\n\n}\n\n\n\nimpl Variant {\n\n pub fn new() -> Variant {\n\n Self::default()\n\n }\n\n\n\n pub fn null() -> Variant {\n\n let mut v = Self::default();\n\n unsafe { v.set_typ(VT_NULL) }\n\n v\n\n }\n\n\n\n pub fn error() -> Variant {\n\n let mut v = Self::default();\n\n unsafe { v.set_typ(VT_ERROR) }\n\n v\n\n }\n\n\n", "file_path": "src/comglue/variant.rs", "rank": 32, "score": 34165.6158659651 }, { "content": "impl<'a> SafeArrayWriteGuard<'a> {\n\n pub fn dims(&self) -> u32 {\n\n self.0.dims()\n\n }\n\n\n\n pub fn bound(&self, dim: u32) -> Result<SAFEARRAYBOUND> {\n\n self.0.bound(dim)\n\n }\n\n\n\n pub fn bounds(&self) -> Result<Vec<SAFEARRAYBOUND>> {\n\n self.0.bounds()\n\n }\n\n\n\n pub fn iter(&self) -> Result<SafeArrayIter> {\n\n let bounds = self.bounds()?;\n\n let idx =\n\n (0..bounds.len()).into_iter().map(|i| bounds[i].lLbound).collect::<Vec<_>>();\n\n Ok(SafeArrayIter { array: self.0, 
bounds, idx, end: false })\n\n }\n\n\n", "file_path": "src/comglue/variant.rs", "rank": 33, "score": 34165.4846366728 }, { "content": " SafeArrayGetUBound, SafeArrayGetVartype, SafeArrayLock,\n\n SafeArrayPtrOfIndex, SafeArrayUnlock, VariantClear, VariantInit, VARENUM,\n\n VT_ARRAY, VT_BOOL, VT_BSTR, VT_BYREF, VT_DISPATCH, VT_ERROR, VT_I4,\n\n VT_I8, VT_NULL, VT_R4, VT_R8, VT_UI4, VT_UI8, VT_VARIANT,\n\n },\n\n },\n\n },\n\n};\n\n\n\npub unsafe fn string_from_wstr<'a>(s: *mut u16) -> OsString {\n\n OsString::from_wide(std::slice::from_raw_parts(s, lstrlenW(PWSTR(s)) as usize))\n\n}\n\n\n", "file_path": "src/comglue/variant.rs", "rank": 34, "score": 34164.42421922383 }, { "content": " }\n\n}\n\n\n\nimpl From<f32> for Variant {\n\n fn from(i: f32) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_R4);\n\n v.val_mut().fltVal = i;\n\n v\n\n }\n\n }\n\n}\n\n\n\nimpl From<f64> for Variant {\n\n fn from(i: f64) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_R8);\n\n v.val_mut().dblVal = i;\n", "file_path": "src/comglue/variant.rs", "rank": 35, "score": 34163.36524871322 }, { "content": "\n\nimpl From<i64> for Variant {\n\n fn from(i: i64) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_I8);\n\n v.val_mut().llVal = i;\n\n v\n\n }\n\n }\n\n}\n\n\n\nimpl From<u64> for Variant {\n\n fn from(i: u64) -> Self {\n\n let mut v = Self::new();\n\n unsafe {\n\n v.set_typ(VT_UI8);\n\n v.val_mut().ullVal = i;\n\n v\n\n }\n", "file_path": "src/comglue/variant.rs", "rank": 36, "score": 34163.36524871322 }, { "content": " let _ = SafeArrayUnlock(self.0 .0);\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> SafeArrayReadGuard<'a> {\n\n pub fn dims(&self) -> u32 {\n\n self.0.dims()\n\n }\n\n\n\n pub fn bound(&self, dim: u32) -> Result<SAFEARRAYBOUND> {\n\n self.0.bound(dim)\n\n }\n\n\n\n pub fn bounds(&self) -> Result<Vec<SAFEARRAYBOUND>> {\n\n self.0.bounds()\n\n }\n\n\n\n pub fn iter(&self) -> Result<SafeArrayIter> {\n\n let bounds = 
self.bounds()?;\n", "file_path": "src/comglue/variant.rs", "rank": 37, "score": 34163.26568830036 }, { "content": " pub fn typ(&self) -> VARENUM {\n\n VARENUM(unsafe { self.0.Anonymous.Anonymous.vt as i32 })\n\n }\n\n\n\n unsafe fn set_typ(&mut self, typ: VARENUM) {\n\n (*self.0.Anonymous.Anonymous).vt = typ.0 as u16;\n\n }\n\n\n\n unsafe fn val(&self) -> &VARIANT_0_0_0 {\n\n &self.0.Anonymous.Anonymous.Anonymous\n\n }\n\n\n\n unsafe fn val_mut(&mut self) -> &mut VARIANT_0_0_0 {\n\n &mut (*self.0.Anonymous.Anonymous).Anonymous\n\n }\n\n}\n\n\n", "file_path": "src/comglue/variant.rs", "rank": 38, "score": 34160.15155548706 }, { "content": " pub fn as_ptr(&self) -> *const VARIANT {\n\n unsafe { mem::transmute::<&Variant, &VARIANT>(self) as *const VARIANT }\n\n }\n\n\n\n pub fn as_mut_ptr(&mut self) -> *mut VARIANT {\n\n unsafe { mem::transmute::<&mut Variant, &mut VARIANT>(self) as *mut VARIANT }\n\n }\n\n\n\n // turn a const pointer to a `VARIANT` into a reference to a `Variant`.\n\n // take care to assign a reasonable lifetime.\n\n pub unsafe fn ref_from_raw<'a>(p: *const VARIANT) -> &'a Variant {\n\n mem::transmute::<*const VARIANT, &'a Variant>(p)\n\n }\n\n\n\n // turn a mut pointer to a `VARIANT` into a mutable reference to a `Variant`.\n\n // take care to assign a reasonable lifetime.\n\n pub unsafe fn ref_from_raw_mut<'a>(p: *mut VARIANT) -> &'a mut Variant {\n\n mem::transmute::<*mut VARIANT, &'a mut Variant>(p)\n\n }\n\n\n", "file_path": "src/comglue/variant.rs", "rank": 39, "score": 34159.10684935961 }, { "content": "pub struct Server(Arc<Mutex<ServerInner>>);\n\n\n\nimpl Default for Server {\n\n fn default() -> Self {\n\n let cfg = *comglue::CONFIG;\n\n Self::new(cfg)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Server {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Server\")\n\n }\n\n}\n\n\n\nimpl Server {\n\n async fn updates_loop(self, mut up: mpsc::Receiver<Pooled<Vec<(SubId, Event)>>>) {\n\n debug!(\"updates loop 
started\");\n\n while let Some(mut updates) = up.next().await {\n\n let mut inner = self.0.lock();\n", "file_path": "src/server.rs", "rank": 40, "score": 22737.814914540595 }, { "content": "use crate::comglue::{self, dispatch::IRTDUpdateEventWrap};\n\nuse anyhow::Result;\n\nuse futures::{channel::mpsc, prelude::*};\n\nuse fxhash::{FxBuildHasher, FxHashMap, FxHashSet};\n\nuse log::debug;\n\nuse netidx::{\n\n config::Config,\n\n path::Path,\n\n pool::{Pool, Pooled},\n\n resolver::Auth,\n\n subscriber::{Dval, Event, SubId, Subscriber, UpdatesFlags},\n\n};\n\nuse once_cell::sync::Lazy;\n\nuse parking_lot::Mutex;\n\nuse std::{\n\n collections::{HashMap, HashSet},\n\n default::Default,\n\n fmt, mem,\n\n sync::Arc,\n\n};\n\nuse tokio::runtime::Runtime;\n\n\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]\n\npub(crate) struct TopicId(pub i32);\n\n\n\nstatic PENDING: Lazy<Pool<FxHashMap<TopicId, Event>>> =\n\n Lazy::new(|| Pool::new(3, 1_000_000));\n\n\n", "file_path": "src/server.rs", "rank": 41, "score": 22736.791191202243 }, { "content": " debug!(\"server_terminate\");\n\n }\n\n\n\n pub(crate) fn connect_data(&self, tid: TopicId, path: Path) -> Result<()> {\n\n debug!(\"connect_data\");\n\n let mut inner = self.0.lock();\n\n let dv = inner.subscriber.durable_subscribe(path);\n\n inner.pending.insert(tid, dv.last());\n\n if let Some(update) = inner.update.as_ref() {\n\n update.update_notify()\n\n }\n\n dv.updates(UpdatesFlags::BEGIN_WITH_LAST, inner.updates.clone());\n\n inner\n\n .by_id\n\n .entry(dv.id())\n\n .or_insert_with(|| HashSet::with_hasher(FxBuildHasher::default()))\n\n .insert(tid);\n\n inner.by_topic.insert(tid, dv);\n\n Ok(())\n\n }\n", "file_path": "src/server.rs", "rank": 42, "score": 22734.5283745116 }, { "content": "\n\n pub(crate) fn new(cfg: comglue::Config) -> Server {\n\n debug!(\"init runtime\");\n\n let runtime = Runtime::new().expect(\"could not init async runtime\");\n\n debug!(\"init subscriber\");\n\n let subscriber = runtime\n\n 
.block_on(async {\n\n let auth = match cfg.auth_mechanism {\n\n comglue::Auth::Anonymous => Auth::Anonymous,\n\n comglue::Auth::Kerberos => Auth::Krb5 { spn: None, upn: None },\n\n };\n\n let config =\n\n Config::load_default().expect(\"could not load netidx config\");\n\n Subscriber::new(config, auth)\n\n })\n\n .expect(\"could not init netidx subscriber\");\n\n let (tx, rx) = runtime.block_on(async { mpsc::channel(3) });\n\n let t = Server(Arc::new(Mutex::new(ServerInner {\n\n runtime,\n\n update: None,\n", "file_path": "src/server.rs", "rank": 43, "score": 22733.570123268382 }, { "content": " subscriber,\n\n updates: tx,\n\n by_id: HashMap::with_hasher(FxBuildHasher::default()),\n\n by_topic: HashMap::with_hasher(FxBuildHasher::default()),\n\n pending: PENDING.take(),\n\n })));\n\n let t_ = t.clone();\n\n t.0.lock().runtime.spawn(t_.updates_loop(rx));\n\n t\n\n }\n\n\n\n pub(crate) fn server_start(&self, update: IRTDUpdateEventWrap) {\n\n let mut inner = self.0.lock();\n\n inner.clear();\n\n inner.update = Some(update);\n\n debug!(\"server_start\");\n\n }\n\n\n\n pub(crate) fn server_terminate(&self) {\n\n self.0.lock().clear();\n", "file_path": "src/server.rs", "rank": 44, "score": 22732.799784533312 }, { "content": "\n\n pub(crate) fn disconnect_data(&self, tid: TopicId) {\n\n debug!(\"disconnect_data\");\n\n let mut inner = self.0.lock();\n\n inner.pending.remove(&tid);\n\n if let Some(dv) = inner.by_topic.remove(&tid) {\n\n if let Some(tids) = inner.by_id.get_mut(&dv.id()) {\n\n tids.remove(&tid);\n\n if tids.is_empty() {\n\n inner.by_id.remove(&dv.id());\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn refresh_data(&self) -> Pooled<FxHashMap<TopicId, Event>> {\n\n debug!(\"refresh_data\");\n\n let mut inner = self.0.lock();\n\n mem::replace(&mut inner.pending, PENDING.take())\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 45, "score": 22728.87772063926 }, { "content": " let inner = &mut *inner;\n\n if let Some(update) = &mut inner.update {\n\n let 
call_update = inner.pending.is_empty();\n\n for (id, ev) in updates.drain(..) {\n\n if let Some(tids) = inner.by_id.get(&id) {\n\n let mut iter = tids.iter();\n\n for _ in 0..tids.len() - 1 {\n\n inner.pending.insert(*iter.next().unwrap(), ev.clone());\n\n }\n\n inner.pending.insert(*iter.next().unwrap(), ev);\n\n }\n\n }\n\n if call_update {\n\n debug!(\"calling update_notify\");\n\n update.update_notify();\n\n }\n\n }\n\n }\n\n debug!(\"updates loop terminated\")\n\n }\n", "file_path": "src/server.rs", "rank": 46, "score": 22727.534761297153 }, { "content": " &self,\n\n riid: *const IID,\n\n names: *const *mut u16,\n\n names_len: u32,\n\n lcid: u32,\n\n ids: *mut i32\n\n ) -> HRESULT;\n\n pub fn invoke(\n\n &self,\n\n id: i32,\n\n iid: *const IID,\n\n lcid: u32,\n\n flags: u16,\n\n params: *mut DISPPARAMS,\n\n result: *mut VARIANT,\n\n exception: *mut EXCEPINFO,\n\n arg_error: *mut u32\n\n ) -> HRESULT;\n\n }\n\n\n", "file_path": "src/comglue/interface.rs", "rank": 53, "score": 14968.119283546795 }, { "content": " #[uuid(\"A43788C1-D91B-11D3-8F39-00C04F3651B8\")]\n\n pub unsafe interface IRTDUpdateEvent: IDispatch {\n\n pub fn update_notify(&self) -> HRESULT;\n\n pub fn heartbeat_interval(&self, hb: *mut i32) -> HRESULT;\n\n pub fn disconnect(&self) -> HRESULT;\n\n }\n\n\n\n #[uuid(\"EC0E6191-DB51-11D3-8F3E-00C04F3651B8\")]\n\n pub unsafe interface IRTDServer: IDispatch {\n\n pub fn server_start(&self, cb: *const IRTDUpdateEvent, res: *mut i32) -> HRESULT;\n\n pub fn connect_data(\n\n &self,\n\n topic_id: i32,\n\n topic: *const SAFEARRAY,\n\n get_new_values: *mut VARIANT,\n\n res: *mut VARIANT\n\n ) -> HRESULT;\n\n pub fn refresh_data(&self, topic_count: *mut i32, data: *mut SAFEARRAY) -> HRESULT;\n\n pub fn disconnect_data(&self, topic_id: i32) -> HRESULT;\n\n pub fn heartbeat(&self, res: *mut i32) -> HRESULT;\n\n pub fn server_terminate(&self) -> HRESULT;\n\n }\n\n}\n", "file_path": "src/comglue/interface.rs", "rank": 55, "score": 14964.818575359372 }, { 
"content": " match hr {\n\n Ok(()) => break,\n\n Err(e) => {\n\n error!(\"IRTDUpdateEvent: update_notify failed {}\", e);\n\n thread::sleep(Duration::from_millis(250))\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nunsafe extern \"system\" fn irtd_update_event_thread(ptr: *mut c_void) -> u32 {\n\n let args = Box::from_raw(ptr.cast::<IRTDUpdateEventThreadArgs>());\n\n match CoInitialize(ptr::null_mut()) {\n\n Ok(()) => (),\n\n Err(e) => {\n\n error!(\"update_event_thread: failed to initialize COM {}\", e);\n\n return 0;\n\n }\n\n }\n", "file_path": "src/comglue/dispatch.rs", "rank": 58, "score": 14961.973815537349 }, { "content": " );\n\n debug!(\"update_event_thread: called GetIDsOfNames dispids: {:?}\", dispid);\n\n if let Err(e) = hr {\n\n error!(\"update_event_thread: could not get names {}\", e);\n\n }\n\n debug!(\"update_event_thread, init done, calling event loop\");\n\n irtd_update_event_loop(dispid, args.rx, idp);\n\n CoUninitialize();\n\n 0\n\n}\n\n\n\npub struct IRTDUpdateEventWrap(mpsc::Sender<()>);\n\n\n\nimpl IRTDUpdateEventWrap {\n\n pub unsafe fn new(disp: Com::IDispatch) -> Result<Self> {\n\n let (tx, rx) = mpsc::channel();\n\n let stream = CoMarshalInterThreadInterfaceInStream(&IDISPATCH_GUID, disp)\n\n .map_err(|e| anyhow!(e.to_string()))?;\n\n let args = Box::new(IRTDUpdateEventThreadArgs { stream, rx });\n\n let mut threadid = 0u32;\n", "file_path": "src/comglue/dispatch.rs", "rank": 62, "score": 14959.385133473948 }, { "content": "use com::{\n\n interfaces::IUnknown,\n\n sys::{HRESULT, IID},\n\n};\n\nuse windows::Win32::System::Com::{ITypeInfo, DISPPARAMS, EXCEPINFO, SAFEARRAY, VARIANT};\n\n\n\n// bde5f32a-14d9-414e-a0af-8390a1601944\n\npub const CLSID: IID = IID {\n\n data1: 0xbde5f32a,\n\n data2: 0x14d9,\n\n data3: 0x414e,\n\n data4: [0xa0, 0xaf, 0x83, 0x90, 0xa1, 0x60, 0x19, 0x44],\n\n};\n\n\n\ncom::interfaces! 
{\n\n #[uuid(\"00020400-0000-0000-C000-000000000046\")]\n\n pub unsafe interface IDispatch: IUnknown {\n\n pub fn get_type_info_count(&self, info: *mut u32) -> HRESULT;\n\n pub fn get_type_info(&self, lcid: u32, type_info: *mut *mut ITypeInfo) -> HRESULT;\n\n pub fn get_ids_of_names(\n", "file_path": "src/comglue/interface.rs", "rank": 63, "score": 14959.368862678832 }, { "content": "use crate::comglue::{\n\n interface::IID_IDISPATCH,\n\n variant::{str_to_wstr, Variant},\n\n};\n\nuse anyhow::{anyhow, Result};\n\nuse log::{debug, error};\n\nuse std::{boxed::Box, ffi::c_void, ptr, sync::mpsc, thread, time::Duration};\n\nuse windows::{\n\n core::GUID,\n\n Win32::{\n\n Foundation::PWSTR,\n\n System::{\n\n Com::{\n\n self, CoInitialize, CoUninitialize, IStream,\n\n Marshal::CoMarshalInterThreadInterfaceInStream,\n\n StructuredStorage::CoGetInterfaceAndReleaseStream, DISPPARAMS,\n\n },\n\n Ole,\n\n Threading::{CreateThread, THREAD_CREATION_FLAGS},\n\n },\n\n },\n\n};\n\n\n\n// IRTDUpdateEvent is single apartment threaded, and that means we need to ask COM\n\n// to make a proxy for us in order to run it in another thread. Since we MUST run in\n\n// another thread to be async, this is mandatory. 
We have to marshal the interface when\n\n// we receive it, and then unmarshal it in the update thread, which is then able to\n\n// call into it.\n", "file_path": "src/comglue/dispatch.rs", "rank": 64, "score": 14957.79491549562 }, { "content": " let mut args = [];\n\n let mut named_args = [];\n\n let mut params = DISPPARAMS {\n\n rgvarg: args.as_mut_ptr(),\n\n rgdispidNamedArgs: named_args.as_mut_ptr(),\n\n cArgs: 0,\n\n cNamedArgs: 0,\n\n };\n\n let mut result = Variant::null();\n\n let mut _arg_err = 0;\n\n let hr = idp.Invoke(\n\n update_notify,\n\n &GUID::zeroed(),\n\n 0,\n\n Ole::DISPATCH_METHOD as u16,\n\n &mut params,\n\n result.as_mut_ptr(),\n\n ptr::null_mut(),\n\n &mut _arg_err,\n\n );\n", "file_path": "src/comglue/dispatch.rs", "rank": 66, "score": 14956.3988920994 }, { "content": " let idp: Com::IDispatch = match CoGetInterfaceAndReleaseStream(&args.stream) {\n\n Ok(i) => i,\n\n Err(e) => {\n\n error!(\n\n \"update_event_thread: failed to unmarshal the IDispatch interface {}\",\n\n e\n\n );\n\n CoUninitialize();\n\n return 0;\n\n }\n\n };\n\n let mut update_notify = str_to_wstr(\"UpdateNotify\");\n\n let mut dispid = 0x0;\n\n debug!(\"get_dispids: calling GetIDsOfNames\");\n\n let hr = idp.GetIDsOfNames(\n\n &GUID::zeroed(),\n\n &PWSTR(update_notify.as_mut_ptr()),\n\n 1,\n\n 1000,\n\n &mut dispid,\n", "file_path": "src/comglue/dispatch.rs", "rank": 67, "score": 14955.918517811166 }, { "content": " CreateThread(\n\n ptr::null_mut(),\n\n 0,\n\n Some(irtd_update_event_thread),\n\n Box::into_raw(args).cast::<c_void>(),\n\n THREAD_CREATION_FLAGS::default(),\n\n &mut threadid,\n\n );\n\n Ok(IRTDUpdateEventWrap(tx))\n\n }\n\n\n\n pub fn update_notify(&self) {\n\n let _ = self.0.send(());\n\n }\n\n}\n", "file_path": "src/comglue/dispatch.rs", "rank": 68, "score": 14954.310393219037 }, { "content": "pub mod dispatch;\n\npub mod glue;\n\npub mod interface;\n\npub mod variant;\n\n\n\nuse anyhow::Result;\n\nuse dirs;\n\nuse log::LevelFilter;\n\nuse 
once_cell::sync::Lazy;\n\nuse simplelog;\n\nuse std::{\n\n default::Default,\n\n fs::{self, File},\n\n path::PathBuf,\n\n};\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub enum Auth {\n\n Anonymous,\n\n Kerberos,\n", "file_path": "src/comglue/mod.rs", "rank": 69, "score": 14952.003328481167 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub struct Config {\n\n pub log_level: LevelFilter,\n\n pub auth_mechanism: Auth,\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Self {\n\n Config { log_level: LevelFilter::Off, auth_mechanism: Auth::Kerberos }\n\n }\n\n}\n\n\n", "file_path": "src/comglue/mod.rs", "rank": 70, "score": 14948.525339726883 }, { "content": "This is an Excel COM add-in that allows the `=RTD()` formula to pull in real time data from Netidx.\n\n\n\nSay you have published some data, maybe from the shell publisher, and you want to show it to your boss, but \"that commandline thing\" or \"that linux thing\" is not something your boss does.\n\n```bash\n\n> netidx publisher --bind 192.168.0.0/24 --spn publish/[email protected]\n\n/test/foo|array|[42, -42]\n\n/test/bar|i32|42\n\n/test/baz|i32|-42\n\n```\n\n\n\nIf you install the add-in on you bosses' machine, then he can pull that data into a cell in an Excel spreadsheet, and you can stop worrying about how to present results to pointy haired types and get your job done.\n\n\n\n![Example](example.PNG)\n\n\n\nThe key point is if a value in netidx updates, Excel will update almost immediatly, and your bosses pivot table will update too. csv exports are one step closer to dieing!\n\n# Syntax\n\n\n\n```\n\n=RTD(\"netidxrtd\",, PATH)\n\n```\n\n\n\n`PATH` can of course be a ref, or another formula, it's Excel, your boss knows Excel ... 
right?\n\n\n\n# Performance \n\n\n\nEven if you subscribe to a lot of data, or you subscribe to data that updates quickly, Excel should remain responsive because RTDs are throttled, and all the netidx processing is happening on a background thread pool. For example here Excel is maxing out my wifi network by subscribing to the stress publisher, however it remains completely responsive. It's actually pulling in 2 million updates per second, and that's limited by the network, not the cpu.\n\n\n\n![Performance](perf.PNG)\n\n\n\n# Building\n\n\n\nThere are pre built binaries [here](https://github.com/estokes/netidx-excel/releases/tag/0.1.0)\n\n\n\nBut you want to build it you need a windows machine (obviously) probably windows 10 or above, but 8 might work. You need to install rust, the easiest way is to use rustup. You need to install git. Once you have those two things installed,\n\n\n\n```bash\n\n> git clone https://github.com/estokes/netidx-excel\n\n> cd netidx-excel\n\n> cargo build --release\n\n```\n\n\n\nThe dll should be built in `target/release/netidx_excel.dll`\n\n\n", "file_path": "README.md", "rank": 71, "score": 12738.918257912137 }, { "content": "# Installing\n\n\n\nTo install you need to decide where you want the dll to live, it really doesn't matter, but I put it in `C:\\Program Files\\netidx-excel` on my machine. Then you need to run `regsvr32` on the dll as Administrator, that will set up the registry entries to register it as a proper COM server. 
So in an admin powershell,\n\n\n\n```powershell\n\n> mkdir 'C:\\Program Files\\netidx-excel'\n\n> cp target\\release\\netidx_excel.dll 'C:\\Program Files\\netidx-excel'\n\n> regsvr32 'C:\\Program Files\\netidx-excel\\netidx_excel.dll'\n\n```\n\n\n\nThe most common errors are `regsvr32` isn't in your path, and/or your shell is not running with admin rights.\n\n\n\nYou might also need to open the properties of the dll and \"unblock\" it (if you downloaded a binary instead of building it yourself).\n\n\n\n## 32 bit office on 64 bit windows\n\n\n\nIf you are running the 32 bit version of office, maybe because you have limited ram, then you will need to also install the netidx_excel32.dll, and you will need to run regsvr32 on that as well, just like the above. If you are building from source you will need to install the target `i686-pc-windows-msvc` and build the 32 bit dll with that target, e.g. `cargo build --target i686-pc-windows-msvc --release`, and then the dll will be in `target/i686-pc-windows-msvc/release` instead of `target/release`.\n\n\n\n# Limitations\n\n\n\n- No write support; there's no real reason other than time, it's perfectly possible\n\n- No publish support; again, no real reason, perfectly possible, but significantly more time than write\n\n- No resolver list support; once again, time, no real problems with this\n\n- I could remove the requirment for admin rights to install if people cared, but then you'd have to `regsvr32` it for every user on a machine\n\n\n\n# Other\n\n\n\nI programmed on windows for a WHOLE month so that you NEVER have too. Because trust me, you NEVER want to. But if you are curious about the dreams I had during that month, read this before bed [Inside COM+](https://www.thrysoee.dk/InsideCOM+/ch05c.htm). Really, don't do it. COM must have seemed like a good idea to someone at some point in history, right? Developers! Developers! Developers! ... Developers! 
I mean, GObject seems totally great now, really.\n", "file_path": "README.md", "rank": 72, "score": 12733.455108078464 }, { "content": "#[macro_use]\n\nextern crate serde_derive;\n\nmod comglue;\n\nmod server;\n\nuse anyhow::{bail, Result};\n\nuse com::{\n\n production::Class,\n\n sys::{CLASS_E_CLASSNOTAVAILABLE, CLSID, HRESULT, IID, NOERROR, SELFREG_E_CLASS},\n\n};\n\nuse comglue::glue::NetidxRTD;\n\nuse comglue::interface::CLSID;\n\nuse std::{ffi::c_void, mem, ptr};\n\n\n\n// sadly this doesn't register the class name, just the ID, so we must do all the\n\n// registration ourselves because excel requires the name to be mapped to the id\n\n//com::inproc_dll_module![(CLSID, NetidxRTD),];\n\n\n\nstatic mut _HMODULE: *mut c_void = ptr::null_mut();\n\n\n\n#[no_mangle]\n", "file_path": "src/lib.rs", "rank": 73, "score": 19.32429645556028 }, { "content": " \"class id passed to DllGetClassObject should never be null\"\n\n );\n\n\n\n let class_id = &*class_id;\n\n if class_id == &CLSID {\n\n let instance = <NetidxRTD as Class>::Factory::allocate();\n\n instance.QueryInterface(&*iid, result)\n\n } else {\n\n CLASS_E_CLASSNOTAVAILABLE\n\n }\n\n}\n\n\n\nuse winreg::{enums::*, RegKey};\n\n\n\nextern \"system\" {\n\n fn GetModuleFileNameA(hModule: *mut c_void, lpFilename: *mut i8, nSize: u32) -> u32;\n\n}\n\n\n\nunsafe fn get_dll_file_path(hmodule: *mut c_void) -> String {\n\n const MAX_FILE_PATH_LENGTH: usize = 260;\n", "file_path": "src/lib.rs", "rank": 74, "score": 17.73276804082419 }, { "content": "\n\n let mut path = [0u8; MAX_FILE_PATH_LENGTH];\n\n\n\n let len = GetModuleFileNameA(\n\n hmodule,\n\n path.as_mut_ptr() as *mut _,\n\n MAX_FILE_PATH_LENGTH as _,\n\n );\n\n\n\n String::from_utf8(path[..len as usize].to_vec()).unwrap()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 75, "score": 12.66511033833714 }, { "content": "unsafe extern \"system\" fn DllMain(\n\n hinstance: *mut c_void,\n\n fdw_reason: u32,\n\n _reserved: *mut c_void,\n\n) -> i32 {\n\n const 
DLL_PROCESS_ATTACH: u32 = 1;\n\n if fdw_reason == DLL_PROCESS_ATTACH {\n\n _HMODULE = hinstance;\n\n }\n\n 1\n\n}\n\n\n\n#[no_mangle]\n\nunsafe extern \"system\" fn DllGetClassObject(\n\n class_id: *const CLSID,\n\n iid: *const IID,\n\n result: *mut *mut c_void,\n\n) -> HRESULT {\n\n assert!(\n\n !class_id.is_null(),\n", "file_path": "src/lib.rs", "rank": 76, "score": 12.225991515686454 } ]
Rust
app/gui/src/presenter.rs
hubertp/enso
d3846578cceb4844f1f94d4e7ca51762bba3fada
pub mod code; pub mod graph; pub mod project; pub mod searcher; pub use code::Code; pub use graph::Graph; pub use project::Project; pub use searcher::Searcher; use crate::prelude::*; use crate::controller::ide::StatusNotification; use crate::executor::global::spawn_stream_handler; use crate::presenter; use enso_frp as frp; use ide_view as view; use ide_view::graph_editor::SharedHashMap; #[derive(Debug)] struct Model { logger: Logger, current_project: RefCell<Option<Project>>, controller: controller::Ide, view: view::root::View, } impl Model { fn setup_and_display_new_project(self: Rc<Self>) { *self.current_project.borrow_mut() = None; if let Some(project_model) = self.controller.current_project() { self.view.switch_view_to_project(); let project_view = self.view.project(); let status_bar = self.view.status_bar().clone_ref(); let breadcrumbs = &project_view.graph().model.breadcrumbs; breadcrumbs.project_name(project_model.name().to_string()); let status_notifications = self.controller.status_notifications().clone_ref(); let ide_controller = self.controller.clone_ref(); let project_controller = controller::Project::new(project_model, status_notifications.clone_ref()); executor::global::spawn(async move { match presenter::Project::initialize( ide_controller, project_controller, project_view, status_bar, ) .await { Ok(project) => { *self.current_project.borrow_mut() = Some(project); } Err(err) => { let err_msg = format!("Failed to initialize project: {}", err); error!(self.logger, "{err_msg}"); status_notifications.publish_event(err_msg) } } }); } } pub fn open_project(&self, project_name: String) { let logger = self.logger.clone_ref(); let controller = self.controller.clone_ref(); crate::executor::global::spawn(async move { if let Ok(managing_api) = controller.manage_projects() { if let Err(err) = managing_api.open_project_by_name(project_name).await { error!(logger, "Cannot open project by name: {err}."); } } else { warning!(logger, "Project opening failed: no 
ProjectManagingAPI available."); } }); } fn create_project(&self, template: Option<&str>) { let logger = self.logger.clone_ref(); let controller = self.controller.clone_ref(); let template = template.map(ToOwned::to_owned); crate::executor::global::spawn(async move { if let Ok(managing_api) = controller.manage_projects() { if let Err(err) = managing_api.create_new_project(template.clone()).await { if let Some(template) = template { error!( logger, "Could not create new project from template {template}: {err}." ); } else { error!(logger, "Could not create new project: {err}."); } } } else { warning!(logger, "Project creation failed: no ProjectManagingAPI available."); } }); } } #[derive(Clone, CloneRef, Debug)] pub struct Presenter { network: frp::Network, model: Rc<Model>, } impl Presenter { pub fn new(controller: controller::Ide, view: ide_view::root::View) -> Self { let logger = Logger::new("Presenter"); let current_project = default(); let model = Rc::new(Model { logger, controller, view, current_project }); frp::new_network! 
{ network let welcome_view_frp = &model.view.welcome_screen().frp; eval welcome_view_frp.open_project((name) model.open_project(name.to_owned())); eval welcome_view_frp.create_project((templ) model.create_project(templ.as_deref())); let root_frp = &model.view.frp; root_frp.switch_view_to_project <+ welcome_view_frp.create_project.constant(()); root_frp.switch_view_to_project <+ welcome_view_frp.open_project.constant(()); } Self { model, network }.init() } fn init(self) -> Self { self.setup_status_bar_notification_handler(); self.setup_controller_notification_handler(); self.set_projects_list_on_welcome_screen(); self.model.clone_ref().setup_and_display_new_project(); self } fn setup_status_bar_notification_handler(&self) { use controller::ide::BackgroundTaskHandle as ControllerHandle; use ide_view::status_bar::process::Id as ViewHandle; let logger = self.model.logger.clone_ref(); let process_map = SharedHashMap::<ControllerHandle, ViewHandle>::new(); let status_bar = self.model.view.status_bar().clone_ref(); let status_notifications = self.model.controller.status_notifications().subscribe(); let weak = Rc::downgrade(&self.model); spawn_stream_handler(weak, status_notifications, move |notification, _| { match notification { StatusNotification::Event { label } => { status_bar.add_event(ide_view::status_bar::event::Label::new(label)); } StatusNotification::BackgroundTaskStarted { label, handle } => { status_bar.add_process(ide_view::status_bar::process::Label::new(label)); let view_handle = status_bar.last_process.value(); process_map.insert(handle, view_handle); } StatusNotification::BackgroundTaskFinished { handle } => { if let Some(view_handle) = process_map.remove(&handle) { status_bar.finish_process(view_handle); } else { warning!(logger, "Controllers finished process not displayed in view"); } } } futures::future::ready(()) }); } fn setup_controller_notification_handler(&self) { let stream = self.model.controller.subscribe(); let weak = 
Rc::downgrade(&self.model); spawn_stream_handler(weak, stream, move |notification, model| { match notification { controller::ide::Notification::NewProjectCreated | controller::ide::Notification::ProjectOpened => model.setup_and_display_new_project(), } futures::future::ready(()) }); } fn set_projects_list_on_welcome_screen(&self) { let controller = self.model.controller.clone_ref(); let welcome_view_frp = self.model.view.welcome_screen().frp.clone_ref(); let logger = self.model.logger.clone_ref(); crate::executor::global::spawn(async move { if let Ok(project_manager) = controller.manage_projects() { match project_manager.list_projects().await { Ok(projects) => { let names = projects.into_iter().map(|p| p.name.into()).collect::<Vec<_>>(); welcome_view_frp.set_projects_list(names); } Err(err) => { error!(logger, "Unable to get list of projects: {err}."); } } } }); } } #[allow(missing_docs)] impl Presenter { pub fn view(&self) -> &view::root::View { &self.model.view } pub fn controller(&self) -> &controller::Ide { &self.model.controller } }
pub mod code; pub mod graph; pub mod project; pub mod searcher; pub use code::Code; pub use graph::Graph; pub use project::Project; pub use searcher::Searcher; use crate::prelude::*; use crate::controller::ide::StatusNotification; use crate::executor::global::spawn_stream_handler; use crate::presenter; use enso_frp as frp; use ide_view as view; use ide_view::graph_editor::SharedHashMap; #[derive(Debug)] struct Model { logger: Logger, current_project: RefCell<Option<Project>>, controller: controller::Ide, view: view::root::View, } impl Model { fn setup_and_display_new_project(self: Rc<Self>) { *self.current_project.borrow_mut() = None; if let Some(project_model) = self.controller.current_project() { self.view.switch_view_to_project(); let project_view = self.view.project(); let status_bar = self.view.status_bar().clone_ref(); let breadcrumbs = &project_view.graph().model.breadcrumbs; breadcrumbs.project_name(project_model.name().to_string()); let status_notifications = self.controller.status_notifications().clone_ref(); let ide_controller = self.controller.clone_ref(); let project_controller = controller::Project::new(project_model, status_notifications.clone_ref()); executor::global::spawn(async move { match presenter::Project::initialize( ide_controller, project_controller, project_view, status_bar, ) .await { Ok(project) => { *self.current_project.borrow_mut() = Some(project); } Err(err) => { let err_msg = format!("Failed to initialize project: {}", err); error!(self.logger, "{err_msg}"); status_notifications.publish_event(err_msg) } } }); } } pub fn open_project(&self, project_name: String) { let logger = self.logger.clone_ref(); let controller = self.controller.clone_ref(); crate::executor::global::spawn(async move { if let Ok(managing_api) = controller.manage_projects() { if let Err(err) = managing_api.open_project_by_name(project_name).await { error!(logger, "Cannot open project by name: {err}."); } } else { warning!(logger, "Project opening failed: no 
ProjectManagingAPI available."); } }); } fn create_project(&self, template: Option<&str>) { let logger = self.logger.clone_ref(); let controller = self.controller.clone_ref(); let template = template.map(ToOwned::to_owned); crate::executor::global::spawn(async move { if let Ok(managing_api) = controller.manage_projects() { if let Err(err) = managing_api.create_new_project(template.clone()).await { if let Some(template) = template { error!( logger, "Could not create new project from template {template}: {err}." ); } else { error!(logger, "Could not create new project: {err}."); } } } else { warning!(logger, "Project creation failed: no ProjectManagingAPI available."); } }); } } #[derive(Clone, CloneRef, Debug)] pub struct Presenter { network: frp::Network, model: Rc<Model>, } impl Presenter { pub fn new(controller: controller::Ide, view: ide_view::root::View) -> Self { let logger = Logger::new("Presenter"); let current_project = default(); let model = Rc::new(Model { logger, controller, view, current_project }); frp::new_network! { network let welcome_view_frp = &model.view.welcome_screen().frp; eval welcome_view_frp.open_project((name) model.open_project(name.to_owned())); eval welcome_view_frp.create_project((templ) model.create_project(templ.as_deref())); let root_frp = &model.view.frp; root_frp.switch_view_to_project <+ welcome_view_frp.create_project.constant(()); root_frp.switch_view_to_project <+ welcome_view_frp.open_project.constant(()); } Self { model, network }.init() } fn init(self) -> Self { self.setup_status_bar_notification_handler();
fn setup_status_bar_notification_handler(&self) { use controller::ide::BackgroundTaskHandle as ControllerHandle; use ide_view::status_bar::process::Id as ViewHandle; let logger = self.model.logger.clone_ref(); let process_map = SharedHashMap::<ControllerHandle, ViewHandle>::new(); let status_bar = self.model.view.status_bar().clone_ref(); let status_notifications = self.model.controller.status_notifications().subscribe(); let weak = Rc::downgrade(&self.model); spawn_stream_handler(weak, status_notifications, move |notification, _| { match notification { StatusNotification::Event { label } => { status_bar.add_event(ide_view::status_bar::event::Label::new(label)); } StatusNotification::BackgroundTaskStarted { label, handle } => { status_bar.add_process(ide_view::status_bar::process::Label::new(label)); let view_handle = status_bar.last_process.value(); process_map.insert(handle, view_handle); } StatusNotification::BackgroundTaskFinished { handle } => { if let Some(view_handle) = process_map.remove(&handle) { status_bar.finish_process(view_handle); } else { warning!(logger, "Controllers finished process not displayed in view"); } } } futures::future::ready(()) }); } fn setup_controller_notification_handler(&self) { let stream = self.model.controller.subscribe(); let weak = Rc::downgrade(&self.model); spawn_stream_handler(weak, stream, move |notification, model| { match notification { controller::ide::Notification::NewProjectCreated | controller::ide::Notification::ProjectOpened => model.setup_and_display_new_project(), } futures::future::ready(()) }); } fn set_projects_list_on_welcome_screen(&self) { let controller = self.model.controller.clone_ref(); let welcome_view_frp = self.model.view.welcome_screen().frp.clone_ref(); let logger = self.model.logger.clone_ref(); crate::executor::global::spawn(async move { if let Ok(project_manager) = controller.manage_projects() { match project_manager.list_projects().await { Ok(projects) => { let names = 
projects.into_iter().map(|p| p.name.into()).collect::<Vec<_>>(); welcome_view_frp.set_projects_list(names); } Err(err) => { error!(logger, "Unable to get list of projects: {err}."); } } } }); } } #[allow(missing_docs)] impl Presenter { pub fn view(&self) -> &view::root::View { &self.model.view } pub fn controller(&self) -> &controller::Ide { &self.model.controller } }
self.setup_controller_notification_handler(); self.set_projects_list_on_welcome_screen(); self.model.clone_ref().setup_and_display_new_project(); self }
function_block-function_prefix_line
[ { "content": "/// Create providers from the current controller's action list.\n\npub fn create_providers_from_controller(logger: &Logger, controller: &controller::Searcher) -> Any {\n\n use controller::searcher::Actions;\n\n match controller.actions() {\n\n Actions::Loading => as_any(Rc::new(list_view::entry::EmptyProvider)),\n\n Actions::Loaded { list } => {\n\n let user_action = controller.current_user_action();\n\n let intended_function = controller.intended_function_suggestion();\n\n let provider = Action { actions: list, user_action, intended_function };\n\n as_any(Rc::new(provider))\n\n }\n\n Actions::Error(err) => {\n\n error!(logger, \"Error while obtaining searcher action list: {err}\");\n\n as_any(Rc::new(list_view::entry::EmptyProvider))\n\n }\n\n }\n\n}\n\n\n", "file_path": "app/gui/src/presenter/searcher/provider.rs", "rank": 0, "score": 578069.9562998526 }, { "content": "/// The default content of the newly created initial main module file.\n\npub fn default_main_module_code() -> String {\n\n default_main_method_code()\n\n}\n\n\n", "file_path": "app/gui/src/controller/project.rs", "rank": 1, "score": 549197.733778896 }, { "content": "/// The code with definition of the default `main` method.\n\npub fn default_main_method_code() -> String {\n\n format!(r#\"{} = \"Hello, World!\"\"#, MAIN_DEFINITION_NAME)\n\n}\n\n\n", "file_path": "app/gui/src/controller/project.rs", "rank": 2, "score": 549191.3588501667 }, { "content": "#[derive(Debug, Clone, CloneRef)]\n\n#[allow(missing_docs)]\n\nstruct ProjectNameModel {\n\n app: Application,\n\n logger: Logger,\n\n display_object: display::object::Instance,\n\n view: background::View,\n\n style: StyleWatch,\n\n text_field: text::Area,\n\n project_name: Rc<RefCell<String>>,\n\n}\n\n\n\nimpl ProjectNameModel {\n\n /// Constructor.\n\n fn new(app: &Application) -> Self {\n\n let app = app.clone_ref();\n\n let scene = app.display.scene();\n\n let logger = Logger::new(\"ProjectName\");\n\n let display_object = 
display::object::Instance::new(&logger);\n\n // FIXME : StyleWatch is unsuitable here, as it was designed as an internal tool for shape\n\n // system (#795)\n\n let style = StyleWatch::new(&scene.style_sheet);\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 3, "score": 493623.310785929 }, { "content": "/// Returns the path to package.yaml file for given project.\n\npub fn package_yaml_path(project_name: &str) -> String {\n\n match platform::current() {\n\n Some(Platform::Linux) | Some(Platform::MacOS) =>\n\n format!(\"~/enso/projects/{}/package.yaml\", project_name),\n\n Some(Platform::Windows) =>\n\n format!(\"%userprofile%\\\\enso\\\\projects\\\\{}\\\\package.yaml\", project_name),\n\n _ => format!(\"<path-to-enso-projects>/{}/package.yaml\", project_name),\n\n }\n\n}\n\n\n\n\n\n// ==============\n\n// === Handle ===\n\n// ==============\n\n\n\n// === SetupResult ===\n\n\n\n/// The result of initial project setup, containing handy controllers to be used in the initial\n\n/// view.\n\n#[derive(Clone, CloneRef, Debug)]\n", "file_path": "app/gui/src/controller/project.rs", "rank": 4, "score": 470428.7711115366 }, { "content": "/// Suggests a variable name for storing results of the given expression.\n\n///\n\n/// Name will try to express result of an infix operation (`sum` for `a+b`), kind of literal\n\n/// (`number` for `5`) and target function name for prefix chain.\n\n///\n\n/// The generated name is not unique and might collide with already present identifiers.\n\npub fn name_for_ast(ast: &Ast) -> String {\n\n use ast::*;\n\n match ast.shape() {\n\n Shape::Var(ident) => ident.name.clone(),\n\n Shape::Cons(ident) => ident.name.to_lowercase(),\n\n Shape::Number(_) => \"number\".into(),\n\n Shape::DanglingBase(_) => \"number\".into(),\n\n Shape::TextLineRaw(_) => \"text\".into(),\n\n Shape::TextLineFmt(_) => \"text\".into(),\n\n Shape::TextBlockRaw(_) => \"text\".into(),\n\n Shape::TextBlockFmt(_) => 
\"text\".into(),\n\n Shape::TextUnclosed(_) => \"text\".into(),\n\n Shape::Opr(opr) => match opr.name.as_ref() {\n\n \"+\" => \"sum\",\n\n \"*\" => \"product\",\n\n \"-\" => \"difference\",\n\n \"/\" => \"quotient\",\n\n _ => \"operator\",\n\n }\n\n .into(),\n", "file_path": "app/gui/src/controller/graph.rs", "rank": 5, "score": 466850.2442375739 }, { "content": "/// Return an FRP endpoint that indicates the current selection mode. This method sets up the logic\n\n/// for deriving the selection mode from the graph editor FRP.\n\npub fn get_mode(network: &frp::Network, editor: &crate::FrpEndpoints) -> frp::stream::Stream<Mode> {\n\n frp::extend! { network\n\n\n\n let multi_select_flag = crate::enable_disable_toggle\n\n ( network\n\n , &editor.enable_node_multi_select\n\n , &editor.disable_node_multi_select\n\n , &editor.toggle_node_multi_select\n\n );\n\n\n\n let merge_select_flag = crate::enable_disable_toggle\n\n ( network\n\n , &editor.enable_node_merge_select\n\n , &editor.disable_node_merge_select\n\n , &editor.toggle_node_merge_select\n\n );\n\n\n\n let subtract_select_flag = crate::enable_disable_toggle\n\n ( network\n\n , &editor.enable_node_subtract_select\n", "file_path": "app/gui/view/graph-editor/src/selection.rs", "rank": 6, "score": 465742.674645573 }, { "content": "/// The Path of the module initially opened after opening project in IDE.\n\npub fn initial_module_path(project: &model::Project) -> FallibleResult<model::module::Path> {\n\n model::module::Path::from_name_segments(project.project_content_root_id(), &[\n\n INITIAL_MODULE_NAME,\n\n ])\n\n}\n", "file_path": "app/gui/src/ide.rs", "rank": 7, "score": 444141.1873652619 }, { "content": "/// Convert the syntax tree into a string.\n\npub fn show_code(tokens: &impl ToTokens) -> String {\n\n repr(tokens)\n\n}\n", "file_path": "lib/rust/parser/flexer/src/generate.rs", "rank": 8, "score": 420805.1078255212 }, { "content": "/// The default metric, recommended by this library\n\npub fn default() -> impl 
Metric {\n\n SubsequentLettersBonus::default().sum(CaseMatchBonus::default())\n\n}\n\n\n\n\n\n\n\n// =======================\n\n// === Implementations ===\n\n// =======================\n\n\n\n// === Sum ===\n\n\n\n/// The structure representing the sum of two metrics\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct Sum<Metrics1, Metrics2>(Metrics1, Metrics2);\n\n\n\nimpl<M1: Metric, M2: Metric> Metric for Sum<M1, M2> {\n\n fn measure_vertex(&self, vertex: subsequence_graph::Vertex, text: &str, pattern: &str) -> f32 {\n\n let Self(left, right) = self;\n\n let left = left.measure_vertex(vertex, text, pattern);\n", "file_path": "lib/rust/fuzzly/src/metric.rs", "rank": 9, "score": 403391.7307142762 }, { "content": "/// Select a new name for the project in a form of <suggested_name>_N, where N is a unique sequence\n\n/// number.\n\nfn choose_new_project_name(existing_names: &HashSet<String>, suggested_name: &str) -> ReferentName {\n\n let first_candidate = suggested_name.to_owned();\n\n let nth_project_name = |i| iformat!(\"{suggested_name}_{i}\");\n\n let candidates = (1..).map(nth_project_name);\n\n let mut candidates = std::iter::once(first_candidate).chain(candidates);\n\n // The iterator have no end, so we can safely unwrap.\n\n let name = candidates.find(|c| !existing_names.contains(c)).unwrap();\n\n ReferentName::from_identifier_text(name).expect(\"Empty project name provided\")\n\n}\n", "file_path": "app/gui/src/controller/ide/desktop.rs", "rank": 10, "score": 403169.47892849817 }, { "content": "/// The identifier of the project's main module.\n\npub fn main_module_id() -> model::module::Id {\n\n // We can just assume that `INITIAL_MODULE_NAME` is valid. 
This is verified by a test.\n\n model::module::Id::try_new([INITIAL_MODULE_NAME]).unwrap()\n\n}\n\n\n\n\n\n\n\n// =================\n\n// === Utilities ===\n\n// =================\n\n\n", "file_path": "app/gui/src/controller/project.rs", "rank": 11, "score": 401572.51690117526 }, { "content": "fn _fence<T, Out>(network: &frp::Network, trigger: T) -> (frp::Stream, frp::Stream<bool>)\n\nwhere\n\n T: frp::HasOutput<Output = Out>,\n\n T: Into<frp::Stream<Out>>,\n\n Out: frp::Data, {\n\n let trigger = trigger.into();\n\n frp::extend! { network\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 12, "score": 401355.02681987855 }, { "content": "/// Default node position -- acts as a starting points for laying out nodes with no position defined\n\n/// in the metadata.\n\npub fn default_node_position() -> Vector2 {\n\n Vector2::new(DEFAULT_NODE_X_POSITION, DEFAULT_NODE_Y_POSITION)\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Model ===\n\n// =============\n\n\n", "file_path": "app/gui/src/presenter/graph.rs", "rank": 13, "score": 391685.4963317782 }, { "content": "/// Read the text from the clipboard. Please note that:\n\n/// - It uses the [Clipboard API](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API)\n\n/// under the hood.\n\n/// - This is an asynchronous function. The callback with the text will be called when the text will\n\n/// be ready. The delay may be caused for example by waiting for permissions from the user.\n\n/// - This will probably display a permission prompt to the user for the first time it is used.\n\n/// - The website has to be served over HTTPS for this function to work correctly.\n\n/// - This function needs to be called from within user-initiated event callbacks, like mouse or key\n\n/// press. 
Otherwise it will not work.\n\n///\n\n/// Moreover, this function works in a very strange way in Firefox.\n\n/// [Firefox only supports reading the clipboard in browser extensions](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard/readText).\n\n/// In such case this function fallbacks to the `paste` event. Whenever it is triggered, it\n\n/// remembers its value and passes it to the callback. This means, that in Firefox this function\n\n/// will work correctly only when called as a direct action to the `cmd + v` shortcut.\n\n///\n\n/// To learn more, see this [StackOverflow question](https://stackoverflow.com/questions/400212/how-do-i-copy-to-the-clipboard-in-javascript).\n\npub fn read_text(callback: impl Fn(String) + 'static) {\n\n let handler: Rc<RefCell<Option<ReadTextClosure>>> = default();\n\n let handler_clone = handler.clone_ref();\n\n let closure = Closure::wrap(Box::new(move |result| {\n\n *handler_clone.borrow_mut() = None;\n\n callback(result);\n\n }) as Box<dyn Fn(String)>);\n\n *handler.borrow_mut() = Some(closure);\n\n readText(handler.borrow().as_ref().unwrap());\n\n}\n", "file_path": "lib/rust/web/src/clipboard.rs", "rank": 14, "score": 383815.89508662117 }, { "content": "#[allow(non_snake_case)]\n\npub fn Error<S: Into<String>>(message: S) -> Error {\n\n let message = message.into();\n\n Error { message }\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\nimpl From<JsValue> for Error {\n\n fn from(t: JsValue) -> Self {\n\n let message = format!(\"{:?}\", t);\n\n Self { message }\n\n }\n\n}\n\n\n\n\n\n\n\n// ==============\n\n// === String ===\n\n// ==============\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n #[allow(unsafe_code)]\n\n #[wasm_bindgen(js_name = \"String\")]\n\n fn js_to_string_inner(s: &JsValue) -> String;\n\n}\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 15, "score": 379189.1496880877 }, { "content": "/// Get metadata description for error visualization.\n\npub fn metadata() -> Metadata {\n\n 
let preprocessor = preprocessor();\n\n Metadata { preprocessor }\n\n}\n\n\n\n// =============\n\n// === Input ===\n\n// =============\n\n\n\n/// The input for Error Visualization.\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct Input {\n\n pub kind: Option<Kind>,\n\n pub message: String,\n\n}\n\n\n\n\n\n\n\n// =============\n", "file_path": "app/gui/view/graph-editor/src/builtin/visualization/native/error.rs", "rank": 16, "score": 375895.42882391973 }, { "content": "/// Frp that can be used in a Component. The FRP requires an initializer that will be called during\n\n/// the construction of the component. `Default` + `CommandApi` are usually implemented when using\n\n/// the `ensogl_core::define_endpoints!` macro to create an FRP API.\n\npub trait Frp<Model>: Default + CommandApi {\n\n /// Frp initializer.\n\n fn init(&self, app: &Application, model: &Model, style: &StyleWatchFrp);\n\n\n\n /// Set of default shortcuts to be used in the `CommandApi`. See\n\n /// `lib/rust/ensogl/core/src/application/command.rs` for more details.\n\n fn default_shortcuts() -> Vec<shortcut::Shortcut> {\n\n default()\n\n }\n\n}\n\n\n\n\n\n\n\n// =================\n\n// === Component ===\n\n// =================\n\n\n\n/// Base struct for UI components in EnsoGL. 
Contains the Data/Shape model and the FPR exposing its\n\n/// behaviour.\n\n#[derive(CloneRef, Debug, Derivative)]\n", "file_path": "lib/rust/ensogl/component/gui/src/component.rs", "rank": 17, "score": 373937.791805415 }, { "content": "#[derive(Debug)]\n\nstruct Model {\n\n logger: Logger,\n\n controller: controller::Text,\n\n view: view::code_editor::View,\n\n}\n\n\n\nimpl Model {\n\n fn new(controller: controller::Text, view: view::code_editor::View) -> Self {\n\n let logger = Logger::new(\"presenter::code\");\n\n Self { logger, controller, view }\n\n }\n\n\n\n fn apply_change_from_view(&self, change: &enso_text::Change) {\n\n let converted = enso_text::Change { range: change.range, text: change.text.to_string() };\n\n if let Err(err) = self.controller.apply_text_change(converted) {\n\n error!(self.logger, \"Error while applying text change: {err}\");\n\n }\n\n }\n\n\n\n async fn emit_event_with_controller_code(&self, endpoint: &frp::Source<ImString>) {\n", "file_path": "app/gui/src/presenter/code.rs", "rank": 18, "score": 371783.0808116094 }, { "content": "#[derive(Clone, CloneRef, Debug)]\n\nstruct Model {\n\n logger: Logger,\n\n controller: controller::Searcher,\n\n view: view::project::View,\n\n input_view: ViewNodeId,\n\n}\n\n\n\nimpl Model {\n\n fn new(\n\n parent: impl AnyLogger,\n\n controller: controller::Searcher,\n\n view: view::project::View,\n\n input_view: ViewNodeId,\n\n ) -> Self {\n\n let logger = parent.sub(\"presenter::Searcher\");\n\n Self { logger, controller, view, input_view }\n\n }\n\n\n\n fn input_changed(&self, new_input: &str) {\n\n if let Err(err) = self.controller.set_input(new_input.to_owned()) {\n", "file_path": "app/gui/src/presenter/searcher.rs", "rank": 19, "score": 371672.22396744933 }, { "content": "#[allow(unused)]\n\n#[derive(Debug)]\n\nstruct Model {\n\n logger: Logger,\n\n controller: controller::Project,\n\n module_model: model::Module,\n\n graph_controller: controller::ExecutedGraph,\n\n ide_controller: 
controller::Ide,\n\n view: view::project::View,\n\n status_bar: view::status_bar::View,\n\n graph: presenter::Graph,\n\n code: presenter::Code,\n\n searcher: RefCell<Option<presenter::Searcher>>,\n\n}\n\n\n\nimpl Model {\n\n fn new(\n\n ide_controller: controller::Ide,\n\n controller: controller::Project,\n\n init_result: controller::project::InitializationResult,\n\n view: view::project::View,\n\n status_bar: view::status_bar::View,\n", "file_path": "app/gui/src/presenter/project.rs", "rank": 20, "score": 371413.4145812257 }, { "content": "#[derive(Debug)]\n\nstruct Model {\n\n logger: Logger,\n\n project: model::Project,\n\n controller: controller::ExecutedGraph,\n\n view: view::graph_editor::GraphEditor,\n\n state: Rc<State>,\n\n _visualization: Visualization,\n\n _execution_stack: CallStack,\n\n}\n\n\n\nimpl Model {\n\n pub fn new(\n\n project: model::Project,\n\n controller: controller::ExecutedGraph,\n\n view: view::graph_editor::GraphEditor,\n\n ) -> Self {\n\n let logger = Logger::new(\"presenter::Graph\");\n\n let state: Rc<State> = default();\n\n let visualization = Visualization::new(\n\n project.clone_ref(),\n", "file_path": "app/gui/src/presenter/graph.rs", "rank": 21, "score": 371378.833108309 }, { "content": "#[derive(Clone, CloneRef, Debug)]\n\nstruct Model {\n\n app: Application,\n\n logger: Logger,\n\n display_object: display::object::Instance,\n\n list: ListView<Entry>,\n\n documentation: documentation::View,\n\n doc_provider: Rc<CloneRefCell<AnyDocumentationProvider>>,\n\n}\n\n\n\nimpl Model {\n\n fn new(app: &Application) -> Self {\n\n let scene = app.display.scene();\n\n let app = app.clone_ref();\n\n let logger = Logger::new(\"SearcherView\");\n\n let display_object = display::object::Instance::new(&logger);\n\n let list = app.new_view::<ListView<Entry>>();\n\n let documentation = documentation::View::new(scene);\n\n let doc_provider = default();\n\n scene.layers.above_nodes.add_exclusive(&list);\n\n 
display_object.add_child(&documentation);\n", "file_path": "app/gui/view/src/searcher.rs", "rank": 22, "score": 371300.43048043095 }, { "content": "#[derive(Clone, CloneRef, Debug)]\n\nstruct Model {\n\n app: Application,\n\n logger: Logger,\n\n display_object: display::object::Instance,\n\n /// These buttons are present only in a cloud environment.\n\n window_control_buttons: Immutable<Option<crate::window_control_buttons::View>>,\n\n graph_editor: Rc<GraphEditor>,\n\n searcher: searcher::View,\n\n code_editor: code_editor::View,\n\n fullscreen_vis: Rc<RefCell<Option<visualization::fullscreen::Panel>>>,\n\n prompt_background: prompt_background::View,\n\n prompt: ensogl_text::Area,\n\n open_dialog: Rc<OpenDialog>,\n\n debug_mode_popup: debug_mode_popup::View,\n\n}\n\n\n\nimpl Model {\n\n fn new(app: &Application) -> Self {\n\n let logger = Logger::new(\"project::View\");\n\n let scene = app.display.scene();\n", "file_path": "app/gui/view/src/project.rs", "rank": 23, "score": 371041.54026374465 }, { "content": "/// Write the provided text to the clipboard. Please note that:\n\n/// - It uses the [Clipboard API](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API)\n\n/// under the hood.\n\n/// - This is an asynchronous function. The results will not appear in the clipboard immediately.\n\n/// The delay may be caused for example by waiting for permission from the user.\n\n/// - This will probably display a permission prompt to the user for the first time it is used.\n\n/// - The website has to be served over HTTPS for this function to work correctly.\n\n/// - This function needs to be called from within user-initiated event callbacks, like mouse or key\n\n/// press. 
Otherwise it will not work.\n\n///\n\n/// Moreover, in case something fails, this function implements a fallback mechanism which tries\n\n/// to create a hidden text field, fill it with the text and use the obsolete\n\n/// [Document.execCommand](https://developer.mozilla.org/en-US/docs/Web/API/Document/execCommand)\n\n/// function.\n\n///\n\n/// To learn more, see this [StackOverflow question](https://stackoverflow.com/questions/400212/how-do-i-copy-to-the-clipboard-in-javascript).\n\npub fn write_text(text: impl Into<String>) {\n\n let text = text.into();\n\n writeText(text)\n\n}\n\n\n", "file_path": "lib/rust/web/src/clipboard.rs", "rank": 24, "score": 370265.2680073273 }, { "content": "/// Converts given `JsValue` into a `String`. Uses JS's `String` function,\n\n/// see: https://www.w3schools.com/jsref/jsref_string.asp\n\npub fn js_to_string(s: impl AsRef<JsValue>) -> String {\n\n js_to_string_inner(s.as_ref())\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Utils ===\n\n// =============\n\n\n\n/// Handle returned from `ignore_context_menu`. 
It unignores when the handle is dropped.\n\n#[derive(Debug)]\n\npub struct IgnoreContextMenuHandle {\n\n target: EventTarget,\n\n closure: Closure<dyn FnMut(MouseEvent)>,\n\n}\n\n\n\nimpl Drop for IgnoreContextMenuHandle {\n\n fn drop(&mut self) {\n\n let callback: &Function = self.closure.as_ref().unchecked_ref();\n\n self.target.remove_event_listener_with_callback(\"contextmenu\", callback).ok();\n\n }\n\n}\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 25, "score": 368479.6600983607 }, { "content": "#[derive(Clone, CloneRef, Debug)]\n\nstruct Model {\n\n logger: Logger,\n\n controller: controller::Visualization,\n\n graph_view: view::graph_editor::GraphEditor,\n\n manager: Rc<Manager>,\n\n error_manager: Rc<Manager>,\n\n state: Rc<graph::state::State>,\n\n}\n\n\n\nimpl Model {\n\n /// Handle the showing visualization UI.\n\n fn visualization_shown(&self, node_id: ViewNodeId, metadata: visualization_view::Metadata) {\n\n self.update_visualization(node_id, &self.manager, Some(metadata));\n\n }\n\n\n\n /// Handle the hiding in UI.\n\n fn visualization_hidden(&self, node_id: view::graph_editor::NodeId) {\n\n self.update_visualization(node_id, &self.manager, None);\n\n }\n\n\n", "file_path": "app/gui/src/presenter/graph/visualization.rs", "rank": 26, "score": 366478.68372191873 }, { "content": "/// Checks if the given AST has Opr shape with the name matching given string.\n\npub fn is_opr_named(ast: &Ast, name: impl Str) -> bool {\n\n let opr_opt = known::Opr::try_from(ast).ok();\n\n opr_opt.contains_if(|opr| opr.name == name.as_ref())\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/opr.rs", "rank": 27, "score": 366201.51209444104 }, { "content": "/// Split qualified name into segments, like `\"Int.add\"` into `[\"Int\",\"add\"]`.\n\npub fn name_segments(name: &str) -> impl Iterator<Item = &str> {\n\n name.split(predefined::ACCESS)\n\n}\n\n\n\n\n\n\n\n// ===========================\n\n// === Chain-related types ===\n\n// 
===========================\n\n\n\n/// A structure which keeps argument's AST with information about offset between it and an operator.\n\n/// We cannot use `Shifted` because `Shifted` assumes that offset is always before ast it contains,\n\n/// what is not a case here.\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Debug)]\n\npub struct ArgWithOffset<T> {\n\n pub arg: T,\n\n pub offset: usize,\n\n}\n\n\n\n/// Infix operator operand. Optional, as we deal with Section* nodes as well.\n\npub type Operand = Option<ArgWithOffset<Ast>>;\n\n\n\n/// Infix operator standing between (optional) operands.\n\npub type Operator = known::Opr;\n\n\n", "file_path": "app/gui/language/ast/impl/src/opr.rs", "rank": 28, "score": 366188.13869738585 }, { "content": "/// Generate the `run` function for the specialized lexer.\n\n///\n\n/// This function is what the user of the lexer will call to begin execution.\n\npub fn run_function(output_type_name: impl Str) -> Result<ImplItem, GenError> {\n\n let output_type_name = str_to_path(output_type_name)?;\n\n let tree: ImplItem = parse_quote! 
{\n\n pub fn run<R:ReaderOps>(&mut self, mut reader:R) -> LexingResult<#output_type_name> {\n\n self.set_up();\n\n reader.advance_char(&mut self.bookmarks);\n\n while self.run_current_state(&mut reader) == StageStatus::ExitSuccess {}\n\n let result = match self.status {\n\n StageStatus::ExitFinished => LexingResult::success(\n\n mem::take(&mut self.output)\n\n ),\n\n StageStatus::ExitFail => LexingResult::failure(\n\n mem::take(&mut self.output)\n\n ),\n\n _ => LexingResult::partial(mem::take(&mut self.output))\n\n };\n\n self.tear_down();\n\n result\n\n }\n\n };\n\n Ok(tree)\n\n}\n\n\n", "file_path": "lib/rust/parser/flexer/src/generate.rs", "rank": 29, "score": 365639.84656108863 }, { "content": "#[rustfmt::skip]\n\npub fn is_timeout_error(error: &failure::Error) -> bool {\n\n use json_rpc::messages;\n\n use json_rpc::RpcError;\n\n use json_rpc::RpcError::*;\n\n const TIMEOUT: i64 = constants::ErrorCodes::Timeout as i64;\n\n matches!(error.downcast_ref::<RpcError>()\n\n , Some(TimeoutError{..})\n\n | Some(RemoteError(messages::Error{code:TIMEOUT,..})))\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Tests ===\n\n// =============\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "app/gui/controller/engine-protocol/src/language_server.rs", "rank": 30, "score": 361861.7101278321 }, { "content": "#[derive(Debug, Clone, CloneRef)]\n\nstruct Model {\n\n display_object: display::object::Instance,\n\n label: PopupLabel,\n\n logger: Logger,\n\n}\n\n\n\nimpl Model {\n\n /// Constructor.\n\n pub fn new(app: &Application) -> Self {\n\n let logger = Logger::new(\"DebugModePopup\");\n\n let display_object = display::object::Instance::new(&logger);\n\n let label = PopupLabel::new(app);\n\n label.set_delay(LABEL_VISIBILITY_DELAY_MS);\n\n display_object.add_child(&label);\n\n\n\n Self { display_object, label, logger }\n\n }\n\n\n\n /// Show \"Debug Mode enabled\" label.\n\n pub fn show_enabled_label(&self) {\n", "file_path": 
"app/gui/view/src/debug_mode_popup.rs", "rank": 31, "score": 361762.8134158427 }, { "content": "#[derive(Debug)]\n\nstruct Model {\n\n logger: Logger,\n\n controller: controller::ExecutedGraph,\n\n view: view::graph_editor::GraphEditor,\n\n state: Rc<State>,\n\n}\n\n\n\nimpl Model {\n\n fn new(\n\n parent: impl AnyLogger,\n\n controller: controller::ExecutedGraph,\n\n view: view::graph_editor::GraphEditor,\n\n state: Rc<State>,\n\n ) -> Self {\n\n let logger = parent.sub(\"presenter::graph::CallStack\");\n\n Self { logger, controller, view, state }\n\n }\n\n\n\n fn expression_entered(&self, local_call: &view::graph_editor::LocalCall) {\n\n let local_call = LocalCall {\n", "file_path": "app/gui/src/presenter/graph/call_stack.rs", "rank": 32, "score": 361757.0372072323 }, { "content": "// TODO: convert camel-case names to nice names\n\nfn label(class: &JsValue) -> Result<String, Error> {\n\n try_str_field(class, field::LABEL).map(Ok).unwrap_or_else(|| {\n\n let class_name =\n\n try_str_field(class, \"name\").ok_or(Error::InvalidClass(InvalidClass::MissingName))?;\n\n Ok(class_name)\n\n })\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Error ===\n\n// =============\n\n\n\n/// Visualization definition or an error occurred during its construction.\n\npub type FallibleDefinition = Result<Definition, Error>;\n\n\n\n/// Error occurred during visualization definition.\n\n#[derive(Clone, Debug)]\n\n#[allow(missing_docs)]\n", "file_path": "app/gui/view/graph-editor/src/component/visualization/foreign/java_script/definition.rs", "rank": 33, "score": 361664.22467616503 }, { "content": "/// Get preprocessor configuration for error visualization.\n\npub fn preprocessor() -> instance::PreprocessorConfiguration {\n\n instance::PreprocessorConfiguration::new(PREPROCESSOR_CODE, PREPROCESSOR_MODULE)\n\n}\n\n\n", "file_path": "app/gui/view/graph-editor/src/builtin/visualization/native/error.rs", "rank": 34, "score": 358609.1726457873 }, { "content": "#[derive(Clone, Debug)]\n\nstruct 
Model {\n\n tooltip: Label,\n\n root: display::object::Instance,\n\n placement: Cell<Placement>,\n\n}\n\n\n\nimpl Model {\n\n fn new(app: &Application) -> Self {\n\n let logger = Logger::new(\"TooltipModel\");\n\n let tooltip = Label::new(app);\n\n let root = display::object::Instance::new(&logger);\n\n root.add_child(&tooltip);\n\n let placement = default();\n\n Self { tooltip, root, placement }\n\n }\n\n\n\n fn set_location(&self, position: Vector2, size: Vector2) {\n\n let layout_offset = match self.placement.get() {\n\n Placement::Top => Vector2::new(0.0, size.y * 0.5 + PLACEMENT_OFFSET),\n\n Placement::Bottom => Vector2::new(0.0, -size.y * 0.5 - PLACEMENT_OFFSET),\n", "file_path": "app/gui/view/graph-editor/src/component/tooltip.rs", "rank": 35, "score": 356851.64234645216 }, { "content": "pub fn expression_mock_string(label: &str) -> Expression {\n\n let pattern = Some(label.to_string());\n\n let code = format!(\"\\\"{}\\\"\", label);\n\n let parser = Parser::new_or_panic();\n\n let parameters = vec![];\n\n let ast = parser.parse_line_ast(&code).unwrap();\n\n let invocation_info = span_tree::generate::context::CalledMethodInfo { parameters };\n\n let ctx = span_tree::generate::MockContext::new_single(ast.id.unwrap(), invocation_info);\n\n let output_span_tree = span_tree::SpanTree::default();\n\n let input_span_tree = span_tree::SpanTree::new(&ast, &ctx).unwrap();\n\n let whole_expression_id = default();\n\n Expression { pattern, code, whole_expression_id, input_span_tree, output_span_tree }\n\n}\n\n\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 36, "score": 352330.3616279897 }, { "content": "#[wasm_bindgen]\n\n#[allow(dead_code)]\n\npub fn entry_point_searcher_icons() {\n\n web::forward_panic_hook_to_console();\n\n web::set_stack_trace_limit();\n\n\n\n let logger = Logger::new(\"Icons example\");\n\n let app = Application::new(&web::get_html_element_by_id(\"root\").unwrap());\n\n 
ensogl_hardcoded_theme::builtin::dark::register(&app);\n\n ensogl_hardcoded_theme::builtin::light::register(&app);\n\n ensogl_hardcoded_theme::builtin::light::enable(&app);\n\n let world = app.display.clone();\n\n mem::forget(app);\n\n let scene = world.scene();\n\n mem::forget(Navigator::new(scene, &scene.camera()));\n\n\n\n\n\n // === Grid ===\n\n\n\n let grid_div = web::create_div();\n\n grid_div.set_style_or_panic(\"width\", \"1000px\");\n\n grid_div.set_style_or_panic(\"height\", \"16px\");\n", "file_path": "app/gui/view/src/searcher/icons.rs", "rank": 37, "score": 350677.06860217446 }, { "content": "#[allow(missing_docs)]\n\n#[derive(Clone, Debug, Fail)]\n\n#[fail(display = \"Project with name \\\"{}\\\" not found.\", 0)]\n\nstruct ProjectNotFound(String);\n\n\n\n\n\n// === Managing API ===\n\n\n", "file_path": "app/gui/src/controller/ide.rs", "rank": 38, "score": 348799.84824573377 }, { "content": "#[derive(Clone, CloneRef, Debug)]\n\nstruct Model {\n\n display_object: display::object::Instance,\n\n hover_area: hover_area::View,\n\n icons: Icons,\n\n size: Rc<Cell<Vector2>>,\n\n shapes: compound::events::MouseEvents,\n\n styles: StyleWatch,\n\n}\n\n\n\nimpl Model {\n\n fn new(logger: impl AnyLogger, app: &Application) -> Self {\n\n let scene = app.display.scene();\n\n let logger = Logger::new_sub(logger, \"ActionBar\");\n\n let display_object = display::object::Instance::new(&logger);\n\n let hover_area = hover_area::View::new(&logger);\n\n let icons = Icons::new(&logger);\n\n let shapes = compound::events::MouseEvents::default();\n\n let size = default();\n\n let styles = StyleWatch::new(&scene.style_sheet);\n\n\n", "file_path": "app/gui/view/graph-editor/src/component/node/action_bar.rs", "rank": 39, "score": 348227.38356801815 }, { "content": "pub fn create_element(name: &str) -> Element {\n\n try_create_element(name).unwrap()\n\n}\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 40, "score": 346583.57058118033 }, { "content": "#[derive(Clone, 
CloneRef, Debug)]\n\nstruct Model {\n\n selection_menu: drop_down_menu::DropDownMenu,\n\n registry: visualization::Registry,\n\n}\n\n\n\nimpl Model {\n\n pub fn new(app: &Application, registry: visualization::Registry) -> Self {\n\n let selection_menu = drop_down_menu::DropDownMenu::new(app);\n\n app.display.scene().layers.below_main.add_exclusive(&selection_menu);\n\n Self { selection_menu, registry }\n\n }\n\n\n\n pub fn entries(&self, input_type: &Option<enso::Type>) -> Vec<visualization::Path> {\n\n let input_type_or_any = input_type.clone().unwrap_or_else(enso::Type::any);\n\n let definitions_iter = self.registry.valid_sources(&input_type_or_any).into_iter();\n\n definitions_iter.map(|d| d.signature.path).collect_vec()\n\n }\n\n}\n\n\n\nimpl display::Object for Model {\n", "file_path": "app/gui/view/graph-editor/src/component/visualization/container/visualization_chooser.rs", "rank": 41, "score": 344134.3955217682 }, { "content": "#[derive(Clone, CloneRef, Debug)]\n\nstruct Model {\n\n hover_area: hover_area::View,\n\n visualization_chooser: VisualizationChooser,\n\n background: background::View,\n\n display_object: display::object::Instance,\n\n size: Rc<Cell<Vector2>>,\n\n icons: Icons,\n\n shapes: compound::events::MouseEvents,\n\n}\n\n\n\nimpl Model {\n\n fn new(app: &Application, vis_registry: visualization::Registry) -> Self {\n\n let logger = Logger::new(\"ActionBarModel\");\n\n let background = background::View::new(&logger);\n\n let hover_area = hover_area::View::new(&logger);\n\n let visualization_chooser = VisualizationChooser::new(app, vis_registry);\n\n let display_object = display::object::Instance::new(&logger);\n\n let size = default();\n\n let icons = Icons::new(logger);\n\n let shapes = compound::events::MouseEvents::default();\n", "file_path": "app/gui/view/graph-editor/src/component/visualization/container/action_bar.rs", "rank": 42, "score": 344134.3955217682 }, { "content": "/// Method pointer that described the main method, i.e. 
the method that project view wants to open\n\n/// and which presence is currently required.\n\npub fn main_method_ptr(\n\n project_name: project::QualifiedName,\n\n module_path: &model::module::Path,\n\n) -> MethodPointer {\n\n module_path.method_pointer(project_name, MAIN_DEFINITION_NAME)\n\n}\n\n\n", "file_path": "app/gui/src/controller/project.rs", "rank": 43, "score": 343871.92833871953 }, { "content": "/// Check if the given macro match node is an import declaration.\n\npub fn is_match_import(ast: &known::Match) -> bool {\n\n let segment = &ast.segs.head;\n\n let keyword = crate::identifier::name(&segment.head);\n\n if keyword.contains_if(|str| *str == UNQUALIFIED_IMPORT_KEYWORD) {\n\n let second_segment = &ast.segs.tail.first();\n\n match second_segment {\n\n Some(seg) => {\n\n let keyword_2 = crate::identifier::name(&seg.head);\n\n if keyword_2.contains_if(|str| *str == QUALIFIED_IMPORT_KEYWORD) {\n\n return true;\n\n }\n\n }\n\n None => return false,\n\n }\n\n }\n\n keyword.contains_if(|str| *str == QUALIFIED_IMPORT_KEYWORD)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 44, "score": 342420.0251242083 }, { "content": "/// Generate an identifier name that is not present in the given sequence.\n\n///\n\n/// The name is generated by taking `base` string and appending subsequent integers.\n\npub fn generate_name(\n\n base: impl AsRef<str>,\n\n unavailable: impl IntoIterator<Item = NormalizedName>,\n\n) -> FallibleResult<Identifier> {\n\n let base = base.as_ref();\n\n let is_relevant = |name: &NormalizedName| name.starts_with(base);\n\n let unavailable = unavailable.into_iter().filter(is_relevant).collect::<HashSet<_>>();\n\n let name = (1..)\n\n .find_map(|i| {\n\n let candidate = NormalizedName::new(iformat!(\"{base}{i}\"));\n\n let available = !unavailable.contains(&candidate);\n\n available.as_some(candidate)\n\n })\n\n .unwrap(); // It never yields `None`, as we iterate infinite sequence until we find match.\n\n 
Identifier::from_text(name)\n\n}\n\n\n", "file_path": "app/gui/controller/double-representation/src/identifier.rs", "rank": 45, "score": 339729.83457897906 }, { "content": "#[automock]\n\npub trait API: Debug {\n\n /// Project's name\n\n // TODO [mwu] This should return Rc<ReferentName>.\n\n fn name(&self) -> ReferentName;\n\n\n\n /// Project's qualified name\n\n fn qualified_name(&self) -> QualifiedName;\n\n\n\n /// Get Language Server JSON-RPC Connection for this project.\n\n fn json_rpc(&self) -> Rc<language_server::Connection>;\n\n\n\n /// Get Language Server binary Connection for this project.\n\n fn binary_rpc(&self) -> Rc<binary::Connection>;\n\n\n\n /// Get the engine's version of the project.\n\n fn engine_version(&self) -> semver::Version;\n\n\n\n /// Get the instance of parser that is set up for this project.\n\n fn parser(&self) -> Parser;\n\n\n", "file_path": "app/gui/src/model/project.rs", "rank": 46, "score": 339326.3407644722 }, { "content": "/// Data that flows trough the FRP network.\n\npub trait Data = 'static + Clone + Debug + Default;\n\n\n\n\n\n// =================\n\n// === HasOutput ===\n\n// =================\n\n\n\n/// Implementors of this trait has to know their output type.\n", "file_path": "lib/rust/frp/src/node.rs", "rank": 47, "score": 338750.91825474193 }, { "content": "/// Fast-check if the pattern matches text.\n\n///\n\n/// This is faster way than calling `score_match(text,pattern,metric).is_some()`, therefore it's\n\n/// recommended to call this function before scoring when we are not sure if the pattern actually\n\n/// matches the text.\n\npub fn matches(text: impl Str, pattern: impl Str) -> bool {\n\n let mut pattern_chars = pattern.as_ref().chars();\n\n let mut next_pattern_char = pattern_chars.next();\n\n for text_char in text.as_ref().chars() {\n\n match next_pattern_char {\n\n Some(ch) if ch.eq_ignore_ascii_case(&text_char) =>\n\n next_pattern_char = pattern_chars.next(),\n\n Some(_) => {}\n\n None => {\n\n break;\n\n }\n\n 
}\n\n }\n\n next_pattern_char.is_none()\n\n}\n\n\n\n/// The result of `find_best_subsequence` function.\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Subsequence {\n\n /// The score of found subsequence.\n", "file_path": "lib/rust/fuzzly/src/score.rs", "rank": 48, "score": 338675.01370551425 }, { "content": "/// Creates a pretty documentation from hardcoded inner text.\n\npub fn documentation_html_from(inner: &str) -> String {\n\n return format!(\"<div class=\\\"doc\\\" style=\\\"font-size: 13px;\\\"><p>{}</p></div>\", inner);\n\n}\n\n\n\n// =========================\n\n// === Embedded Examples ===\n\n// =========================\n\n\n\nlazy_static! {\n\n /// The hard-coded examples to be used until the proper solution\n\n /// (described in https://github.com/enso-org/ide/issues/1011) will be implemented.\n\n //TODO[ao]: Remove once the issue will be implemented.\n\n pub static ref EXAMPLES:Vec<Example> = vec!\n\n [ Example\n\n { name : \"Parse JSON\".to_owned()\n\n , code : r#\"Json.parse '{\\\"a\\\":10, \\\"b\\\": 20}'\"#.to_owned()\n\n , imports : default()\n\n , documentation_html : documentation_html_from(\"An example showing how to parse string to Json structure.\")\n\n }\n\n , Example\n", "file_path": "app/gui/src/model/suggestion_database/example.rs", "rank": 49, "score": 338210.131563538 }, { "content": "/// Resolve the context module to a fully qualified name.\n\npub fn resolve_context_module(\n\n context_module: &ContextModule,\n\n main_module_name: impl FnOnce() -> model::module::QualifiedName,\n\n) -> FallibleResult<model::module::QualifiedName> {\n\n use visualization::instance::ContextModule::*;\n\n match context_module {\n\n ProjectMain => Ok(main_module_name()),\n\n Specific(module_name) => model::module::QualifiedName::from_text(module_name),\n\n }\n\n}\n\n\n\n\n\n\n\n// ==============\n\n// === Errors ===\n\n// ==============\n\n\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Copy, Debug, Fail)]\n\n#[fail(display = \"No visualization 
information for expression {}.\", _0)]\n", "file_path": "app/gui/src/presenter/graph/visualization/manager.rs", "rank": 50, "score": 335473.9735327298 }, { "content": "/// Return the toggle status of the given enable/disable/toggle inputs as a stream of booleans.\n\npub fn enable_disable_toggle(\n\n network: &frp::Network,\n\n enable: &frp::Any,\n\n disable: &frp::Any,\n\n toggle: &frp::Any,\n\n) -> frp::Stream<bool> {\n\n // FIXME: the clone_refs bellow should not be needed.\n\n let enable = enable.clone_ref();\n\n let disable = disable.clone_ref();\n\n let toggle = toggle.clone_ref();\n\n frp::extend! { network\n\n out <- any(...);\n\n on_toggle <- toggle.map2(&out,|_,t| !t);\n\n on_enable <- enable.constant(true);\n\n on_disable <- disable.constant(false);\n\n out <+ on_toggle;\n\n out <+ on_enable;\n\n out <+ on_disable;\n\n }\n\n out.into()\n\n}\n\n\n", "file_path": "app/gui/view/graph-editor/src/lib.rs", "rank": 51, "score": 335127.3783163092 }, { "content": "/// Returns future which returns once the msdfgen library is initialized.\n\npub fn initialized() -> impl Future<Output = ()> {\n\n MsdfgenJsInitialized()\n\n}\n\n\n\n/// The future for running test after initialization\n", "file_path": "lib/rust/ensogl/component/text/msdf-sys/src/lib.rs", "rank": 52, "score": 334786.03103351855 }, { "content": "/// The Entry Model Provider.\n\n///\n\n/// This provider is used by searcher to print documentation of currently selected entry.\n\npub trait DocumentationProvider: Debug {\n\n /// Get documentation string to be displayed when no entry is selected.\n\n fn get(&self) -> Option<String> {\n\n None\n\n }\n\n\n\n /// Get documentation string for given entry, or `None` if entry or documentation does not\n\n /// exist.\n\n fn get_for_entry(&self, id: entry::Id) -> Option<String>;\n\n}\n\n\n\nimpl DocumentationProvider for entry::EmptyProvider {\n\n fn get_for_entry(&self, _: entry::Id) -> Option<String> {\n\n None\n\n }\n\n}\n\n\n\n\n\n// === AnyDocumentationProvider 
===\n\n\n", "file_path": "app/gui/view/src/searcher.rs", "rank": 53, "score": 334750.4078323827 }, { "content": "/// Get the environment variable or panic if not available.\n\npub fn env_var_or_panic(var_name: &str) -> String {\n\n match std::env::var(var_name) {\n\n Ok(var) => var,\n\n Err(e) => panic!(\"Failed to read environment variable {}: {}.\", var_name, e),\n\n }\n\n}\n\n\n", "file_path": "lib/rust/build-utils/src/lib.rs", "rank": 54, "score": 334356.38326736004 }, { "content": "/// If this is the builtin macro for `->` (lambda expression), returns it as known `Match`.\n\npub fn as_lambda_match(ast: &Ast) -> Option<known::Match> {\n\n let macro_match = known::Match::try_from(ast).ok()?;\n\n let segment = &macro_match.segs.head;\n\n crate::opr::is_arrow_opr(&segment.head).then_some(macro_match)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 55, "score": 331828.60785926157 }, { "content": "#[wasm_bindgen]\n\n#[allow(dead_code)]\n\npub fn entry_point_interface() {\n\n web::forward_panic_hook_to_console();\n\n web::set_stack_trace_limit();\n\n run_once_initialized(|| {\n\n let app = Application::new(&web::get_html_element_by_id(\"root\").unwrap());\n\n init(&app);\n\n mem::forget(app);\n\n });\n\n}\n\n\n\n\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 56, "score": 331384.5922203455 }, { "content": "#[wasm_bindgen]\n\n#[allow(dead_code, missing_docs)]\n\npub fn entry_point_visualization() {\n\n web::forward_panic_hook_to_console();\n\n web::set_stack_trace_limit();\n\n run_once_initialized(|| {\n\n let app = Application::new(&web::get_html_element_by_id(\"root\").unwrap());\n\n init(&app);\n\n std::mem::forget(app);\n\n });\n\n}\n\n\n", "file_path": "app/gui/view/debug_scene/visualization/src/lib.rs", "rank": 57, "score": 331384.4364364735 }, { "content": "/// Extend the list built by given [`ListBuilder`] with the categories and actions hardcoded\n\n/// in [`SUGGESTIONS`] constant.\n\npub fn 
add_hardcoded_entries_to_list(\n\n list: &mut ListBuilder,\n\n this_type: Option<&tp::QualifiedName>,\n\n return_types: Option<&HashSet<tp::QualifiedName>>,\n\n) {\n\n SUGGESTIONS.with(|hardcoded| {\n\n for hc_root_category in hardcoded {\n\n let icon = hc_root_category.icon.clone_ref();\n\n let mut root_cat = list.add_root_category(hc_root_category.name, icon);\n\n for hc_category in &hc_root_category.categories {\n\n let icon = hc_root_category.icon.clone_ref();\n\n let category = root_cat.add_category(hc_category.name, icon);\n\n category.extend(hc_category.suggestions.iter().cloned().filter_map(|suggestion| {\n\n let this_type_matches = if let Some(this_type) = this_type {\n\n suggestion.this_arg.contains(this_type)\n\n } else {\n\n true\n\n };\n\n let return_type_matches = if let Some(return_types) = return_types {\n\n suggestion\n", "file_path": "app/gui/src/controller/searcher/action/hardcoded.rs", "rank": 58, "score": 331373.713178391 }, { "content": "pub fn try_create_element(name: &str) -> Result<Element> {\n\n try_document()?\n\n .create_element(name)\n\n .map_err(|_| Error(format!(\"Cannot create element '{}'\", name)))\n\n}\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 59, "score": 330265.2016943045 }, { "content": "/// Retrieves the identifier's name, if the Ast node is an identifier. Otherwise, returns None.\n\npub fn name(ast: &Ast) -> Option<&str> {\n\n match ast.shape() {\n\n Shape::Var(val) => Some(&val.name),\n\n Shape::Cons(val) => Some(&val.name),\n\n Shape::SectionSides(val) => name(&val.opr),\n\n Shape::Opr(val) => Some(&val.name),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/identifier.rs", "rank": 60, "score": 330230.5549027609 }, { "content": "/// If the given AST node is an import declaration, returns it as a Match (which is the only shape\n\n/// capable of storing import declarations). 
Returns `None` otherwise.\n\npub fn ast_as_import_match(ast: &Ast) -> Option<known::Match> {\n\n let macro_match = known::Match::try_from(ast).ok()?;\n\n is_match_import(&macro_match).then_some(macro_match)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 61, "score": 328396.23711057496 }, { "content": "fn unwrap_error(opt_err: Option<String>) -> String {\n\n opt_err.unwrap_or_else(|| \"Unknown error.\".to_string())\n\n}\n\n\n\n\n\n\n\n// ======================\n\n// === Compile / Link ===\n\n// ======================\n\n\n", "file_path": "lib/rust/ensogl/core/src/system/gpu/shader.rs", "rank": 62, "score": 327892.4005776926 }, { "content": "/// Update IdMap to reflect the recent code change.\n\npub fn apply_code_change_to_id_map(\n\n id_map: &mut IdMap,\n\n change: &enso_text::text::Change<Bytes, String>,\n\n code: &str,\n\n) {\n\n // TODO [mwu]\n\n // The initial provisional algorithm received some changes to better behave in our typical\n\n // editor use-cases, i.e. to keep node ids when editing its expression. However, this came\n\n // at price of not properly keeping other sub-ids on parts of the node line.\n\n // In future, better and cleaner algorithm will need to be provided, likely with a different\n\n // API. 
Because of such expected rewrite and deeper restructuring, we don't really want to\n\n // spend much time on refactoring this function right now, even if it could be made nicer.\n\n\n\n let removed = &change.range.clone();\n\n let inserted = change.text.as_str();\n\n let new_code = change.applied(code).unwrap_or_else(|_| code.to_owned());\n\n let non_white = |c: char| !c.is_whitespace();\n\n let logger = enso_logger::DefaultWarningLogger::new(\"apply_code_change_to_id_map\");\n\n let vector = &mut id_map.vec;\n\n let inserted_size: Bytes = inserted.len().into();\n", "file_path": "app/gui/controller/double-representation/src/text.rs", "rank": 63, "score": 327519.4548395412 }, { "content": "/// With the list of occupied areas, return the first unoccupied point when going along the ray\n\n/// starting from `starting_point` and parallel to `direction` vector.\n\n///\n\n/// Returns [`None`] if the `direction` does not go clearly at any direction (both `direction.x` and\n\n/// `direction.y` are smaller than [`f32::EPSILON`]).\n\npub fn find_free_place(\n\n starting_point: Vector2,\n\n direction: Vector2,\n\n occupied: impl IntoIterator<Item = OccupiedArea>,\n\n) -> Option<Vector2> {\n\n let valid_dir = direction.x.abs() > f32::EPSILON || direction.y.abs() > f32::EPSILON;\n\n valid_dir.as_some_from(move || {\n\n let sorted_areas = occupied.into_iter().sorted_by_key(|area| {\n\n let x = area.x_bound_following_direction(-direction).unwrap_or(0.0);\n\n let y = area.y_bound_following_direction(-direction).unwrap_or(0.0);\n\n OrderedFloat(x * direction.x + y * direction.y)\n\n });\n\n let mut current_point = starting_point;\n\n for area in sorted_areas {\n\n if area.contains(current_point) {\n\n current_point = area.boundary_intersection(current_point, direction)\n\n }\n\n }\n\n current_point\n\n })\n", "file_path": "app/gui/view/graph-editor/src/free_place_finder.rs", "rank": 64, "score": 327090.37129189866 }, { "content": "fn init(app: &Application) {\n\n let _bg = 
app.display.scene().style_sheet.var(theme::application::background);\n\n\n\n let world = &app.display;\n\n let scene = world.scene();\n\n let camera = scene.camera();\n\n let navigator = Navigator::new(scene, &camera);\n\n\n\n app.views.register::<root::View>();\n\n app.views.register::<project::View>();\n\n app.views.register::<text::Area>();\n\n app.views.register::<GraphEditor>();\n\n let root_view = app.new_view::<root::View>();\n\n let project_view = root_view.project();\n\n let graph_editor = project_view.graph();\n\n let code_editor = project_view.code_editor();\n\n world.add_child(&root_view);\n\n\n\n code_editor.text_area().set_content(STUB_MODULE.to_owned());\n\n\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 65, "score": 326209.3362080059 }, { "content": "fn init(app: &Application) {\n\n let world = &app.display;\n\n let scene = world.scene();\n\n let camera = scene.camera();\n\n let navigator = Navigator::new(scene, &camera);\n\n let registry = Registry::new();\n\n\n\n registry.add(constructor_graph());\n\n\n\n let vis_factories = registry.valid_sources(&\"[[Float,Float,Float]]\".into());\n\n let vis_class = vis_factories\n\n .iter()\n\n .find(|class| &*class.signature.name == \"Graph\")\n\n .expect(\"Couldn't find Graph class.\");\n\n let visualization = vis_class.new_instance(scene).expect(\"Couldn't create visualiser.\");\n\n visualization.activate.emit(());\n\n\n\n let network = enso_frp::Network::new(\"VisualizationExample\");\n\n enso_frp::extend! { network\n\n trace visualization.on_preprocessor_change;\n", "file_path": "app/gui/view/debug_scene/visualization/src/lib.rs", "rank": 66, "score": 326209.3362080059 }, { "content": "/// Try Interpreting the line as disabling comment. 
Return the text after `#`.\n\npub fn as_disable_comment(ast: &Ast) -> Option<String> {\n\n let r#match = crate::known::Match::try_from(ast).ok()?;\n\n let first_segment = &r#match.segs.head;\n\n if crate::identifier::name(&first_segment.head) == Some(DISABLING_COMMENT_INTRODUCER) {\n\n Some(first_segment.body.repr())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 67, "score": 326132.0049504775 }, { "content": "pub fn display_graphviz(viz: Graphviz) {\n\n let code: String = viz.into();\n\n let url = percent_encoding::utf8_percent_encode(&code, percent_encoding::NON_ALPHANUMERIC);\n\n let url = format!(\"https://dreampuf.github.io/GraphvizOnline/#{}\", url);\n\n crate::web::window().open_with_url_and_target(&url, \"_blank\").unwrap();\n\n}\n\n\n\n\n\nimpl<T> GraphvizBuilder for T\n\nwhere\n\n T: ContentRef,\n\n Content<T>: GraphvizBuilder,\n\n{\n\n default fn graphviz_build(&self, builder: &mut Graphviz) {\n\n self.content().graphviz_build(builder)\n\n }\n\n}\n", "file_path": "lib/rust/frp/src/debug.rs", "rank": 68, "score": 322425.9442348865 }, { "content": "// TODO[ao] This expression mocks results in panic. 
If you want to use it, please fix it first.\n\npub fn expression_mock2() -> Expression {\n\n let pattern = Some(\"var1\".to_string());\n\n let pattern_cr = vec![Seq { right: false }, Or, Or, Build];\n\n let val = ast::crumbs::SegmentMatchCrumb::Body { val: pattern_cr };\n\n let parens_cr = ast::crumbs::MatchCrumb::Segs { val, index: 0 };\n\n let code = \"make_maps size (distribution normal)\".into();\n\n let output_span_tree = span_tree::SpanTree::default();\n\n let input_span_tree = span_tree::builder::TreeBuilder::new(36)\n\n .add_child(0, 14, span_tree::node::Kind::Chained, PrefixCrumb::Func)\n\n .add_child(0, 9, span_tree::node::Kind::Operation, PrefixCrumb::Func)\n\n .set_ast_id(Uuid::new_v4())\n\n .done()\n\n .add_empty_child(10, span_tree::node::InsertionPointType::BeforeTarget)\n\n .add_child(10, 4, span_tree::node::Kind::this().removable(), PrefixCrumb::Arg)\n\n .set_ast_id(Uuid::new_v4())\n\n .done()\n\n .add_empty_child(14, span_tree::node::InsertionPointType::Append)\n\n .set_ast_id(Uuid::new_v4())\n\n .done()\n\n .add_child(15, 21, span_tree::node::Kind::argument().removable(), PrefixCrumb::Arg)\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 69, "score": 321687.7556917977 }, { "content": "pub fn expression_mock() -> Expression {\n\n let pattern = Some(\"var1\".to_string());\n\n let code = \"[1,2,3]\".to_string();\n\n let parser = Parser::new_or_panic();\n\n let this_param =\n\n span_tree::ArgumentInfo { name: Some(\"this\".to_owned()), tp: Some(\"Text\".to_owned()) };\n\n let parameters = vec![this_param];\n\n let ast = parser.parse_line_ast(&code).unwrap();\n\n let invocation_info = span_tree::generate::context::CalledMethodInfo { parameters };\n\n let ctx = span_tree::generate::MockContext::new_single(ast.id.unwrap(), invocation_info);\n\n let output_span_tree = span_tree::SpanTree::default();\n\n let input_span_tree = span_tree::SpanTree::new(&ast, &ctx).unwrap();\n\n let whole_expression_id = default();\n\n Expression { 
pattern, code, whole_expression_id, input_span_tree, output_span_tree }\n\n}\n\n\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 70, "score": 321681.26397233957 }, { "content": "pub fn expression_mock3() -> Expression {\n\n let pattern = Some(\"Vector x y z\".to_string());\n\n // let code = \"image.blur ((foo bar) baz)\".to_string();\n\n let code = \"Vector x y z\".to_string();\n\n let parser = Parser::new_or_panic();\n\n let this_param =\n\n span_tree::ArgumentInfo { name: Some(\"this\".to_owned()), tp: Some(\"Image\".to_owned()) };\n\n let param0 = span_tree::ArgumentInfo {\n\n name: Some(\"radius\".to_owned()),\n\n tp: Some(\"Number\".to_owned()),\n\n };\n\n let param1 =\n\n span_tree::ArgumentInfo { name: Some(\"name\".to_owned()), tp: Some(\"Text\".to_owned()) };\n\n let param2 = span_tree::ArgumentInfo {\n\n name: Some(\"area\".to_owned()),\n\n tp: Some(\"Vector Int\".to_owned()),\n\n };\n\n let param3 = span_tree::ArgumentInfo {\n\n name: Some(\"matrix\".to_owned()),\n\n tp: Some(\"Vector String\".to_owned()),\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 71, "score": 321681.26397233957 }, { "content": "/// Gets the string with conents of given environment variable.\n\n/// If the variable wasn't set, returns a default value from a second argument.\n\npub fn env_var_or(varname: &str, default_value: &str) -> String {\n\n std::env::var(varname).unwrap_or_else(|_| default_value.into())\n\n}\n\n\n", "file_path": "lib/rust/prelude/src/env.rs", "rank": 72, "score": 319163.8960807567 }, { "content": "/// Describes variable usage within a given Ast-like crumbable entity.\n\npub fn analyze_crumbable(crumbable: &impl Crumbable) -> IdentifierUsage {\n\n let mut analyzer = AliasAnalyzer::default();\n\n analyzer.process_subtrees(crumbable);\n\n analyzer.root_scope.symbols\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Tests ===\n\n// =============\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::test_utils::*;\n\n use 
super::*;\n\n\n\n wasm_bindgen_test_configure!(run_in_browser);\n\n\n\n /// Checks if actual observed sequence of located identifiers matches the expected one.\n", "file_path": "app/gui/controller/double-representation/src/alias_analysis.rs", "rank": 73, "score": 318953.22625803074 }, { "content": "/// Alias for `Default::default()`.\n\npub fn default<T: Default>() -> T {\n\n Default::default()\n\n}\n", "file_path": "lib/rust/prelude/src/std_reexports.rs", "rank": 74, "score": 317383.0573829245 }, { "content": "/// Is the given token an identifier matching to a given string?\n\npub fn matching_ident(token: &TokenTree, name: &str) -> bool {\n\n match token {\n\n TokenTree::Ident(ident) => *ident == name,\n\n _ => false,\n\n }\n\n}\n\n\n\n\n\n\n\n// ============\n\n// === Repr ===\n\n// ============\n\n\n", "file_path": "lib/rust/macro-utils/src/lib.rs", "rank": 75, "score": 315710.08379516454 }, { "content": "/// Produce a JSON-formatted event log from the internal event logs.\n\n///\n\n/// Consumes all events that have happened up to this point; except in testing, this should only be\n\n/// done once.\n\npub fn take_log() -> String {\n\n let events = EVENTS.take_all();\n\n let metadatas = METADATA_LOGS.clone_all();\n\n let metadata_names: Vec<_> = metadatas.iter().map(|metadata| metadata.name()).collect();\n\n let mut metadata_entries: Vec<_> =\n\n metadatas.into_iter().map(|metadata| metadata.take_all()).collect();\n\n let events: Vec<_> = events\n\n .into_iter()\n\n .map(|event| {\n\n event.map_metadata(|external| {\n\n let id = external.type_id as usize;\n\n let name = metadata_names[id];\n\n let data = metadata_entries[id].next().unwrap();\n\n let data = serde_json::value::to_raw_value(&data).unwrap();\n\n Variant { name, t: data }\n\n })\n\n })\n\n .collect();\n\n serde_json::to_string(&events).unwrap()\n\n}\n\n\n\n\n\n// === Variant ===\n\n\n", "file_path": "lib/rust/profiler/src/internal.rs", "rank": 76, "score": 313786.3108616984 }, { "content": "/// Get 
the code color for the provided type or default code color in case the type is None.\n\npub fn compute_for_code(tp: Option<&Type>, styles: &StyleWatch) -> color::Lcha {\n\n let opt_color = tp.as_ref().map(|tp| compute(tp, styles));\n\n opt_color.unwrap_or_else(|| styles.get_color(theme::graph_editor::node::text).into())\n\n}\n\n\n", "file_path": "app/gui/view/graph-editor/src/component/type_coloring.rs", "rank": 77, "score": 313359.19199722464 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct ViewUpdate {\n\n state: Rc<State>,\n\n nodes: Vec<controller::graph::Node>,\n\n trees: HashMap<AstNodeId, controller::graph::NodeTrees>,\n\n connections: HashSet<AstConnection>,\n\n}\n\n\n\nimpl ViewUpdate {\n\n /// Create ViewUpdate information from Graph Presenter's model.\n\n fn new(model: &Model) -> FallibleResult<Self> {\n\n let state = model.state.clone_ref();\n\n let nodes = model.controller.graph().nodes()?;\n\n let connections_and_trees = model.controller.connections()?;\n\n let connections = connections_and_trees.connections.into_iter().collect();\n\n let trees = connections_and_trees.trees;\n\n Ok(Self { state, nodes, trees, connections })\n\n }\n\n\n\n /// Remove nodes from the state and return node views to be removed.\n\n fn remove_nodes(&self) -> Vec<ViewNodeId> {\n", "file_path": "app/gui/src/presenter/graph.rs", "rank": 78, "score": 312904.8303167901 }, { "content": "/// Wraps an arbitrary `std::error::Error` as an `InteropError.`\n\npub fn interop_error<T>(error: T) -> Error\n\nwhere T: Fail {\n\n Error::InteropError(Box::new(error))\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Tests ===\n\n// =============\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n\n\n\n\n #[derive(Clone, Debug, Default, Deserialize, Serialize)]\n\n struct Metadata {\n\n foo: usize,\n", "file_path": "app/gui/language/parser/src/api.rs", "rank": 79, "score": 310425.49377484474 }, { "content": "fn to_json_single_line(val: &impl Serialize) -> 
std::result::Result<String, serde_json::Error> {\n\n let json = serde_json::to_string(val)?;\n\n let line = json.chars().filter(|c| *c != '\\n' && *c != '\\r').collect();\n\n Ok(line)\n\n}\n\n\n\nimpl<M: Metadata> ParsedSourceFile<M> {\n\n /// Serialize to the SourceFile structure,\n\n pub fn serialize(&self) -> std::result::Result<SourceFile, serde_json::Error> {\n\n let code = self.ast.repr();\n\n let before_tag = \"\\n\".repeat(NEWLINES_BEFORE_TAG);\n\n let before_idmap = \"\\n\";\n\n let json_id_map = JsonIdMap::from_id_map(&self.ast.id_map(), &code);\n\n let id_map = to_json_single_line(&json_id_map)?;\n\n let before_metadata = \"\\n\";\n\n let metadata = to_json_single_line(&self.metadata)?;\n\n\n\n let id_map_start = code.len() + before_tag.len() + METADATA_TAG.len() + before_idmap.len();\n\n let id_map_start_bytes = Bytes::from(id_map_start);\n\n let metadata_start = id_map_start + id_map.len() + before_metadata.len();\n", "file_path": "app/gui/language/parser/src/api.rs", "rank": 80, "score": 310316.0633130426 }, { "content": "/// The Model Provider for ListView's entries of type `E`.\n\n///\n\n/// The [`crate::ListView`] component does not display all entries at once, instead it lazily ask\n\n/// for models of entries when they're about to be displayed. So setting the select content is\n\n/// essentially providing an implementor of this trait.\n\npub trait ModelProvider<E>: Debug {\n\n /// Number of all entries.\n\n fn entry_count(&self) -> usize;\n\n\n\n /// Get the model of entry with given id. 
The implementors should return `None` only when\n\n /// requested id greater or equal to entries count.\n\n fn get(&self, id: Id) -> Option<E::Model>\n\n where E: Entry;\n\n}\n\n\n\n\n\n// === AnyModelProvider ===\n\n\n\n/// A wrapper for shared instance of some Provider of models for `E` entries.\n\n#[derive(Debug, Shrinkwrap)]\n\npub struct AnyModelProvider<E>(Rc<dyn ModelProvider<E>>);\n\n\n\nimpl<E> Clone for AnyModelProvider<E> {\n\n fn clone(&self) -> Self {\n\n Self(self.0.clone())\n", "file_path": "lib/rust/ensogl/component/list-view/src/entry.rs", "rank": 81, "score": 310181.2250167172 }, { "content": "/// Creates a new where clause from provided sequence of where predicates.\n\npub fn new_where_clause(predicates: impl IntoIterator<Item = WherePredicate>) -> WhereClause {\n\n let predicates = syn::punctuated::Punctuated::from_iter(predicates);\n\n WhereClause { where_token: Default::default(), predicates }\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Tests ===\n\n// =============\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use proc_macro2::TokenStream;\n\n\n\n fn parse<T: syn::parse::Parse>(code: &str) -> T {\n\n syn::parse_str(code).unwrap()\n\n }\n\n\n", "file_path": "lib/rust/macro-utils/src/lib.rs", "rank": 82, "score": 309683.25591211306 }, { "content": " def evalModule(code: String, moduleName: String): Module = {\n\n val source = Source\n\n .newBuilder(LanguageInfo.ID, code, moduleName)\n\n .build()\n\n new Module(context.eval(source))\n\n }\n\n\n\n /** Evaluates provided code file as a new module.\n\n *\n\n * @param codeFile the code to evaluate.\n\n * @return the module representing evaluated code.\n\n */\n", "file_path": "engine/polyglot-api/src/main/scala/org/enso/polyglot/PolyglotContext.scala", "rank": 83, "score": 309349.0787764881 }, { "content": "pub fn mk_out_name<S: Str>(s: S) -> String {\n\n format!(\"output_{}\", s.as_ref())\n\n}\n", "file_path": "lib/rust/ensogl/core/src/display/symbol/gpu/shader/builder.rs", "rank": 85, 
"score": 308299.734211879 }, { "content": "/// Return FRP endpoints for the parameters that define a shadow.\n\npub fn frp_from_style(style: &StyleWatchFrp, path: impl Into<style::Path>) -> ParametersFrp {\n\n let path: style::Path = path.into();\n\n ParametersFrp {\n\n base_color: style.get_color(&path),\n\n fading: style.get_color(&path.sub(\"fading\")),\n\n size: style.get_number(&path.sub(\"size\")),\n\n spread: style.get_number(&path.sub(\"spread\")),\n\n exponent: style.get_number(&path.sub(\"exponent\")),\n\n offset_x: style.get_number(&path.sub(\"offset_x\")),\n\n offset_y: style.get_number(&path.sub(\"offset_y\")),\n\n }\n\n}\n", "file_path": "lib/rust/ensogl/component/shadow/src/lib.rs", "rank": 86, "score": 307648.10418409563 }, { "content": "pub struct ProjectName {\n\n model: Rc<ProjectNameModel>,\n\n pub frp: Frp,\n\n}\n\n\n\nimpl ProjectName {\n\n /// Constructor.\n\n fn new(app: &Application) -> Self {\n\n let frp = Frp::new();\n\n let model = Rc::new(ProjectNameModel::new(app));\n\n let network = &frp.network;\n\n let scene = app.display.scene();\n\n let text = &model.text_field.frp;\n\n // FIXME : StyleWatch is unsuitable here, as it was designed as an internal tool for shape\n\n // system (#795)\n\n let styles = StyleWatch::new(&scene.style_sheet);\n\n let hover_color = styles.get_color(theme::graph_editor::breadcrumbs::hover);\n\n let deselected_color = styles.get_color(theme::graph_editor::breadcrumbs::deselected::left);\n\n let selected_color = styles.get_color(theme::graph_editor::breadcrumbs::selected);\n\n let animations = Animations::new(network);\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 87, "score": 307336.89848130953 }, { "content": "pub struct Animations {\n\n color: DEPRECATED_Animation<color::Rgba>,\n\n position: DEPRECATED_Animation<Vector3<f32>>,\n\n}\n\n\n\nimpl Animations {\n\n /// Constructor.\n\n pub fn new(network: &frp::Network) -> Self {\n\n let color = 
DEPRECATED_Animation::new(network);\n\n let position = DEPRECATED_Animation::new(network);\n\n Self { color, position }\n\n }\n\n}\n\n\n\n\n\n\n\n// ========================\n\n// === ProjectNameModel ===\n\n// ========================\n\n\n\n#[derive(Debug, Clone, CloneRef)]\n\n#[allow(missing_docs)]\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 88, "score": 307327.59341565793 }, { "content": " fn display_object(&self) -> &display::object::Instance {\n\n &self.model.display_object\n\n }\n\n}\n\n\n\nimpl Deref for ProjectName {\n\n type Target = Frp;\n\n fn deref(&self) -> &Self::Target {\n\n &self.frp\n\n }\n\n}\n\n\n\nimpl application::command::FrpNetworkProvider for ProjectName {\n\n fn network(&self) -> &frp::Network {\n\n &self.frp.network\n\n }\n\n}\n\n\n\nimpl View for ProjectName {\n\n fn label() -> &'static str {\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 89, "score": 307325.0699488103 }, { "content": " let base_color = style.get_color(theme::graph_editor::breadcrumbs::transparent);\n\n let text_size: TextSize = TEXT_SIZE.into();\n\n let text_field = app.new_view::<text::Area>();\n\n text_field.set_default_color.emit(base_color);\n\n text_field.set_default_text_size(text_size);\n\n text_field.single_line(true);\n\n\n\n text_field.remove_from_scene_layer(&scene.layers.main);\n\n text_field.add_to_scene_layer(&scene.layers.panel_text);\n\n text_field.hover();\n\n\n\n let view_logger = Logger::new_sub(&logger, \"view_logger\");\n\n let view = background::View::new(&view_logger);\n\n\n\n scene.layers.panel.add_exclusive(&view);\n\n\n\n let project_name = default();\n\n Self { app, logger, display_object, view, style, text_field, project_name }.init()\n\n }\n\n\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 90, "score": 307320.8497325494 }, { "content": "use ensogl_component::text::style::Size as 
TextSize;\n\nuse ensogl_hardcoded_theme as theme;\n\nuse logger::DefaultWarningLogger as Logger;\n\n\n\n\n\n\n\n// =================\n\n// === Constants ===\n\n// =================\n\n\n\n// This is a default value for the project name when it is created. The project name should\n\n// always be initialized externally for the current project. If this value is visible in the UI,\n\n// it was not set to the correct project name due to some bug.\n\nconst UNINITIALIZED_PROJECT_NAME: &str = \"Project Name Uninitialized\";\n\n/// Default line height for project names.\n\npub const LINE_HEIGHT: f32 = TEXT_SIZE * 1.5;\n\n\n\n\n\n\n\n// ==================\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 91, "score": 307319.93260942324 }, { "content": " debug!(self.logger, \"Committing name: '{name}'.\");\n\n *self.project_name.borrow_mut() = name;\n\n }\n\n}\n\n\n\nimpl display::Object for ProjectNameModel {\n\n fn display_object(&self) -> &display::object::Instance {\n\n &self.display_object\n\n }\n\n}\n\n\n\n\n\n\n\n// ===================\n\n// === ProjectName ===\n\n// ===================\n\n\n\n/// The view used for displaying and renaming it.\n\n#[derive(Debug, Clone, CloneRef)]\n\n#[allow(missing_docs)]\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 92, "score": 307319.5744656232 }, { "content": " frp.output.source.pointer_style <+ mouse_over_while_editing.map(|_|\n\n cursor::Style::new_text_cursor()\n\n );\n\n no_mouse_or_edit <- on_mouse_over_and_editable.gate_not(&on_mouse_over_and_editable);\n\n frp.output.source.pointer_style <+ no_mouse_or_edit.map(|_|\n\n cursor::Style::default()\n\n );\n\n frp.output.source.pointer_style <+ frp.input.start_editing.gate(&frp.output.is_hovered).map(|_|\n\n cursor::Style::new_text_cursor()\n\n );\n\n }\n\n\n\n frp.deselect();\n\n frp.input.set_name.emit(UNINITIALIZED_PROJECT_NAME.to_string());\n\n\n\n Self { model, frp }\n\n 
}\n\n}\n\n\n\nimpl display::Object for ProjectName {\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 93, "score": 307313.4260100439 }, { "content": "// === Background ===\n\n// ==================\n\n\n\n/// A transparent \"background\" of project name, set for capturing mouse events.\n\npub mod background {\n\n use super::*;\n\n\n\n ensogl::define_shape_system! {\n\n () {\n\n let bg_color = color::Rgba::new(0.0,0.0,0.0,0.000_001);\n\n Plane().fill(bg_color).into()\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\n// ===========\n\n// === FRP ===\n\n// ===========\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 94, "score": 307309.7815943227 }, { "content": "\n\n fn init(self) -> Self {\n\n self.add_child(&self.text_field);\n\n self.add_child(&self.view);\n\n self.update_text_field_content(self.project_name.borrow().as_str());\n\n self\n\n }\n\n\n\n /// Revert the text field content to the last committed project name.\n\n fn reset_name(&self) {\n\n debug!(self.logger, \"Resetting project name.\");\n\n self.update_text_field_content(self.project_name.borrow().as_str());\n\n }\n\n\n\n /// Update the visible content of the text field.\n\n fn update_text_field_content(&self, content: &str) {\n\n self.text_field.set_content(content);\n\n self.update_alignment(content);\n\n }\n\n\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 95, "score": 307307.92327969155 }, { "content": " \"ProjectName\"\n\n }\n\n fn new(app: &Application) -> Self {\n\n ProjectName::new(app)\n\n }\n\n fn app(&self) -> &Application {\n\n &self.model.app\n\n }\n\n\n\n fn default_shortcuts() -> Vec<shortcut::Shortcut> {\n\n use shortcut::ActionType::*;\n\n (&[\n\n (Press, \"\", \"enter\", \"commit\"),\n\n (Release, \"\", \"escape\", \"cancel_editing\"),\n\n (DoublePress, \"is_hovered\", \"left-mouse-button\", \"start_editing\"),\n\n ])\n\n .iter()\n\n .map(|(a, b, c, d)| 
Self::self_shortcut_when(*a, *c, *d, *b))\n\n .collect()\n\n }\n\n}\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 96, "score": 307306.65977741964 }, { "content": " fn set_color(&self, value: color::Rgba) {\n\n self.text_field.set_default_color(value);\n\n self.text_field.set_color_all(value);\n\n }\n\n\n\n fn set_position(&self, value: Vector3<f32>) {\n\n self.text_field.set_position(value);\n\n }\n\n\n\n /// Change the text field content and commit the given name.\n\n fn rename(&self, name: impl Str) {\n\n let name = name.into();\n\n debug!(self.logger, \"Renaming: '{name}'.\");\n\n self.update_text_field_content(&name);\n\n self.commit(name);\n\n }\n\n\n\n /// Confirm the given name as the current project name.\n\n fn commit<T: Into<String>>(&self, name: T) {\n\n let name = name.into();\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 97, "score": 307305.0318539856 }, { "content": "//! This module provides a view for project's name which can be used to edit it.\n\n\n\nuse crate::prelude::*;\n\n\n\nuse crate::component::breadcrumbs::breadcrumb;\n\nuse crate::component::breadcrumbs::GLYPH_WIDTH;\n\nuse crate::component::breadcrumbs::TEXT_SIZE;\n\nuse crate::component::breadcrumbs::VERTICAL_MARGIN;\n\n\n\nuse enso_frp as frp;\n\nuse ensogl::application;\n\nuse ensogl::application::shortcut;\n\nuse ensogl::application::Application;\n\nuse ensogl::data::color;\n\nuse ensogl::display;\n\nuse ensogl::display::object::ObjectOps;\n\nuse ensogl::display::shape::*;\n\nuse ensogl::gui::cursor;\n\nuse ensogl::DEPRECATED_Animation;\n\nuse ensogl_component::text;\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 98, "score": 307304.5974913598 }, { "content": " edit_click <- model.view.events.mouse_down.gate(&frp.ide_text_edit_mode);\n\n start_editing <- any(edit_click,frp.input.start_editing);\n\n eval_ start_editing ({\n\n 
text.set_focus(true);\n\n text.set_cursor_at_mouse_position()\n\n });\n\n frp.source.edit_mode <+ start_editing.to_true();\n\n\n\n\n\n // === Text Area ===\n\n\n\n text_content <- text.content.map(|txt| txt.to_string());\n\n eval text_content((content) model.update_alignment(content));\n\n text_width <- text_content.map(f!((content) model.width(content)));\n\n frp.source.width <+ text_width;\n\n\n\n\n\n // === Input Commands ===\n\n\n\n eval_ frp.input.cancel_editing (model.reset_name());\n", "file_path": "app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs", "rank": 99, "score": 307303.0895160488 } ]
Rust
examples/log-sensors.rs
quietlychris/f3
7ccc6f26fe16a4c053d09bcc61434c422f24d82f
#![deny(warnings)] #![no_main] #![no_std] extern crate panic_semihosting; use core::ptr; use aligned::Aligned; use byteorder::{ByteOrder, LE}; use cortex_m::{asm, itm}; use cortex_m_rt::entry; use f3::{ hal::{i2c::I2c, prelude::*, spi::Spi, stm32f30x, timer::Timer}, l3gd20::{self, Odr}, lsm303dlhc::{AccelOdr, MagOdr}, L3gd20, Lsm303dlhc, }; use nb::block; const FREQUENCY: u32 = 220; const NSAMPLES: u32 = 32 * FREQUENCY; #[entry] fn main() -> ! { let mut cp = cortex_m::Peripherals::take().unwrap(); let dp = stm32f30x::Peripherals::take().unwrap(); let mut flash = dp.FLASH.constrain(); let mut rcc = dp.RCC.constrain(); let clocks = rcc .cfgr .sysclk(64.mhz()) .pclk1(32.mhz()) .freeze(&mut flash.acr); unsafe { cp.DCB.demcr.modify(|r| r | (1 << 24)); let swo_freq = 2_000_000; cp.TPIU.acpr.write((clocks.sysclk().0 / swo_freq) - 1); cp.TPIU.sppr.write(2); cp.TPIU.ffcr.modify(|r| r & !(1 << 1)); const DBGMCU_CR: *mut u32 = 0xe0042004 as *mut u32; let r = ptr::read_volatile(DBGMCU_CR); ptr::write_volatile(DBGMCU_CR, r | (1 << 5)); cp.ITM.lar.write(0xC5ACCE55); cp.ITM.tcr.write( (0b000001 << 16) | (1 << 3) | (1 << 0), ); cp.ITM.ter[0].write(1); } let mut gpioa = dp.GPIOA.split(&mut rcc.ahb); let mut gpiob = dp.GPIOB.split(&mut rcc.ahb); let mut gpioe = dp.GPIOE.split(&mut rcc.ahb); let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl); let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl); let i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1); let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap(); lsm303dlhc.accel_odr(AccelOdr::Hz400).unwrap(); lsm303dlhc.mag_odr(MagOdr::Hz220).unwrap(); let mut nss = gpioe .pe3 .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper); nss.set_high(); let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl); let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl); let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl); let spi = Spi::spi1( dp.SPI1, (sck, miso, mosi), 
l3gd20::MODE, 1.mhz(), clocks, &mut rcc.apb2, ); let mut l3gd20 = L3gd20::new(spi, nss).unwrap(); l3gd20.set_odr(Odr::Hz380).unwrap(); let mut timer = Timer::tim2(dp.TIM2, FREQUENCY.hz(), clocks, &mut rcc.apb1); itm::write_all(&mut cp.ITM.stim[0], &[0]); let mut tx_buf: Aligned<u32, [u8; 20]> = Aligned([0; 20]); for _ in 0..NSAMPLES { block!(timer.wait()).unwrap(); let m = lsm303dlhc.mag().unwrap(); let ar = l3gd20.gyro().unwrap(); let g = lsm303dlhc.accel().unwrap(); let mut buf = [0; 18]; let mut start = 0; LE::write_i16(&mut buf[start..start + 2], m.x); start += 2; LE::write_i16(&mut buf[start..start + 2], m.y); start += 2; LE::write_i16(&mut buf[start..start + 2], m.z); start += 2; LE::write_i16(&mut buf[start..start + 2], ar.x); start += 2; LE::write_i16(&mut buf[start..start + 2], ar.y); start += 2; LE::write_i16(&mut buf[start..start + 2], ar.z); start += 2; LE::write_i16(&mut buf[start..start + 2], g.x); start += 2; LE::write_i16(&mut buf[start..start + 2], g.y); start += 2; LE::write_i16(&mut buf[start..start + 2], g.z); cobs::encode(&buf, &mut tx_buf); itm::write_aligned(&mut cp.ITM.stim[0], &tx_buf); } asm::bkpt(); loop {} }
#![deny(warnings)] #![no_main] #![no_std] extern crate panic_semihosting; use core::ptr; use aligned::Aligned; use byteorder::{ByteOrder, LE}; use cortex_m::{asm, itm}; use cortex_m_rt::entry; use f3::{ hal::{i2c::I2c, prelude::*, spi::Spi, stm32f30x, timer::Timer}, l3gd20::{self, Odr}, lsm303dlhc::{AccelOdr, MagOdr}, L3gd20, Lsm303dlhc, }; use nb::block; const FREQUENCY: u32 = 220; const NSAMPLES: u32 = 32 * FREQUENCY; #[entry] fn main() -> ! { let mut cp = cortex_m::Peripherals::take().unwrap(); let dp = stm32f30x::Peripherals::take().unwrap(); let mut flash = dp.FLASH.constrain(); let mut rcc = dp.RCC.constrain(); let clocks = rcc .cfgr .sysclk(64.mhz()) .pclk1(32.mhz()) .freeze(&mut flash.acr); unsafe { cp.DCB.demcr.modify(|r| r | (1 << 24)); let swo_freq = 2_000_000; cp.TPIU.acpr.write((clocks.sysclk().0 / swo_freq) - 1); cp.TPIU.sppr.write(2); cp.TPIU.ffcr.modify(|r| r & !(1 << 1)); const DBGMCU_CR: *mut u32 = 0xe0042004 as *mut u32; let r = ptr::read_volatile(DBGMCU_CR); ptr::write_volatile(DBGMCU_CR, r | (1 << 5)); cp.ITM.lar.write(0xC5ACCE55); cp.ITM.tcr.write( (0b000001 << 16) | (1 << 3) | (1 << 0), ); cp.ITM.ter[0].write(1); } let mut gpioa = dp.GPIOA.split(&mut rcc.ahb); let mut gpiob = dp.GPIOB.split(&mut rcc.ahb); let mut gpioe = dp.GPIOE.split(&mut rcc.ahb); let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl); let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl); let i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1); let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap(); lsm303dlhc.accel_odr(AccelOdr::Hz400).unwrap(); lsm303dlhc.mag_odr(MagOdr::Hz220).unwrap(); let mut nss = gpioe .pe3 .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper); nss.set_high(); let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl); let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl); let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl); let spi = Spi::spi1( dp.SPI1, (sck, miso, mosi), 
l3gd20::MODE, 1.mhz(), clocks, &mut rcc.apb2, ); let mut l3gd20 = L3gd20::new(spi, nss).unwrap(); l3gd20.set_odr(Odr::Hz380).unwrap(); let mut timer = Timer::tim2(dp.TIM2, FREQUENCY.hz(), clocks, &mut rcc.apb1); itm::write_all(&mut cp.ITM.stim[0], &[0]); let mut tx_buf: Aligned<u32, [u8; 20]> = Aligned([0; 20]); for _ in 0..NSAMPLES { block!(
LE::write_i16(&mut buf[start..start + 2], ar.x); start += 2; LE::write_i16(&mut buf[start..start + 2], ar.y); start += 2; LE::write_i16(&mut buf[start..start + 2], ar.z); start += 2; LE::write_i16(&mut buf[start..start + 2], g.x); start += 2; LE::write_i16(&mut buf[start..start + 2], g.y); start += 2; LE::write_i16(&mut buf[start..start + 2], g.z); cobs::encode(&buf, &mut tx_buf); itm::write_aligned(&mut cp.ITM.stim[0], &tx_buf); } asm::bkpt(); loop {} }
timer.wait()).unwrap(); let m = lsm303dlhc.mag().unwrap(); let ar = l3gd20.gyro().unwrap(); let g = lsm303dlhc.accel().unwrap(); let mut buf = [0; 18]; let mut start = 0; LE::write_i16(&mut buf[start..start + 2], m.x); start += 2; LE::write_i16(&mut buf[start..start + 2], m.y); start += 2; LE::write_i16(&mut buf[start..start + 2], m.z); start += 2;
random
[ { "content": "#[entry]\n\nfn main() -> ! {\n\n let p = cortex_m::Peripherals::take().unwrap();\n\n let mut itm = p.ITM;\n\n\n\n iprintln!(&mut itm.stim[0], \"Hello, world!\");\n\n\n\n asm::bkpt();\n\n\n\n loop {}\n\n}\n", "file_path": "examples/itm.rs", "rank": 0, "score": 89875.80130304726 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let p = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = p.FLASH.constrain();\n\n let mut rcc = p.RCC.constrain();\n\n\n\n // TRY the other clock configuration\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n // let clocks = rcc.cfgr.sysclk(64.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr);\n\n\n\n let mut gpioa = p.GPIOA.split(&mut rcc.ahb);\n\n let mut gpioe = p.GPIOE.split(&mut rcc.ahb);\n\n\n\n let mut nss = gpioe\n\n .pe3\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n nss.set_high();\n\n\n\n // The `L3gd20` abstraction exposed by the `f3` crate requires a specific pin configuration to\n\n // be used and won't accept any configuration other than the one used here. Trying to use a\n", "file_path": "examples/l3gd20.rs", "rank": 1, "score": 89772.46695283944 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let p = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = p.FLASH.constrain();\n\n let mut rcc = p.RCC.constrain();\n\n\n\n // TRY the other clock configuration\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n // let clocks = rcc.cfgr.sysclk(64.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr);\n\n\n\n // The `Lsm303dlhc` abstraction exposed by the `f3` crate requires a specific pin configuration\n\n // to be used and won't accept any configuration other than the one used here. 
Trying to use a\n\n // different pin configuration will result in a compiler error.\n\n let mut gpiob = p.GPIOB.split(&mut rcc.ahb);\n\n let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n\n\n let i2c = I2c::i2c1(p.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1);\n\n\n\n let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap();\n", "file_path": "examples/lsm303dlhc.rs", "rank": 2, "score": 89772.46695283944 }, { "content": "fn main() {\n\n let (mut sender, receiver) = mpsc::channel();\n\n thread::spawn(move || {\n\n parse(&mut sender);\n\n });\n\n\n\n let mut window = Window::new(\"Quaternion visualizer\");\n\n let mut c = window.add_cube(0.3, 0.01, 0.2);\n\n\n\n c.set_color(0.0, 1.0, 0.0);\n\n\n\n window.set_light(Light::StickToCamera);\n\n\n\n let mut last = None;\n\n while window.render() {\n\n // grab the latest parsed quaternion\n\n loop {\n\n match receiver.try_recv() {\n\n Ok(q) => last = Some(q),\n\n Err(TryRecvError::Empty) => break,\n", "file_path": "viz/src/main.rs", "rank": 3, "score": 68883.8055891521 }, { "content": "fn main() {\n\n // Put the linker script somewhere the linker can find it\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "build.rs", "rank": 4, "score": 67160.90977278672 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let p = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut rcc = p.RCC.constrain();\n\n let gpioe = p.GPIOE.split(&mut rcc.ahb);\n\n\n\n let mut leds = Leds::new(gpioe);\n\n\n\n for led in leds.iter_mut() {\n\n let result = led.on();\n\n assert_eq!(result.is_err(),false);\n\n }\n\n\n\n loop {}\n\n}\n", "file_path": "examples/leds.rs", "rank": 5, "score": 64110.54873990694 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n let dp = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n\n\n // clock configuration using the default settings (all clocks run at 8 MHz)\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n // TRY this alternate clock configuration (all clocks run at 16 MHz)\n\n // let clocks = rcc.cfgr.sysclk(16.mhz()).freeze(&mut flash.acr);\n\n\n\n let mut led: Led = gpioe\n\n .pe9\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper)\n\n .into();\n\n let mut delay = Delay::new(cp.SYST, clocks);\n\n\n\n loop {\n\n let mut result = led.on();\n\n assert_eq!(result.is_err(), false);\n\n delay.delay_ms(1_000_u16);\n\n result = led.off();\n\n assert_eq!(result.is_err(), false);\n\n delay.delay_ms(1_000_u16);\n\n }\n\n}\n", "file_path": "examples/blinky.rs", "rank": 6, "score": 64110.54873990694 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let mut cp = cortex_m::Peripherals::take().unwrap();\n\n let dp = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n\n\n let clocks = rcc\n\n .cfgr\n\n .sysclk(64.mhz())\n\n .pclk1(32.mhz())\n\n .freeze(&mut flash.acr);\n\n\n\n // enable ITM\n\n // TODO this should be some high level API in the cortex-m crate\n\n unsafe {\n\n // enable TPIU and ITM\n\n cp.DCB.demcr.modify(|r| r | (1 << 24));\n\n\n\n // prescaler\n", "file_path": "examples/madgwick.rs", "rank": 7, "score": 64110.54873990694 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let p = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = p.FLASH.constrain();\n\n let mut rcc = p.RCC.constrain();\n\n let mut gpioc = p.GPIOC.split(&mut rcc.ahb);\n\n\n\n // clock configuration using the default settings (all clocks run at 8 MHz)\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n // TRY this alternate clock configuration (clocks run at nearly the maximum frequency)\n\n // let clocks = rcc.cfgr.sysclk(64.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr);\n\n\n\n // The Serial API is highly generic\n\n // TRY the commented out, different pin configurations\n\n let tx = gpioc.pc4.into_af7(&mut gpioc.moder, &mut gpioc.afrl);\n\n\n\n let rx = gpioc.pc5.into_af7(&mut gpioc.moder, &mut gpioc.afrl);\n\n\n\n // TRY using a different USART peripheral here\n\n let serial = Serial::usart1(p.USART1, (tx, rx), 9_600.bps(), clocks, &mut rcc.apb2);\n", "file_path": "examples/serial.rs", "rank": 8, "score": 64110.54873990694 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n hprintln!(\"Hello, world!\").unwrap();\n\n\n\n loop {}\n\n}\n", "file_path": "examples/hello.rs", "rank": 9, "score": 64110.54873990694 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n let dp = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n\n\n // clock configuration using the default settings (all clocks run at 8 MHz)\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n // TRY this alternate clock configuration (all clocks run at 16 MHz)\n\n // let clocks = rcc.cfgr.sysclk(16.mhz()).freeze(&mut flash.acr);\n\n\n\n let mut leds = Leds::new(gpioe);\n\n let mut delay = Delay::new(cp.SYST, clocks);\n\n\n\n let n = leds.len();\n\n loop {\n\n for curr in 0..n {\n\n let next = (curr + 1) % n;\n\n let mut result = leds[curr].off();\n\n assert_eq!(result.is_err(),false);\n\n result = leds[next].on();\n\n assert_eq!(result.is_err(),false);\n\n\n\n delay.delay_ms(100_u8);\n\n }\n\n }\n\n}\n", "file_path": "examples/roulette.rs", "rank": 10, "score": 64110.54873990694 }, { "content": "// parses quaternions from stdin\n\nfn parse(sender: &mut Sender<(f32, f32, f32, f32)>) {\n\n let stdin = io::stdin();\n\n\n\n for mut frame in BufReader::new(stdin.lock()).split(0) {\n\n let mut frame = frame.unwrap();\n\n if let Ok(n) = cobs::decode_in_place(&mut frame) {\n\n if n == 16 {\n\n let mut start = 0;\n\n let w = LE::read_f32(&mut frame[start..start + 4]);\n\n start += 4;\n\n let x = LE::read_f32(&mut frame[start..start + 4]);\n\n start += 4;\n\n let y = LE::read_f32(&mut frame[start..start + 4]);\n\n start += 4;\n\n let z = LE::read_f32(&mut frame[start..start + 4]);\n\n start += 4;\n\n assert_eq!(start, n);\n\n\n\n sender.send((w, x, y, z)).unwrap();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "viz/src/main.rs", "rank": 11, "score": 63081.84869282563 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let p = stm32f30x::Peripherals::take().unwrap();\n\n\n\n let mut flash = p.FLASH.constrain();\n\n let mut rcc = p.RCC.constrain();\n\n let mut gpioa = p.GPIOA.split(&mut rcc.ahb);\n\n\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n\n\n let tx = gpioa.pa9.into_af7(&mut gpioa.moder, &mut gpioa.afrh);\n\n let rx = gpioa.pa10.into_af7(&mut gpioa.moder, &mut gpioa.afrh);\n\n\n\n let serial = Serial::usart1(p.USART1, (tx, rx), 115_200.bps(), clocks, &mut rcc.apb2);\n\n let (mut tx, mut rx) = serial.split();\n\n\n\n loop {\n\n let byte = block!(rx.read()).unwrap();\n\n block!(tx.write(byte)).ok();\n\n }\n\n}\n", "file_path": "examples/serial-echo.rs", "rank": 13, "score": 61387.5495667994 }, { "content": "# Plots sensor data logged using the `log-sensors` example\n\n#\n\n# Usage:\n\n#\n\n# $ pipenv run plot.py /path/to/data.txt $WHAT\n\n#\n\n# where `$WHAT` can be one of \"accel\", \"gyro\", \"gyro-calibrated\", \"mag\" or\n\n# \"mag-calibrated\"\n\n\n\nimport cobs.cobs\n\nimport itertools\n\nimport matplotlib.pyplot as plt\n\nimport numpy as np\n\nimport seaborn as sns\n\nimport struct\n\nimport sys\n\n\n\n# apply plot style\n\nsns.set()\n\n\n\n# Constants\n\nN = 18 # frame size\n\nK_AR = 8.75e-3 # gyroscope sensitivity\n\nK_G = 2 / (1 << 15) # accelerometer sensitivity\n\nDT = 1 / 220 # sampling period\n\n\n\n# Parse input file\n\nwith open(sys.argv[1], 'rb') as f:\n\n data = f.read()\n\n\n\nmx, my, mz = [], [], []\n\narx, ary, arz = [], [], []\n\ngx, gy, gz = [], [], []\n\n\n\nfor (is_separator, frame) in itertools.groupby(data, lambda x: x == 0):\n\n if is_separator:\n\n continue\n\n\n\n try:\n\n frame = cobs.cobs.decode(bytes(frame))\n\n except cobs.cobs.DecodeError:\n\n sys.stderr.write('X')\n\n sys.stderr.flush()\n\n continue\n\n\n\n if len(frame) != N:\n\n sys.stderr.write('!')\n\n sys.stderr.flush()\n\n continue\n\n\n\n start = 0\n\n mx.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n my.append(struct.unpack('<h', 
frame[start:start+2])[0])\n\n start += 2\n\n mz.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n\n\n arx.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n ary.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n arz.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n\n\n gx.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n gy.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n gz.append(struct.unpack('<h', frame[start:start+2])[0])\n\n start += 2\n\n\n\n assert(start == N)\n\n\n\ntarget = sys.argv[2]\n\n\n\n# Scale data\n\nmx = np.array(mx)\n\nmy = np.array(my)\n\nmz = np.array(mz)\n\n\n\nmx_max = max(mx)\n\nmx_min = min(mx)\n\nmy_max = max(my)\n\nmy_min = min(my)\n\nmz_max = max(mz)\n\nmz_min = min(mz)\n\n\n\nmx_bias = (mx_max + mx_min) / 2\n\nmy_bias = (my_max + my_min) / 2\n\nmz_bias = (mz_max + mz_min) / 2\n\n\n\nmx_range = (mx_max - mx_min) / 2\n\nmy_range = (my_max - my_min) / 2\n\nmz_range = (mz_max - mz_min) / 2\n\n\n\nif target == 'mag-calibrated':\n\n mx = (mx - mx_bias) / mx_range\n\n my = (my - my_bias) / my_range\n\n mz = (mz - mz_bias) / mz_range\n\n\n\n mx_max = (mx_max - mx_bias) / mx_range\n\n mx_min = (mx_min - mx_bias) / mx_range\n\n my_max = (my_max - my_bias) / my_range\n\n my_min = (my_min - my_bias) / my_range\n\n mz_max = (mz_max - mz_bias) / mz_range\n\n mz_min = (mz_min - mz_bias) / mz_range\n\n\n\nmxy = max([abs(mx_max), abs(mx_min), abs(my_max), abs(my_min)])\n\nmyz = max([abs(my_max), abs(my_min), abs(mz_max), abs(mz_min)])\n\nmxz = max([abs(mx_max), abs(mx_min), abs(mz_max), abs(mz_min)])\n\n\n\narx_mean = np.mean(arx) * K_AR\n\nary_mean = np.mean(ary) * K_AR\n\narz_mean = np.mean(arz) * K_AR\n\n\n\nif target == 'gyro-calibrated':\n\n arx = np.array(arx) * K_AR - arx_mean\n\n ary = np.array(ary) * K_AR - ary_mean\n\n arz = np.array(arz) * K_AR - arz_mean\n\n\n\n arx_mean = 0\n\n ary_mean = 0\n\n arz_mean = 
0\n\nelse:\n\n arx = np.array(arx) * K_AR\n\n ary = np.array(ary) * K_AR\n\n arz = np.array(arz) * K_AR\n\n\n\ngx = np.array(gx) * K_G\n\ngy = np.array(gy) * K_G\n\ngz = np.array(gz) * K_G\n\n\n\ngx_mean = np.mean(gx)\n\ngy_mean = np.mean(gy)\n\ngz_mean = np.mean(gz)\n\n\n\n# Plot\n\nx = np.arange(0, len(arx)) * DT\n\nif target == 'accel':\n\n plt.subplot(221)\n\n plt.plot(x, gx)\n\n plt.plot([x[0], x[-1]], np.ones(2) * gx_mean, label='mean')\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.legend()\n\n plt.ylabel('Acceleration (g)')\n\n plt.title(r'$G_x$')\n\n\n\n plt.subplot(222)\n\n plt.plot(x, gy)\n\n plt.plot([x[0], x[-1]], np.ones(2) * gy_mean, label='mean')\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.legend()\n\n plt.title(r'$G_y$')\n\n\n\n plt.subplot(223)\n\n plt.plot(x, gz)\n\n plt.plot([x[0], x[-1]], np.ones(2) * gz_mean, label='mean')\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.legend()\n\n plt.xlabel('Time (s)')\n\n plt.ylabel('Acceleration (g)')\n\n plt.title(r'$G_z$')\n\n\n\n g = np.sqrt(gx**2 + gy**2 + gz**2)\n\n plt.subplot(224)\n\n plt.plot(x, g)\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.xlabel('Time (s)')\n\n plt.title(r'$\\|G\\|$')\n\n\n\n plt.suptitle('Accelerometer data')\n\n plt.tight_layout()\n\n plt.subplots_adjust(top=0.88)\n\n\n\n plt.savefig(target + '.svg')\n\n plt.close()\n\n\n\nif target == 'gyro' or target == 'gyro-calibrated':\n\n plt.subplot(221)\n\n plt.plot(x, arx)\n\n plt.plot([x[0], x[-1]], np.ones(2) * arx_mean, label='mean')\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.legend()\n\n plt.ylabel('Angular rate (dps)')\n\n plt.title(r'$AR_x$')\n\n\n\n plt.subplot(222)\n\n plt.plot(x, ary)\n\n plt.plot([x[0], x[-1]], np.ones(2) * ary_mean, label='mean')\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.legend()\n\n plt.title(r'$AR_y$')\n\n\n\n plt.subplot(223)\n\n plt.plot(x, arz)\n\n plt.plot([x[0], x[-1]], np.ones(2) * arz_mean, label='mean')\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.legend()\n\n 
plt.xlabel('Time (s)')\n\n plt.ylabel('Angular rate (dps)')\n\n plt.title(r'$AR_z$')\n\n\n\n ar = np.sqrt(arx**2 + ary**2 + arz**2)\n\n plt.subplot(224)\n\n plt.plot(x, ar)\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.xlabel('Time (s)')\n\n plt.title(r'$\\|AR\\|$')\n\n\n\n if target == 'gyro-calibrated':\n\n plt.suptitle('Calibrated gyroscope data')\n\n else:\n\n plt.suptitle('Gyroscope data')\n\n\n\n plt.tight_layout()\n\n plt.subplots_adjust(top=0.88)\n\n\n\n plt.savefig(target + '.svg')\n\n plt.close()\n\n\n\nif target == 'mag' or target == 'mag-calibrated':\n\n ax = plt.subplot(221)\n\n plt.plot(mx, my, ',')\n\n plt.xlim(-mxy, mxy)\n\n plt.ylim(-mxy, mxy)\n\n ax.set_aspect(1)\n\n plt.xlabel(r'$M_X$')\n\n plt.ylabel(r'$M_Y$')\n\n plt.title(r'$M_{XY}$')\n\n\n\n ax = plt.subplot(222)\n\n plt.plot(my, mz, ',')\n\n plt.xlim(-myz, myz)\n\n plt.ylim(-myz, myz)\n\n ax.set_aspect(1)\n\n plt.xlabel(r'$M_Y$')\n\n plt.ylabel(r'$M_Z$')\n\n plt.title(r'$M_{YZ}$')\n\n\n\n ax = plt.subplot(223)\n\n plt.plot(mx, mz, ',')\n\n plt.xlim(-mxz, mxz)\n\n plt.ylim(-mxz, mxz)\n\n ax.set_aspect(1)\n\n plt.xlabel(r'$M_X$')\n\n plt.ylabel(r'$M_Z$')\n\n plt.title(r'$M_{XZ}$')\n\n\n\n m = np.sqrt(mx**2 + my**2 + mz**2)\n\n plt.subplot(224)\n\n plt.plot(x, m)\n\n plt.xlim(round(x[0]), round(x[-1]))\n\n plt.xlabel('Time (s)')\n\n plt.title(r'$\\|M\\|$')\n\n\n\n if target == 'mag-calibrated':\n\n plt.suptitle('Calibrated magnetometer data')\n\n else:\n\n plt.suptitle('Magnetometer data')\n\n\n\n plt.tight_layout()\n\n plt.subplots_adjust(top=0.88)\n\n\n\n plt.savefig(target + '.svg')\n\n plt.close()\n\n\n\n if target == 'mag-calibrated':\n\n print()\n\n print('X(bias =', mx_bias, ', range =', mx_range, ')')\n\n print('Y(bias =', my_bias, ', range =', my_range, ')')\n\n print('Z(bias =', mz_bias, ', range =', mz_range, ')')\n", "file_path": "plot.py", "rank": 14, "score": 36365.73880446349 }, { "content": "//! Sends \"Hello, world!\" through the first ITM stimulus port\n\n//!\n\n//! 
To receive the message on the host you'll have to do three things:\n\n//!\n\n//! You'll need to uncomment lines 8 and 15 of the `.gdbinit` file.\n\n//!\n\n//! You'll also need to connect the SWO pin of the on-board SWD programmer to pin PB3 as shown\n\n//! [here](https://rust-embedded.github.io/discovery/06-hello-world/index.html).\n\n//!\n\n//! Finally, you'll need to run `itmdump itm.fifo` (mind the file paths) to receive the message.\n\n//! Read the documentation of the [`itm`] crate, which provides the `itmdump` tool, for details.\n\n//!\n\n//! [`itm`]: https://docs.rs/itm/0.2.0/itm/\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_main]\n\n#![no_std]\n\n\n\nextern crate f3;\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m::{asm, iprintln};\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/itm.rs", "rank": 15, "score": 27348.77767069195 }, { "content": "//! Interfacing the on-board LSM303DLHC (accelerometer + compass)\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_std]\n\n#![no_main]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m::asm;\n\nuse cortex_m_rt::entry;\n\nuse f3::{\n\n hal::{i2c::I2c, prelude::*, stm32f30x},\n\n Lsm303dlhc,\n\n};\n\n\n\n#[entry]\n", "file_path": "examples/lsm303dlhc.rs", "rank": 16, "score": 27248.907279973722 }, { "content": "//! 
Interfacing the on-board L3GD20 (gyroscope)\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_main]\n\n#![no_std]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m::asm;\n\nuse cortex_m_rt::entry;\n\nuse f3::{\n\n hal::{prelude::*, spi::Spi, stm32f30x},\n\n l3gd20, L3gd20,\n\n};\n\n\n\n#[entry]\n", "file_path": "examples/l3gd20.rs", "rank": 17, "score": 27248.66741194024 }, { "content": " // different pin configuration will result in a compiler error.\n\n let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n\n\n let spi = Spi::spi1(\n\n p.SPI1,\n\n (sck, miso, mosi),\n\n l3gd20::MODE,\n\n 1.mhz(),\n\n clocks,\n\n &mut rcc.apb2,\n\n );\n\n\n\n let mut l3gd20 = L3gd20::new(spi, nss).unwrap();\n\n\n\n // sanity check: the WHO_AM_I register always contains this value\n\n assert_eq!(l3gd20.who_am_i().unwrap(), 0xD4);\n\n\n\n let _m = l3gd20.all().unwrap();\n\n\n\n // when you reach this breakpoint you'll be able to inspect the variable `_m` which contains the\n\n // gyroscope and the temperature sensor readings\n\n asm::bkpt();\n\n\n\n loop {}\n\n}\n", "file_path": "examples/l3gd20.rs", "rank": 18, "score": 27245.089548056934 }, { "content": "\n\n let _accel = lsm303dlhc.accel().unwrap();\n\n let _mag = lsm303dlhc.mag().unwrap();\n\n let _temp = lsm303dlhc.temp().unwrap();\n\n\n\n // when you reach this breakpoint you'll be able to inspect the variables `_accel`, `_mag` and\n\n // `_temp` which contain the accelerometer, compass (magnetometer) and temperature sensor\n\n // readings\n\n asm::bkpt();\n\n\n\n loop {}\n\n}\n", "file_path": "examples/lsm303dlhc.rs", "rank": 19, "score": 27223.38437302472 }, { "content": "//! extern crate f3;\n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m::{asm, iprintln};\n\n//! use cortex_m_rt::entry;\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! 
{\n\n//! let p = cortex_m::Peripherals::take().unwrap();\n\n//! let mut itm = p.ITM;\n\n//! \n\n//! iprintln!(&mut itm.stim[0], \"Hello, world!\");\n\n//! \n\n//! asm::bkpt();\n\n//! \n\n//! loop {}\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_01_itm.rs", "rank": 20, "score": 25787.617333995775 }, { "content": "//! Sends \"Hello, world!\" through the first ITM stimulus port\n\n//!\n\n//! To receive the message on the host you'll have to do three things:\n\n//!\n\n//! You'll need to uncomment lines 8 and 15 of the `.gdbinit` file.\n\n//!\n\n//! You'll also need to connect the SWO pin of the on-board SWD programmer to pin PB3 as shown\n\n//! [here](https://rust-embedded.github.io/discovery/06-hello-world/index.html).\n\n//!\n\n//! Finally, you'll need to run `itmdump itm.fifo` (mind the file paths) to receive the message.\n\n//! Read the documentation of the [`itm`] crate, which provides the `itmdump` tool, for details.\n\n//!\n\n//! [`itm`]: https://docs.rs/itm/0.2.0/itm/\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n", "file_path": "src/examples/_01_itm.rs", "rank": 21, "score": 25776.093043986286 }, { "content": "//! \n\n//! let mut flash = p.FLASH.constrain();\n\n//! let mut rcc = p.RCC.constrain();\n\n//! \n\n//! // TRY the other clock configuration\n\n//! let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n//! // let clocks = rcc.cfgr.sysclk(64.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr);\n\n//! \n\n//! let mut gpioa = p.GPIOA.split(&mut rcc.ahb);\n\n//! let mut gpioe = p.GPIOE.split(&mut rcc.ahb);\n\n//! \n\n//! let mut nss = gpioe\n\n//! .pe3\n\n//! .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n//! nss.set_high();\n\n//! \n\n//! // The `L3gd20` abstraction exposed by the `f3` crate requires a specific pin configuration to\n\n//! // be used and won't accept any configuration other than the one used here. Trying to use a\n\n//! 
// different pin configuration will result in a compiler error.\n\n//! let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n", "file_path": "src/examples/_07_l3gd20.rs", "rank": 22, "score": 25695.747372411617 }, { "content": "//! \n\n//! let mut flash = p.FLASH.constrain();\n\n//! let mut rcc = p.RCC.constrain();\n\n//! \n\n//! // TRY the other clock configuration\n\n//! let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n//! // let clocks = rcc.cfgr.sysclk(64.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr);\n\n//! \n\n//! // The `Lsm303dlhc` abstraction exposed by the `f3` crate requires a specific pin configuration\n\n//! // to be used and won't accept any configuration other than the one used here. Trying to use a\n\n//! // different pin configuration will result in a compiler error.\n\n//! let mut gpiob = p.GPIOB.split(&mut rcc.ahb);\n\n//! let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n//! let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n//! \n\n//! let i2c = I2c::i2c1(p.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1);\n\n//! \n\n//! let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap();\n\n//! \n\n//! let _accel = lsm303dlhc.accel().unwrap();\n", "file_path": "src/examples/_08_lsm303dlhc.rs", "rank": 23, "score": 25695.67626321382 }, { "content": "//! Interfacing the on-board LSM303DLHC (accelerometer + compass)\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_std]\n\n//! #![no_main]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m::asm;\n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! hal::{i2c::I2c, prelude::*, stm32f30x},\n\n//! Lsm303dlhc,\n\n//! };\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let p = stm32f30x::Peripherals::take().unwrap();\n", "file_path": "src/examples/_08_lsm303dlhc.rs", "rank": 24, "score": 25691.277566090557 }, { "content": "//! Interfacing the on-board L3GD20 (gyroscope)\n\n//!\n\n//! ```\n\n//! 
#![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m::asm;\n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! hal::{prelude::*, spi::Spi, stm32f30x},\n\n//! l3gd20, L3gd20,\n\n//! };\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let p = stm32f30x::Peripherals::take().unwrap();\n", "file_path": "src/examples/_07_l3gd20.rs", "rank": 25, "score": 25691.06116469457 }, { "content": "//! let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! \n\n//! let spi = Spi::spi1(\n\n//! p.SPI1,\n\n//! (sck, miso, mosi),\n\n//! l3gd20::MODE,\n\n//! 1.mhz(),\n\n//! clocks,\n\n//! &mut rcc.apb2,\n\n//! );\n\n//! \n\n//! let mut l3gd20 = L3gd20::new(spi, nss).unwrap();\n\n//! \n\n//! // sanity check: the WHO_AM_I register always contains this value\n\n//! assert_eq!(l3gd20.who_am_i().unwrap(), 0xD4);\n\n//! \n\n//! let _m = l3gd20.all().unwrap();\n\n//! \n\n//! // when you reach this breakpoint you'll be able to inspect the variable `_m` which contains the\n\n//! // gyroscope and the temperature sensor readings\n\n//! asm::bkpt();\n\n//! \n\n//! loop {}\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_07_l3gd20.rs", "rank": 26, "score": 25687.30475303857 }, { "content": "//! let _mag = lsm303dlhc.mag().unwrap();\n\n//! let _temp = lsm303dlhc.temp().unwrap();\n\n//! \n\n//! // when you reach this breakpoint you'll be able to inspect the variables `_accel`, `_mag` and\n\n//! // `_temp` which contain the accelerometer, compass (magnetometer) and temperature sensor\n\n//! // readings\n\n//! asm::bkpt();\n\n//! \n\n//! loop {}\n\n//! }\n\n//! ```\n\n// Auto-generated. 
Do not modify.\n", "file_path": "src/examples/_08_lsm303dlhc.rs", "rank": 27, "score": 25665.069007888098 }, { "content": "extern crate byteorder;\n\nextern crate cobs;\n\nextern crate kiss3d;\n\nextern crate nalgebra as na;\n\n\n\nuse std::io::{self, BufRead, BufReader};\n\nuse std::sync::mpsc::{self, Sender, TryRecvError};\n\nuse std::thread;\n\n\n\nuse byteorder::{ByteOrder, LE};\n\nuse kiss3d::light::Light;\n\nuse kiss3d::window::Window;\n\nuse na::UnitQuaternion;\n\n\n", "file_path": "viz/src/main.rs", "rank": 28, "score": 25076.390856058566 }, { "content": " Err(TryRecvError::Disconnected) => return,\n\n }\n\n }\n\n\n\n if let Some(q) = last {\n\n // NOTE In kiss3d the coordinate axes look like this\n\n //\n\n // ^ Y\n\n // |\n\n // X |\n\n // <----X Z\n\n //\n\n // whereas the gyroscope axes on the F3 look like this\n\n //\n\n // ^ Z\n\n // |\n\n // |\n\n // X o----> Y\n\n //\n\n // when the USB connectors are facing in that +Y way\n", "file_path": "viz/src/main.rs", "rank": 29, "score": 25066.307138152737 }, { "content": " c.set_local_rotation(UnitQuaternion::from_quaternion(na::Quaternion::new(\n\n q.0,\n\n -q.2, // -y\n\n q.3, // +z\n\n -q.1, // -x\n\n )));\n\n }\n\n }\n\n}\n\n\n", "file_path": "viz/src/main.rs", "rank": 30, "score": 25063.95125745224 }, { "content": "### Added\n\n\n\n- A \"static-ram\" opt-out Cargo feature to remove the RAM initialization routine.\n\n No `static mut` variables can be used if this feature has been disabled.\n\n\n\n- An \"interrupts\" opt-out Cargo feature to remove the interrupts section of the\n\n vector table. 
Interrupts can't be used if this feature has been disabled.\n\n\n\n### Changed\n\n\n\n- [breaking] The `main` and `init` functions must now be a plain `fn` rather\n\n than `extern \"C\" fn`\n\n\n\n- [breaking] The `exception::EXCEPTIONS`, `exception::reset` and\n\n `interrupt::INTERRUPTS` items have been removed.\n\n\n\n## [v0.2.0] - 2016-10-27\n\n\n\n### Added\n\n\n\n- Initialize the FPU before main\n\n\n\n- Support for sending `print!` formatted messages over \"Serial Port\".\n\n\n\n- Overridable interrupts\n\n\n\n- High level API for the LSM303DLHC and L3GD20\n\n\n\n- A `time` module in the spirit of `std::time`\n\n\n\n- Opt-out Cargo features to disable the default initialization code (`init`),\n\n the default exception handler and the default panic formatting (`panic_fmt`).\n\n\n\n### Changed\n\n\n\n- [breaking] The whole `peripheral` module has been revamped to provide type\n\n safe access to the *contents* of registers.\n\n\n\n## [v0.1.0] - 2016-10-04\n\n\n\n### Added\n\n\n\n- High level API over LEDs\n\n\n\n- A `delay::ms` function\n\n\n\n- \"Smart\" exceptions\n\n\n\n- `iprint!` macros\n\n\n\n- Default `panic_fmt` implementation\n\n\n\n- Default system initialization\n\n\n\n- Low level access to some peripherals: DBGMCU, GPIO, RCC and TIM\n\n\n\n[Unreleased]: https://github.com/japaric/f3/compare/v0.6.1...HEAD\n\n[v0.6.1]: https://github.com/japaric/f3/compare/v0.6.0...v0.6.1\n\n[v0.6.0]: https://github.com/japaric/f3/compare/v0.5.3...v0.6.0\n\n[v0.5.3]: https://github.com/japaric/f3/compare/v0.5.2...v0.5.3\n\n[v0.5.2]: https://github.com/japaric/f3/compare/v0.5.1...v0.5.2\n\n[v0.5.1]: https://github.com/japaric/f3/compare/v0.5.0...v0.5.1\n\n[v0.5.0]: https://github.com/japaric/f3/compare/v0.4.1...v0.5.0\n\n[v0.4.1]: https://github.com/japaric/f3/compare/v0.4.0...v0.4.1\n\n[v0.4.0]: https://github.com/japaric/f3/compare/v0.3.1...v0.4.0\n\n[v0.3.1]: https://github.com/japaric/f3/compare/v0.3.0...v0.3.1\n\n[v0.3.0]: 
https://github.com/japaric/f3/compare/v0.2.0...v0.3.0\n\n[v0.2.0]: https://github.com/japaric/f3/compare/v0.1.0...v0.2.0\n", "file_path": "CHANGELOG.md", "rank": 31, "score": 17016.17755189081 }, { "content": "# Change Log\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/).\n\n\n\n## [Unreleased]\n\n\n\n## [v0.6.1] - 2018-06-22\n\n\n\n### Added\n\n\n\n- Re-add the \"rt\" feature mentioned in the documentation that was removed by mistake in v0.6.0.\n\n\n\n### Fixed\n\n\n\n- Building example with recent nightlies.\n\n\n\n## [v0.6.0] - 2018-05-12\n\n\n\n- [breaking-change] bumped the `stm32f30x-hal` dependency to v0.2.0.\n\n\n\n- [breaking-change] this crate now requires `arm-none-eabi-gcc` to be installed and available on\n\n `$PATH` to build.\n\n\n\n## [v0.5.3] - 2018-02-19\n\n\n\n### Added\n\n\n\n- Example: Madgwick's orientation filter\n\n- Example: Logging sensor data over the ITM\n\n\n\n## [v0.5.2] - 2018-01-20\n\n\n\n### Added\n\n\n\n- A \"rt\" Cargo feature that enables the \"rt\" feature of the stm32f30x-hal dependency.\n\n\n\n## [v0.5.1] - 2018-01-17\n\n\n\n### Changed\n\n\n\n- Bumped the version of the cortex-m-rt dependency to 0.3.12\n\n\n\n## [v0.5.0] - 2018-01-17\n\n\n\n### Added\n\n\n\n- Board specific APIs for the user LEDs.\n\n- More concrete re-export of the `L3gd20` driver.\n\n- More concrete re-export of the `Lsm303dlhc` driver.\n\n- Re-export of the HAL provided by the `stm32f30x-hal` crate.\n\n\n\n### Removed\n\n\n\n- [breaking-change] All non-board specific APIs.\n\n\n\n## [v0.4.1] - 2017-05-09\n\n\n\n### Changed\n\n\n\n- Bumped `stm32f30x` dependency to v0.4.1\n\n- Updated the examples to match the stable release of the cortex-m-rtfm crate\n\n\n\n## [v0.4.0] - 2017-04-28\n\n\n\n### Changed\n\n\n\n- [breaking-change] The startup routine has been removed from this crate. 
This\n\n crate is now meant to be used with the [cortex-m-quickstart] template, check\n\n the crate level documentation for details.\n\n\n\n[cortex-m-quickstart]: https://docs.rs/cortex-m-quickstart/0.1.1/cortex_m_quickstart/\n\n\n\n- [breaking-change] The whole API is now async only (check the `examples`\n\n module). Note that for this release we are not on parity with the v0.3.0 API\n\n in terms of functionality.\n\n\n\n## [v0.3.0] - 2016-11-14\n\n\n", "file_path": "CHANGELOG.md", "rank": 32, "score": 17015.104612556515 }, { "content": "[![Build status](https://travis-ci.org/japaric/f3.svg?branch=master)](https://travis-ci.org/japaric/f3)\n\n[![crates.io](https://img.shields.io/crates/d/f3.svg)](https://crates.io/crates/f3)\n\n[![crates.io](https://img.shields.io/crates/v/f3.svg)](https://crates.io/crates/f3)\n\n\n\n# `f3`\n\n\n\n> Board Support Crate for the STM32F3DISCOVERY\n\n\n\n[STM32F3DISCOVERY]: http://www.st.com/en/evaluation-tools/stm32f3discovery.html\n\n\n\n<p align=\"center\">\n\n <a href=\"https://japaric.github.io/f3\">\n\n <img src=\"assets/madgwick.png\"/>\n\n </a>\n\n\n\n Implementation of Madgwick's algorithm. Click to see video. Source in examples/madgwick.rs\n\n</p>\n\n\n\n## [Documentation](https://docs.rs/f3)\n\n\n\n## [Change log](CHANGELOG.md)\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 33, "score": 17011.879749343236 }, { "content": "//! \n\n//! use aligned::Aligned;\n\n//! 
use byteorder::{ByteOrder, LE};\n\n//! use cortex_m::{asm, itm};\n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! hal::{i2c::I2c, prelude::*, spi::Spi, stm32f30x, timer::Timer},\n\n//! l3gd20::{self, Odr},\n\n//! lsm303dlhc::{AccelOdr, MagOdr},\n\n//! L3gd20, Lsm303dlhc,\n\n//! };\n\n//! use nb::block;\n\n//! \n\n//! // TRY changing the sampling frequency\n\n//! const FREQUENCY: u32 = 220;\n\n//! // TRY changing the number of samples\n\n//! const NSAMPLES: u32 = 32 * FREQUENCY; // = 32 seconds\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 34, "score": 44.512343393949514 }, { "content": " let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n\n\n let spi = Spi::spi1(\n\n dp.SPI1,\n\n (sck, miso, mosi),\n\n l3gd20::MODE,\n\n 1.mhz(),\n\n clocks,\n\n &mut rcc.apb2,\n\n );\n\n\n\n let mut l3gd20 = L3gd20::new(spi, nss).unwrap();\n\n\n\n l3gd20.set_odr(Odr::Hz380).unwrap();\n\n\n\n let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n\n\n let i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1);\n\n\n", "file_path": "examples/madgwick.rs", "rank": 36, "score": 41.96044953389195 }, { "content": "//! let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! \n\n//! let spi = Spi::spi1(\n\n//! dp.SPI1,\n\n//! (sck, miso, mosi),\n\n//! l3gd20::MODE,\n\n//! 1.mhz(),\n\n//! clocks,\n\n//! &mut rcc.apb2,\n\n//! );\n\n//! \n\n//! let mut l3gd20 = L3gd20::new(spi, nss).unwrap();\n\n//! \n\n//! l3gd20.set_odr(Odr::Hz380).unwrap();\n\n//! \n\n//! let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n//! let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n//! \n\n//! 
let i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1);\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 37, "score": 41.70326850324896 }, { "content": "//! \n\n//! // enable stimulus port 0\n\n//! cp.ITM.ter[0].write(1);\n\n//! }\n\n//! \n\n//! let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n//! let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n//! let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n//! \n\n//! // I2C\n\n//! let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n//! let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl);\n\n//! \n\n//! let i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1);\n\n//! \n\n//! // LSM303DLHC\n\n//! let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap();\n\n//! lsm303dlhc.accel_odr(AccelOdr::Hz400).unwrap();\n\n//! lsm303dlhc.mag_odr(MagOdr::Hz220).unwrap();\n\n//! \n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 39, "score": 36.68107914880223 }, { "content": "//! use core::{f32::consts::PI, ptr};\n\n//! \n\n//! use aligned::Aligned;\n\n//! use byteorder::{ByteOrder, LE};\n\n//! use cast::{f32, i32};\n\n//! use cortex_m::itm;\n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! hal::{i2c::I2c, prelude::*, spi::Spi, stm32f30x, timer::Timer},\n\n//! l3gd20::{self, Odr},\n\n//! lsm303dlhc::{AccelOdr, MagOdr},\n\n//! L3gd20, Lsm303dlhc,\n\n//! };\n\n//! use madgwick::{F32x3, Marg};\n\n//! use nb::block;\n\n//! \n\n//! // Number of samples to use for gyroscope calibration\n\n//! const NSAMPLES: i32 = 256;\n\n//! \n\n//! // Magnetometer calibration parameters\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 41, "score": 34.8475989966307 }, { "content": "//! l3gd20.set_odr(Odr::Hz380).unwrap();\n\n//! \n\n//! // TIMER\n\n//! let mut timer = Timer::tim2(dp.TIM2, FREQUENCY.hz(), clocks, &mut rcc.apb1);\n\n//! \n\n//! // start of COBS frame\n\n//! itm::write_all(&mut cp.ITM.stim[0], &[0]);\n\n//! \n\n//! // Capture N samples\n\n//! 
let mut tx_buf: Aligned<u32, [u8; 20]> = Aligned([0; 20]);\n\n//! for _ in 0..NSAMPLES {\n\n//! block!(timer.wait()).unwrap();\n\n//! \n\n//! // Read sensors\n\n//! let m = lsm303dlhc.mag().unwrap();\n\n//! let ar = l3gd20.gyro().unwrap();\n\n//! let g = lsm303dlhc.accel().unwrap();\n\n//! \n\n//! // Serialize the data\n\n//! let mut buf = [0; 18];\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 42, "score": 34.50699330585645 }, { "content": "//! // SPI\n\n//! let mut nss = gpioe\n\n//! .pe3\n\n//! .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n//! nss.set_high();\n\n//! let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n//! \n\n//! let spi = Spi::spi1(\n\n//! dp.SPI1,\n\n//! (sck, miso, mosi),\n\n//! l3gd20::MODE,\n\n//! 1.mhz(),\n\n//! clocks,\n\n//! &mut rcc.apb2,\n\n//! );\n\n//! \n\n//! // L3GD20\n\n//! let mut l3gd20 = L3gd20::new(spi, nss).unwrap();\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 43, "score": 34.34740317499057 }, { "content": "use aligned::Aligned;\n\nuse byteorder::{ByteOrder, LE};\n\nuse cast::{f32, i32};\n\nuse cortex_m::itm;\n\nuse cortex_m_rt::entry;\n\nuse f3::{\n\n hal::{i2c::I2c, prelude::*, spi::Spi, stm32f30x, timer::Timer},\n\n l3gd20::{self, Odr},\n\n lsm303dlhc::{AccelOdr, MagOdr},\n\n L3gd20, Lsm303dlhc,\n\n};\n\nuse madgwick::{F32x3, Marg};\n\nuse nb::block;\n\n\n\n// Number of samples to use for gyroscope calibration\n\nconst NSAMPLES: i32 = 256;\n\n\n\n// Magnetometer calibration parameters\n\n// NOTE you need to use the right parameters for *your* magnetometer\n\n// You can use the `log-sensors` example to calibrate your magnetometer. The producer is explained\n", "file_path": "examples/madgwick.rs", "rank": 45, "score": 32.99850372308146 }, { "content": "//! #[entry]\n\n//! fn main() -> ! {\n\n//! 
let mut cp = cortex_m::Peripherals::take().unwrap();\n\n//! let dp = stm32f30x::Peripherals::take().unwrap();\n\n//! \n\n//! let mut flash = dp.FLASH.constrain();\n\n//! let mut rcc = dp.RCC.constrain();\n\n//! \n\n//! let clocks = rcc\n\n//! .cfgr\n\n//! .sysclk(64.mhz())\n\n//! .pclk1(32.mhz())\n\n//! .freeze(&mut flash.acr);\n\n//! \n\n//! // enable ITM\n\n//! // TODO this should be some high level API in the cortex-m crate\n\n//! unsafe {\n\n//! // enable TPIU and ITM\n\n//! cp.DCB.demcr.modify(|r| r | (1 << 24));\n\n//! \n", "file_path": "src/examples/_10_madgwick.rs", "rank": 46, "score": 32.43107217990155 }, { "content": " );\n\n\n\n // enable stimulus port 0\n\n cp.ITM.ter[0].write(1);\n\n }\n\n\n\n let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n\n\n let mut nss = gpioe\n\n .pe3\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n nss.set_high();\n\n let mut led = gpioe\n\n .pe9\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n\n\n let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n", "file_path": "examples/madgwick.rs", "rank": 47, "score": 30.037556347011535 }, { "content": "//! (1 << 0), // enable the ITM\n\n//! );\n\n//! \n\n//! // enable stimulus port 0\n\n//! cp.ITM.ter[0].write(1);\n\n//! }\n\n//! \n\n//! let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n//! let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n//! let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n//! \n\n//! let mut nss = gpioe\n\n//! .pe3\n\n//! .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n//! nss.set_high();\n\n//! let mut led = gpioe\n\n//! .pe9\n\n//! .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n//! \n\n//! 
let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 48, "score": 29.60395744526678 }, { "content": "//! let mut cp = cortex_m::Peripherals::take().unwrap();\n\n//! let dp = stm32f30x::Peripherals::take().unwrap();\n\n//! \n\n//! let mut flash = dp.FLASH.constrain();\n\n//! let mut rcc = dp.RCC.constrain();\n\n//! \n\n//! let clocks = rcc\n\n//! .cfgr\n\n//! .sysclk(64.mhz())\n\n//! .pclk1(32.mhz())\n\n//! .freeze(&mut flash.acr);\n\n//! \n\n//! // enable ITM\n\n//! // TODO this should be some high level API in the cortex-m crate\n\n//! unsafe {\n\n//! // enable TPIU and ITM\n\n//! cp.DCB.demcr.modify(|r| r | (1 << 24));\n\n//! \n\n//! // prescaler\n\n//! let swo_freq = 2_000_000;\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 49, "score": 28.31543019422644 }, { "content": "#![no_std]\n\n\n\npub extern crate l3gd20;\n\npub extern crate lsm303dlhc;\n\npub extern crate stm32f30x_hal as hal;\n\n\n\nuse hal::gpio::gpioa::{PA5, PA6, PA7};\n\nuse hal::gpio::gpiob::{PB6, PB7};\n\nuse hal::gpio::gpioe::PE3;\n\nuse hal::gpio::{Output, PushPull, AF4, AF5};\n\nuse hal::i2c::I2c;\n\nuse hal::spi::Spi;\n\nuse hal::stm32f30x::{I2C1, SPI1};\n\n\n\npub mod examples;\n\npub mod led;\n\n\n\n/// On board L3GD20 connected to the SPI1 bus via the pins PA5, PA6, PA7 and PE3\n\npub type L3gd20 = l3gd20::L3gd20<Spi<SPI1, (PA5<AF5>, PA6<AF5>, PA7<AF5>)>, PE3<Output<PushPull>>>;\n\n\n\n/// On board LSM303DLHC connected to the I2C1 bus via the PB6 and PB7 pins\n\npub type Lsm303dlhc = lsm303dlhc::Lsm303dlhc<I2c<I2C1, (PB6<AF4>, PB7<AF4>)>>;\n", "file_path": "src/lib.rs", "rank": 50, "score": 28.057643292415936 }, { "content": "//! A LED roulette!\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_std]\n\n//! #![no_main]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! 
hal::{delay::Delay, prelude::*, stm32f30x},\n\n//! led::Leds,\n\n//! };\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let cp = cortex_m::Peripherals::take().unwrap();\n\n//! let dp = stm32f30x::Peripherals::take().unwrap();\n", "file_path": "src/examples/_04_roulette.rs", "rank": 51, "score": 27.278836711109534 }, { "content": "//! \n\n//! let mut flash = dp.FLASH.constrain();\n\n//! let mut rcc = dp.RCC.constrain();\n\n//! let gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n//! \n\n//! // clock configuration using the default settings (all clocks run at 8 MHz)\n\n//! let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n//! // TRY this alternate clock configuration (all clocks run at 16 MHz)\n\n//! // let clocks = rcc.cfgr.sysclk(16.mhz()).freeze(&mut flash.acr);\n\n//! \n\n//! let mut leds = Leds::new(gpioe);\n\n//! let mut delay = Delay::new(cp.SYST, clocks);\n\n//! \n\n//! let n = leds.len();\n\n//! loop {\n\n//! for curr in 0..n {\n\n//! let next = (curr + 1) % n;\n\n//! leds[curr].off();\n\n//! leds[next].on();\n\n//! \n\n//! delay.delay_ms(100_u8);\n\n//! }\n\n//! }\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_04_roulette.rs", "rank": 52, "score": 26.482605133945164 }, { "content": "//! let dp = stm32f30x::Peripherals::take().unwrap();\n\n//! \n\n//! let mut flash = dp.FLASH.constrain();\n\n//! let mut rcc = dp.RCC.constrain();\n\n//! let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n//! \n\n//! // clock configuration using the default settings (all clocks run at 8 MHz)\n\n//! let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n//! // TRY this alternate clock configuration (all clocks run at 16 MHz)\n\n//! // let clocks = rcc.cfgr.sysclk(16.mhz()).freeze(&mut flash.acr);\n\n//! \n\n//! let mut led: Led = gpioe\n\n//! .pe9\n\n//! .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper)\n\n//! .into();\n\n//! let mut delay = Delay::new(cp.SYST, clocks);\n\n//! \n\n//! loop {\n\n//! led.on();\n\n//! 
delay.delay_ms(1_000_u16);\n\n//! led.off();\n\n//! delay.delay_ms(1_000_u16);\n\n//! }\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_03_blinky.rs", "rank": 53, "score": 26.32480748089705 }, { "content": "//! Turns all the user LEDs on\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_std]\n\n//! #![no_main]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! hal::{prelude::*, stm32f30x},\n\n//! led::Leds,\n\n//! };\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let p = stm32f30x::Peripherals::take().unwrap();\n\n//! \n", "file_path": "src/examples/_02_leds.rs", "rank": 54, "score": 26.173362706000454 }, { "content": " let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap();\n\n\n\n lsm303dlhc.accel_odr(AccelOdr::Hz400).unwrap();\n\n lsm303dlhc.mag_odr(MagOdr::Hz220).unwrap();\n\n\n\n let mut timer = Timer::tim2(dp.TIM2, 380.hz(), clocks, &mut rcc.apb1);\n\n\n\n // Calibrate the gyroscope\n\n let mut ar_bias_x = 0;\n\n let mut ar_bias_y = 0;\n\n let mut ar_bias_z = 0;\n\n for _ in 0..NSAMPLES {\n\n block!(timer.wait()).unwrap();\n\n\n\n let ar = l3gd20.all().unwrap().gyro;\n\n\n\n ar_bias_x += i32(ar.x);\n\n ar_bias_y += i32(ar.y);\n\n ar_bias_z += i32(ar.z);\n\n }\n", "file_path": "examples/madgwick.rs", "rank": 55, "score": 25.98700305239534 }, { "content": "//! \n\n//! let mut lsm303dlhc = Lsm303dlhc::new(i2c).unwrap();\n\n//! \n\n//! lsm303dlhc.accel_odr(AccelOdr::Hz400).unwrap();\n\n//! lsm303dlhc.mag_odr(MagOdr::Hz220).unwrap();\n\n//! \n\n//! let mut timer = Timer::tim2(dp.TIM2, 380.hz(), clocks, &mut rcc.apb1);\n\n//! \n\n//! // Calibrate the gyroscope\n\n//! let mut ar_bias_x = 0;\n\n//! let mut ar_bias_y = 0;\n\n//! let mut ar_bias_z = 0;\n\n//! for _ in 0..NSAMPLES {\n\n//! block!(timer.wait()).unwrap();\n\n//! \n\n//! let ar = l3gd20.all().unwrap().gyro;\n\n//! \n\n//! ar_bias_x += i32(ar.x);\n\n//! 
ar_bias_y += i32(ar.y);\n\n//! ar_bias_z += i32(ar.z);\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 56, "score": 25.98700305239534 }, { "content": "//! Turns all the user LEDs on\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_std]\n\n#![no_main]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m_rt::entry;\n\nuse f3::{\n\n hal::{prelude::*, stm32f30x},\n\n led::Leds,\n\n};\n\n\n\n#[entry]\n", "file_path": "examples/leds.rs", "rank": 57, "score": 25.87831236141398 }, { "content": "//! Blinks an LED\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_std]\n\n//! #![no_main]\n\n//! \n\n//! // Panic handler\n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m_rt::entry;\n\n//! use f3::{\n\n//! hal::{delay::Delay, prelude::*, stm32f30x},\n\n//! led::Led,\n\n//! };\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let cp = cortex_m::Peripherals::take().unwrap();\n", "file_path": "src/examples/_03_blinky.rs", "rank": 58, "score": 24.867125381459594 }, { "content": "//! Test the serial interface\n\n//!\n\n//! This example requires you to short (connect) the TX and RX pins.\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m::asm;\n\n//! use cortex_m_rt::entry;\n\n//! use f3::hal::{prelude::*, serial::Serial, stm32f30x};\n\n//! use nb::block;\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let p = stm32f30x::Peripherals::take().unwrap();\n", "file_path": "src/examples/_05_serial.rs", "rank": 59, "score": 24.680417494147566 }, { "content": "//! Test the serial interface\n\n//!\n\n//! 
This example requires you to short (connect) the TX and RX pins.\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_main]\n\n#![no_std]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m::asm;\n\nuse cortex_m_rt::entry;\n\nuse f3::hal::{prelude::*, serial::Serial, stm32f30x};\n\nuse nb::block;\n\n\n\n#[entry]\n", "file_path": "examples/serial.rs", "rank": 60, "score": 24.10770312271416 }, { "content": "//! A LED roulette!\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_std]\n\n#![no_main]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m_rt::entry;\n\nuse f3::{\n\n hal::{delay::Delay, prelude::*, stm32f30x},\n\n led::Leds,\n\n};\n\n\n\n#[entry]\n", "file_path": "examples/roulette.rs", "rank": 61, "score": 23.429266823160518 }, { "content": "//! Serial interface echo server\n\n//!\n\n//! In this example every received byte will be sent back to the sender. You can test this example\n\n//! with serial terminal emulator like `minicom`.\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m_rt::entry;\n\n//! use f3::hal::{prelude::*, serial::Serial, stm32f30x};\n\n//! use nb::block;\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! let p = stm32f30x::Peripherals::take().unwrap();\n", "file_path": "src/examples/_06_serial_echo.rs", "rank": 62, "score": 23.282791917999898 }, { "content": "//! Blinks an LED\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_std]\n\n#![no_main]\n\n\n\n// Panic handler\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m_rt::entry;\n\nuse f3::{\n\n hal::{delay::Delay, prelude::*, stm32f30x},\n\n led::Led,\n\n};\n\n\n\n#[entry]\n", "file_path": "examples/blinky.rs", "rank": 63, "score": 23.003950498607622 }, { "content": "//! Serial interface echo server\n\n//!\n\n//! In this example every received byte will be sent back to the sender. You can test this example\n\n//! 
with serial terminal emulator like `minicom`.\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_main]\n\n#![no_std]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m_rt::entry;\n\nuse f3::hal::{prelude::*, serial::Serial, stm32f30x};\n\nuse nb::block;\n\n\n\n#[entry]\n", "file_path": "examples/serial-echo.rs", "rank": 64, "score": 22.60267957434918 }, { "content": "//! }\n\n//! let ar_bias_x = (ar_bias_x / NSAMPLES) as i16;\n\n//! let ar_bias_y = (ar_bias_y / NSAMPLES) as i16;\n\n//! let ar_bias_z = (ar_bias_z / NSAMPLES) as i16;\n\n//! \n\n//! // Turn on the LED after calibrating the gyroscope\n\n//! led.set_high();\n\n//! \n\n//! let mut marg = Marg::new(BETA, 1. / f32(SAMPLE_FREQ));\n\n//! let mut timer = Timer::tim2(timer.free(), SAMPLE_FREQ.hz(), clocks, &mut rcc.apb1);\n\n//! \n\n//! let mut tx_buf: Aligned<u32, [u8; 18]> = Aligned([0; 18]);\n\n//! loop {\n\n//! block!(timer.wait()).unwrap();\n\n//! \n\n//! let m = lsm303dlhc.mag().unwrap();\n\n//! let ar = l3gd20.all().unwrap().gyro;\n\n//! let g = lsm303dlhc.accel().unwrap();\n\n//! \n\n//! let m_x = (f32(m.x) - M_BIAS_X) / M_SCALE_X;\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 65, "score": 21.73240103373787 }, { "content": " let ar_bias_x = (ar_bias_x / NSAMPLES) as i16;\n\n let ar_bias_y = (ar_bias_y / NSAMPLES) as i16;\n\n let ar_bias_z = (ar_bias_z / NSAMPLES) as i16;\n\n\n\n // Turn on the LED after calibrating the gyroscope\n\n led.set_high();\n\n\n\n let mut marg = Marg::new(BETA, 1. 
/ f32(SAMPLE_FREQ));\n\n let mut timer = Timer::tim2(timer.free(), SAMPLE_FREQ.hz(), clocks, &mut rcc.apb1);\n\n\n\n let mut tx_buf: Aligned<u32, [u8; 18]> = Aligned([0; 18]);\n\n loop {\n\n block!(timer.wait()).unwrap();\n\n\n\n let m = lsm303dlhc.mag().unwrap();\n\n let ar = l3gd20.all().unwrap().gyro;\n\n let g = lsm303dlhc.accel().unwrap();\n\n\n\n let m_x = (f32(m.x) - M_BIAS_X) / M_SCALE_X;\n\n let m_y = (f32(m.y) - M_BIAS_Y) / M_SCALE_Y;\n", "file_path": "examples/madgwick.rs", "rank": 66, "score": 20.970263433379365 }, { "content": "//! \n\n//! let mut flash = p.FLASH.constrain();\n\n//! let mut rcc = p.RCC.constrain();\n\n//! let mut gpioa = p.GPIOA.split(&mut rcc.ahb);\n\n//! \n\n//! let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n//! \n\n//! let tx = gpioa.pa9.into_af7(&mut gpioa.moder, &mut gpioa.afrh);\n\n//! let rx = gpioa.pa10.into_af7(&mut gpioa.moder, &mut gpioa.afrh);\n\n//! \n\n//! let serial = Serial::usart1(p.USART1, (tx, rx), 115_200.bps(), clocks, &mut rcc.apb2);\n\n//! let (mut tx, mut rx) = serial.split();\n\n//! \n\n//! loop {\n\n//! let byte = block!(rx.read()).unwrap();\n\n//! block!(tx.write(byte)).ok();\n\n//! }\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_06_serial_echo.rs", "rank": 67, "score": 20.4798206584683 }, { "content": "//! Prints \"Hello, world\" on the OpenOCD console\n\n#![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_main]\n\n#![no_std]\n\n\n\nextern crate f3;\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m_rt::entry;\n\nuse cortex_m_semihosting::hprintln;\n\n\n\n#[entry]\n", "file_path": "examples/hello.rs", "rank": 68, "score": 19.503886910999555 }, { "content": "//! Prints \"Hello, world\" on the OpenOCD console\n\n//!\n\n//! ```\n\n//! #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n\n//! extern crate f3;\n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use cortex_m_rt::entry;\n\n//! 
use cortex_m_semihosting::hprintln;\n\n//! \n\n//! #[entry]\n\n//! fn main() -> ! {\n\n//! hprintln!(\"Hello, world!\").unwrap();\n\n//! \n\n//! loop {}\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_00_hello.rs", "rank": 69, "score": 18.969223306829566 }, { "content": "//! \n\n//! let mut flash = p.FLASH.constrain();\n\n//! let mut rcc = p.RCC.constrain();\n\n//! let mut gpioc = p.GPIOC.split(&mut rcc.ahb);\n\n//! \n\n//! // clock configuration using the default settings (all clocks run at 8 MHz)\n\n//! let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n//! // TRY this alternate clock configuration (clocks run at nearly the maximum frequency)\n\n//! // let clocks = rcc.cfgr.sysclk(64.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr);\n\n//! \n\n//! // The Serial API is highly generic\n\n//! // TRY the commented out, different pin configurations\n\n//! let tx = gpioc.pc4.into_af7(&mut gpioc.moder, &mut gpioc.afrl);\n\n//! \n\n//! let rx = gpioc.pc5.into_af7(&mut gpioc.moder, &mut gpioc.afrl);\n\n//! \n\n//! // TRY using a different USART peripheral here\n\n//! let serial = Serial::usart1(p.USART1, (tx, rx), 9_600.bps(), clocks, &mut rcc.apb2);\n\n//! let (mut tx, mut _rx) = serial.split();\n\n//! \n", "file_path": "src/examples/_05_serial.rs", "rank": 70, "score": 18.93726384766094 }, { "content": "//! cp.TPIU.acpr.write((clocks.sysclk().0 / swo_freq) - 1);\n\n//! \n\n//! // SWO NRZ\n\n//! cp.TPIU.sppr.write(2);\n\n//! \n\n//! cp.TPIU.ffcr.modify(|r| r & !(1 << 1));\n\n//! \n\n//! // STM32 specific: enable tracing in the DBGMCU_CR register\n\n//! const DBGMCU_CR: *mut u32 = 0xe0042004 as *mut u32;\n\n//! let r = ptr::read_volatile(DBGMCU_CR);\n\n//! ptr::write_volatile(DBGMCU_CR, r | (1 << 5));\n\n//! \n\n//! // unlock the ITM\n\n//! cp.ITM.lar.write(0xC5ACCE55);\n\n//! \n\n//! cp.ITM.tcr.write(\n\n//! (0b000001 << 16) | // TraceBusID\n\n//! (1 << 3) | // enable SWO output\n\n//! (1 << 0), // enable the ITM\n\n//! 
);\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 71, "score": 13.5883699752601 }, { "content": " let swo_freq = 2_000_000;\n\n cp.TPIU.acpr.write((clocks.sysclk().0 / swo_freq) - 1);\n\n\n\n // SWO NRZ\n\n cp.TPIU.sppr.write(2);\n\n\n\n cp.TPIU.ffcr.modify(|r| r & !(1 << 1));\n\n\n\n // STM32 specific: enable tracing in the DBGMCU_CR register\n\n const DBGMCU_CR: *mut u32 = 0xe0042004 as *mut u32;\n\n let r = ptr::read_volatile(DBGMCU_CR);\n\n ptr::write_volatile(DBGMCU_CR, r | (1 << 5));\n\n\n\n // unlock the ITM\n\n cp.ITM.lar.write(0xC5ACCE55);\n\n\n\n cp.ITM.tcr.write(\n\n (0b000001 << 16) | // TraceBusID\n\n (1 << 3) | // enable SWO output\n\n (1 << 0), // enable the ITM\n", "file_path": "examples/madgwick.rs", "rank": 72, "score": 13.467404622128853 }, { "content": "//! // prescaler\n\n//! let swo_freq = 2_000_000;\n\n//! cp.TPIU.acpr.write((clocks.sysclk().0 / swo_freq) - 1);\n\n//! \n\n//! // SWO NRZ\n\n//! cp.TPIU.sppr.write(2);\n\n//! \n\n//! cp.TPIU.ffcr.modify(|r| r & !(1 << 1));\n\n//! \n\n//! // STM32 specific: enable tracing in the DBGMCU_CR register\n\n//! const DBGMCU_CR: *mut u32 = 0xe0042004 as *mut u32;\n\n//! let r = ptr::read_volatile(DBGMCU_CR);\n\n//! ptr::write_volatile(DBGMCU_CR, r | (1 << 5));\n\n//! \n\n//! // unlock the ITM\n\n//! cp.ITM.lar.write(0xC5ACCE55);\n\n//! \n\n//! cp.ITM.tcr.write(\n\n//! (0b000001 << 16) | // TraceBusID\n\n//! (1 << 3) | // enable SWO output\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 74, "score": 13.26323129893527 }, { "content": "//! The suggested way to receive this data is to connect the F3 SWO pin to a UART to USB converter\n\n//! and then to read out the associated device file using `itmdump`. Make sure you configure the\n\n//! serial device before calling `itmdump`. The commands to run are:\n\n//!\n\n//! ``` console\n\n//! $ stty -F /dev/ttyUSB0 raw 2000000 -echo\n\n//!\n\n//! $ itmdump -f /dev/ttyUSB0 > data.txt\n\n//! ```\n\n//!\n\n//! 
You can plot this data using the `plot.py` script in the root of this crate.\n\n//!\n\n//! ```\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n\n//! use core::ptr;\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 76, "score": 12.801661518639696 }, { "content": " LE::write_f32(&mut buf[start..start + 4], quat.3);\n\n // start += 4;\n\n\n\n // Log data\n\n cobs::encode(&buf, &mut tx_buf.array);\n\n\n\n itm::write_aligned(&mut cp.ITM.stim[0], &tx_buf);\n\n }\n\n}\n", "file_path": "examples/madgwick.rs", "rank": 77, "score": 11.402665073055221 }, { "content": "//! start += 4;\n\n//! LE::write_f32(&mut buf[start..start + 4], quat.3);\n\n//! // start += 4;\n\n//! \n\n//! // Log data\n\n//! cobs::encode(&buf, &mut tx_buf.array);\n\n//! \n\n//! itm::write_aligned(&mut cp.ITM.stim[0], &tx_buf);\n\n//! }\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 79, "score": 11.065410238403537 }, { "content": "//! LE::write_i16(&mut buf[start..start + 2], g.z);\n\n//! \n\n//! // Log data\n\n//! cobs::encode(&buf, &mut tx_buf);\n\n//! \n\n//! itm::write_aligned(&mut cp.ITM.stim[0], &tx_buf);\n\n//! }\n\n//! \n\n//! // Done\n\n//! asm::bkpt();\n\n//! \n\n//! loop {}\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 80, "score": 11.065410238403537 }, { "content": "//! $ itmdump -f /dev/ttyUSB0 > data.txt\n\n//! ```\n\n//!\n\n//! You can pipe the quaternions through the `viz` program (shipped with this crate) to get real\n\n//! time [visualization]. The command to run is:\n\n//!\n\n//! [visualization]: https://mobile.twitter.com/japaricious/status/962770003325005824\n\n//!\n\n//! ``` console\n\n//! $ itmdump -f /dev/ttyUSB0 | viz\n\n//! 
```\n\n// #![deny(unsafe_code)]\n\n#![deny(warnings)]\n\n#![no_main]\n\n#![no_std]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse core::{f32::consts::PI, ptr};\n\n\n", "file_path": "examples/madgwick.rs", "rank": 81, "score": 10.810121086484768 }, { "content": "//! let mut rcc = p.RCC.constrain();\n\n//! let gpioe = p.GPIOE.split(&mut rcc.ahb);\n\n//! \n\n//! let mut leds = Leds::new(gpioe);\n\n//! \n\n//! for led in leds.iter_mut() {\n\n//! led.on();\n\n//! }\n\n//! \n\n//! loop {}\n\n//! }\n\n//! ```\n\n// Auto-generated. Do not modify.\n", "file_path": "src/examples/_02_leds.rs", "rank": 82, "score": 10.740010758001668 }, { "content": "//! $ itmdump -f /dev/ttyUSB0 > data.txt\n\n//! ```\n\n//!\n\n//! You can pipe the quaternions through the `viz` program (shipped with this crate) to get real\n\n//! time [visualization]. The command to run is:\n\n//!\n\n//! [visualization]: https://mobile.twitter.com/japaricious/status/962770003325005824\n\n//!\n\n//! ``` console\n\n//! $ itmdump -f /dev/ttyUSB0 | viz\n\n//! ```\n\n//!\n\n//! ```\n\n//! // #![deny(unsafe_code)]\n\n//! #![deny(warnings)]\n\n//! #![no_main]\n\n//! #![no_std]\n\n//! \n\n//! extern crate panic_semihosting;\n\n//! \n", "file_path": "src/examples/_10_madgwick.rs", "rank": 83, "score": 9.210111275490712 }, { "content": "//! Madgwick's orientation filter\n\n//!\n\n//! This demo runs Madgwick's orientation filter and logs the orientation of the board as\n\n//! quaternions via the ITM. The data is encoded in binary format and logged as COBS frame. The\n\n//! binary format is as follows:\n\n//!\n\n//! - `w`: `f32`, LE (Little Endian), 4 bytes\n\n//! - `x`: `f32`, LE, 4 bytes\n\n//! - `y`: `f32`, LE, 4 bytes\n\n//! - `z`: `f32`, LE, 4 bytes\n\n//!\n\n//! where the quaternion is the tuple `(w, x, y, z)`\n\n//!\n\n//! The suggested way to receive this data is to connect the F3 SWO pin to a UART to USB converter\n\n//! and then to read out the associated device file using `itmdump`. 
Make sure you configure the\n\n//! serial device before calling `itmdump`. The commands to run are:\n\n//!\n\n//! ``` console\n\n//! $ stty -F /dev/ttyUSB0 raw 2000000 -echo\n\n//!\n", "file_path": "examples/madgwick.rs", "rank": 84, "score": 8.197992112466185 }, { "content": "//! Madgwick's orientation filter\n\n//!\n\n//! This demo runs Madgwick's orientation filter and logs the orientation of the board as\n\n//! quaternions via the ITM. The data is encoded in binary format and logged as COBS frame. The\n\n//! binary format is as follows:\n\n//!\n\n//! - `w`: `f32`, LE (Little Endian), 4 bytes\n\n//! - `x`: `f32`, LE, 4 bytes\n\n//! - `y`: `f32`, LE, 4 bytes\n\n//! - `z`: `f32`, LE, 4 bytes\n\n//!\n\n//! where the quaternion is the tuple `(w, x, y, z)`\n\n//!\n\n//! The suggested way to receive this data is to connect the F3 SWO pin to a UART to USB converter\n\n//! and then to read out the associated device file using `itmdump`. Make sure you configure the\n\n//! serial device before calling `itmdump`. The commands to run are:\n\n//!\n\n//! ``` console\n\n//! 
$ stty -F /dev/ttyUSB0 raw 2000000 -echo\n\n//!\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 85, "score": 8.197992112466185 }, { "content": " pub fn new(mut gpioe: gpioe::Parts) -> Self {\n\n let n = gpioe\n\n .pe9\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let ne = gpioe\n\n .pe10\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let e = gpioe\n\n .pe11\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let se = gpioe\n\n .pe12\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let s = gpioe\n\n .pe13\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let sw = gpioe\n\n .pe14\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let w = gpioe\n", "file_path": "src/led.rs", "rank": 86, "score": 7.910245412334433 }, { "content": " .pe15\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n let nw = gpioe\n\n .pe8\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n\n\n Leds {\n\n leds: [\n\n n.into(),\n\n ne.into(),\n\n e.into(),\n\n se.into(),\n\n s.into(),\n\n sw.into(),\n\n w.into(),\n\n nw.into(),\n\n ],\n\n }\n\n }\n\n}\n", "file_path": "src/led.rs", "rank": 87, "score": 7.396042387799831 }, { "content": "//! \n\n//! let mut start = 0;\n\n//! LE::write_i16(&mut buf[start..start + 2], m.x);\n\n//! start += 2;\n\n//! LE::write_i16(&mut buf[start..start + 2], m.y);\n\n//! start += 2;\n\n//! LE::write_i16(&mut buf[start..start + 2], m.z);\n\n//! start += 2;\n\n//! \n\n//! LE::write_i16(&mut buf[start..start + 2], ar.x);\n\n//! start += 2;\n\n//! LE::write_i16(&mut buf[start..start + 2], ar.y);\n\n//! start += 2;\n\n//! LE::write_i16(&mut buf[start..start + 2], ar.z);\n\n//! start += 2;\n\n//! \n\n//! LE::write_i16(&mut buf[start..start + 2], g.x);\n\n//! start += 2;\n\n//! LE::write_i16(&mut buf[start..start + 2], g.y);\n\n//! 
start += 2;\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 89, "score": 7.232829693960451 }, { "content": "//! Board support crate for the STM32F3DISCOVERY\n\n//!\n\n//! # Usage\n\n//!\n\n//! - Trying out the examples\n\n//!\n\n//! ``` text\n\n//! $ # if you don't have the clone subcommand\n\n//! $ cargo install cargo-clone\n\n//!\n\n//! $ cargo clone f3 --vers 0.6.0\n\n//!\n\n//! # on another terminal\n\n//! $ openocd -f interface/stlink-v2-1.cfg -f target/stm32f3x.cfg\n\n//!\n\n//! # flash and debug the \"Hello, world\" example\n\n//! $ cd f3\n\n//! $ rustup target add thumbv7em-none-eabihf\n\n//! $ cargo run --example hello\n\n//! ```\n", "file_path": "src/lib.rs", "rank": 90, "score": 7.060549912213604 }, { "content": "//! On-board user LEDs\n\n\n\nuse core::ops;\n\n\n\nuse embedded_hal::digital::v2::OutputPin;\n\n\n\nuse hal::gpio::gpioe::{self, PEx, PE10, PE11, PE12, PE13, PE14, PE15, PE8, PE9};\n\nuse hal::gpio::{Output, PushPull};\n\n\n\n/// North LED\n\npub type LD3 = PE9<Output<PushPull>>;\n\n\n\n/// Northeast LED\n\npub type LD5 = PE10<Output<PushPull>>;\n\n\n\n/// East LED\n\npub type LD7 = PE11<Output<PushPull>>;\n\n\n\n/// Southeast LED\n\npub type LD9 = PE12<Output<PushPull>>;\n", "file_path": "src/led.rs", "rank": 91, "score": 6.656531364743648 }, { "content": " let g_y = f32(g.y) * K_G;\n\n let g_z = f32(g.z) * K_G;\n\n let g = F32x3 {\n\n x: g_y,\n\n y: -g_x,\n\n z: g_z,\n\n };\n\n\n\n // Run the filter\n\n let quat = marg.update(m, ar, g);\n\n\n\n // Serialize the quaternion\n\n let mut start = 0;\n\n let mut buf = [0; 16];\n\n LE::write_f32(&mut buf[start..start + 4], quat.0);\n\n start += 4;\n\n LE::write_f32(&mut buf[start..start + 4], quat.1);\n\n start += 4;\n\n LE::write_f32(&mut buf[start..start + 4], quat.2);\n\n start += 4;\n", "file_path": "examples/madgwick.rs", "rank": 92, "score": 6.580871331842202 }, { "content": "//! let g_x = f32(g.x) * K_G;\n\n//! let g_y = f32(g.y) * K_G;\n\n//! let g_z = f32(g.z) * K_G;\n\n//! 
let g = F32x3 {\n\n//! x: g_y,\n\n//! y: -g_x,\n\n//! z: g_z,\n\n//! };\n\n//! \n\n//! // Run the filter\n\n//! let quat = marg.update(m, ar, g);\n\n//! \n\n//! // Serialize the quaternion\n\n//! let mut start = 0;\n\n//! let mut buf = [0; 16];\n\n//! LE::write_f32(&mut buf[start..start + 4], quat.0);\n\n//! start += 4;\n\n//! LE::write_f32(&mut buf[start..start + 4], quat.1);\n\n//! start += 4;\n\n//! LE::write_f32(&mut buf[start..start + 4], quat.2);\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 93, "score": 6.532835862661553 }, { "content": "//! Examples in order of increasing complexity\n\n// Auto-generated. Do not modify.\n\npub mod _00_hello;\n\npub mod _01_itm;\n\npub mod _02_leds;\n\npub mod _03_blinky;\n\npub mod _04_roulette;\n\npub mod _05_serial;\n\npub mod _06_serial_echo;\n\npub mod _07_l3gd20;\n\npub mod _08_lsm303dlhc;\n\npub mod _09_log_sensors;\n\npub mod _10_madgwick;\n", "file_path": "src/examples/mod.rs", "rank": 94, "score": 6.457390679884721 }, { "content": " let (mut tx, mut _rx) = serial.split();\n\n\n\n let sent = b'X';\n\n\n\n asm::bkpt();\n\n\n\n // The `block!` macro makes an operation block until it finishes\n\n // NOTE the error type is `!`\n\n block!(tx.write(sent)).ok();\n\n\n\n // let received = block!(_rx.read()).unwrap();\n\n\n\n // assert_eq!(received, sent);\n\n\n\n // if all goes well you should reach this breakpoint\n\n asm::bkpt();\n\n\n\n loop {}\n\n}\n", "file_path": "examples/serial.rs", "rank": 95, "score": 6.416189746026038 }, { "content": "//! Logs data from the motion sensors over ITM\n\n//!\n\n//! This example logs sensor data over ITM. The data is encoded in a binary format and logged as\n\n//! COBS frames. The binary format is as follows:\n\n//!\n\n//! - Magnetometer readings\n\n//! - `mx`: `i16`, LE (Little Endian), 2 bytes\n\n//! - `my`: `i16`, LE, 2 bytes\n\n//! - `mz`: `i16`, LE, 2 bytes\n\n//!\n\n//! - Gyroscope readings\n\n//! - `arx`: `i16`, LE, 2 bytes\n\n//! - `ary`: `i16`, LE, 2 bytes\n\n//! 
- `arz`: `i16`, LE, 2 bytes\n\n//!\n\n//! - Accelerometer readings\n\n//! - `gx`: `i16`, LE, 2 bytes\n\n//! - `gy`: `i16`, LE, 2 bytes\n\n//! - `gz`: `i16`, LE, 2 bytes\n\n//!\n", "file_path": "src/examples/_09_log_sensors.rs", "rank": 96, "score": 6.328446901157825 }, { "content": "//!\n\n//! You'll need to have both OpenOCD and arm-none-eabi-gcc installed.\n\n//!\n\n//! - Building an application that depends on this crate\n\n//!\n\n//! To build applications (binary crates) using this crate follow [cortex-m-quickstart] instructions\n\n//! and add this crate as a dependency in step number 6 and make sure you enable the \"rt\" Cargo\n\n//! feature of this crate. Also, instead of step number 4 remove *both* the build.rs and memory.x\n\n//! files.\n\n//!\n\n//! [cortex-m-quickstart]: https://docs.rs/cortex-m-quickstart/~0.3\n\n//!\n\n//! # Examples\n\n//!\n\n//! See the [examples] module.\n\n//!\n\n//! [examples]: examples/index.html\n\n\n\n#![deny(missing_docs)]\n\n#![deny(warnings)]\n", "file_path": "src/lib.rs", "rank": 98, "score": 5.1196606574310035 }, { "content": "//! // NOTE you need to use the right parameters for *your* magnetometer\n\n//! // You can use the `log-sensors` example to calibrate your magnetometer. The producer is explained\n\n//! // in https://github.com/kriswiner/MPU6050/wiki/Simple-and-Effective-Magnetometer-Calibration\n\n//! const M_BIAS_X: f32 = -183.;\n\n//! const M_SCALE_X: f32 = 435.;\n\n//! \n\n//! const M_BIAS_Y: f32 = -172.;\n\n//! const M_SCALE_Y: f32 = 507.;\n\n//! \n\n//! const M_BIAS_Z: f32 = -136.;\n\n//! const M_SCALE_Z: f32 = 632.;\n\n//! \n\n//! // Sensitivities of the accelerometer and gyroscope, respectively\n\n//! const K_G: f32 = 2. / (1 << 15) as f32; // LSB -> g\n\n//! const K_AR: f32 = 8.75e-3 * PI / 180.; // LSB -> rad/s\n\n//! \n\n//! // Madgwick filter parameters\n\n//! const SAMPLE_FREQ: u32 = 220;\n\n//! const BETA: f32 = 1e-3;\n\n//! 
\n", "file_path": "src/examples/_10_madgwick.rs", "rank": 99, "score": 4.0994524411216355 } ]
Rust
benchmark/src/main.rs
negamartin/midly
6881b0b1cf55ef5aa8e8573a9176f14334e02555
use std::{ env, fs, path::{Path, PathBuf}, time::Instant, }; const MIDI_DIR: &str = "../test-asset"; const MIDI_EXT: &[&str] = &["mid", "midi", "rmi"]; const PARSERS: &[(&str, fn(&Path) -> Result<usize, String>)] = &[ (&"midly", parse_midly), (&"nom-midi", parse_nom), (&"rimd", parse_rimd), ]; fn parse_midly(path: &Path) -> Result<usize, String> { let data = fs::read(path).map_err(|err| format!("{}", err))?; let smf = midly::Smf::parse(&data).map_err(|err| format!("{}", err))?; Ok(smf.tracks.len()) } fn parse_nom(path: &Path) -> Result<usize, String> { let data = fs::read(path).map_err(|err| format!("{}", err))?; let smf = nom_midi::parser::parse_smf(&data) .map_err(|err| format!("{}", err))? .1; Ok(smf.tracks.len()) } fn parse_rimd(path: &Path) -> Result<usize, String> { let smf = rimd::SMF::from_file(path).map_err(|err| format!("{}", err))?; Ok(smf.tracks.len()) } fn list_midis(dir: &Path) -> Vec<PathBuf> { let mut midis = Vec::new(); for entry in fs::read_dir(dir).unwrap() { let path = entry.unwrap().path(); if MIDI_EXT .iter() .any(|ext| path.extension() == Some(ext.as_ref())) { midis.push(path); } } midis } fn use_parser(parse: fn(&Path) -> Result<usize, String>, path: &Path) -> Result<(), String> { let round = |num: f64| (num * 100.0).round() / 100.0; let runtime = || -> Result<_, String> { let start = Instant::now(); let out = parse(path)?; let time = round((start.elapsed().as_micros() as f64) / 1000.0); Ok((out, time)) }; let (track_count, cold_time) = runtime()?; let runtime = || -> Result<_, String> { let (out, time) = runtime()?; assert_eq!( out, track_count, "parser is not consistent with track counts" ); Ok(time) }; let iters = (2000.0 / cold_time).floor() as u64 + 1; let mut total_time = 0.0; let mut max_time = cold_time; let mut min_time = cold_time; for _ in 0..iters { let time = runtime()?; total_time += time; max_time = max_time.max(time); min_time = min_time.min(time); } let avg_time = round(total_time / (iters as f64)); eprintln!( "{} tracks in 
{} iters / min {} / avg {} / max {}", track_count, iters, min_time, avg_time, max_time ); Ok(()) } fn main() { let midi_filter = env::args().nth(1).unwrap_or_default().to_lowercase(); let parser_filter = env::args().nth(2).unwrap_or_default().to_lowercase(); let midi_dir = env::args().nth(3).unwrap_or(MIDI_DIR.to_string()); let parsers = PARSERS .iter() .filter(|(name, _)| name.contains(&parser_filter)) .collect::<Vec<_>>(); if parsers.is_empty() { eprintln!("no parsers match the pattern \"{}\"", parser_filter); eprint!("available parsers: "); for (i, (name, _)) in PARSERS.iter().enumerate() { if i > 0 { eprint!(", "); } eprint!("{}", name); } } let unfiltered_midis = list_midis(midi_dir.as_ref()); let midis = unfiltered_midis .iter() .filter(|midi| { midi.file_name() .unwrap_or_default() .to_str() .expect("non-utf8 file") .to_lowercase() .contains(&midi_filter) }) .collect::<Vec<_>>(); if midis.is_empty() { eprintln!("no midi files match the pattern \"{}\"", midi_filter); eprintln!("available midi files:"); for file in unfiltered_midis.iter() { eprintln!(" {}", file.display()); } } else { for midi in midis { let size = std::fs::read(midi).map(|b| b.len()).unwrap_or(0); eprintln!("parsing file \"{}\" ({} KB)", midi.display(), size / 1024); for &(name, parse) in parsers.iter() { eprint!(" {}: ", name); match use_parser(*parse, &midi) { Ok(()) => {} Err(_err) => { eprintln!("parse error"); } } } eprintln!(); } } }
use std::{ env, fs, path::{Path, PathBuf}, time::Instant, }; const MIDI_DIR: &str = "../test-asset"; const MIDI_EXT: &[&str] = &["mid", "midi", "rmi"]; const PARSERS: &[(&str, fn(&Path) -> Result<usize, String>)] = &[ (&"midly", parse_midly), (&"nom-midi", parse_nom), (&"rimd", parse_rimd), ]; fn parse_midly(path: &Path) -> Result<usize, String> { let data = fs::read(path).map_err(|err| format!("{}", err))?; let smf = midly::Smf::parse(&data).map_err(|err| format!("{}", err))?; Ok(smf.tracks.len()) } fn parse_nom(path: &Path) -> Result<usize, String> { let data = fs::read(path).map_err(|err| format!("{}", err))?; let smf = nom_midi::parser::parse_smf(&data) .map_err(|err| format!("{}", err))? .1; Ok(smf.tracks.len()) } fn parse_rimd(path: &Path) -> Result<usize, String> { let smf = rimd::SMF::from_file(path).map_err(|err| format!("{}", err))?; Ok(smf.tracks.len()) } fn list_midis(dir: &Path) -> Vec<PathBuf> { let mut midis = Vec::new(); for entry in fs::read_dir(dir).unwrap() { let path = entry.unwrap().path(); if MIDI_EXT .iter() .any(|ext| path.extension() == Some(ext.as_ref())) { midis.push(path); } } midis } fn use_parser(parse: fn(&Path) -> Result<usize, String>, path: &Path) -> Result<(), String> { let round = |num: f64| (num * 100.0).round() / 100.0; let runtime = || -> Result<_, String> { let start = Instant::now(); let out = parse(path)?; let time = round((start.elapsed().as_micros() as f64) / 1000.0); Ok((out, time)) }; let (track_count, cold_time) = runtime()?; let runtime = || -> Result<_, String> { let (out, time) = runtime()?; assert_eq!( out, track_count, "parser is not consistent with track counts" ); Ok(time) }; let iters = (2000.0 / cold_time).floor() as u64 + 1; let mut total_time = 0.0; let mut max_time = cold_time; let mut min_time = cold_time; for _ in 0..iters { let time = runtime()?; total_time += time; max_time = max_time.max(time); min_time = min_time.min(time); }
fn main() { let midi_filter = env::args().nth(1).unwrap_or_default().to_lowercase(); let parser_filter = env::args().nth(2).unwrap_or_default().to_lowercase(); let midi_dir = env::args().nth(3).unwrap_or(MIDI_DIR.to_string()); let parsers = PARSERS .iter() .filter(|(name, _)| name.contains(&parser_filter)) .collect::<Vec<_>>(); if parsers.is_empty() { eprintln!("no parsers match the pattern \"{}\"", parser_filter); eprint!("available parsers: "); for (i, (name, _)) in PARSERS.iter().enumerate() { if i > 0 { eprint!(", "); } eprint!("{}", name); } } let unfiltered_midis = list_midis(midi_dir.as_ref()); let midis = unfiltered_midis .iter() .filter(|midi| { midi.file_name() .unwrap_or_default() .to_str() .expect("non-utf8 file") .to_lowercase() .contains(&midi_filter) }) .collect::<Vec<_>>(); if midis.is_empty() { eprintln!("no midi files match the pattern \"{}\"", midi_filter); eprintln!("available midi files:"); for file in unfiltered_midis.iter() { eprintln!(" {}", file.display()); } } else { for midi in midis { let size = std::fs::read(midi).map(|b| b.len()).unwrap_or(0); eprintln!("parsing file \"{}\" ({} KB)", midi.display(), size / 1024); for &(name, parse) in parsers.iter() { eprint!(" {}: ", name); match use_parser(*parse, &midi) { Ok(()) => {} Err(_err) => { eprintln!("parse error"); } } } eprintln!(); } } }
let avg_time = round(total_time / (iters as f64)); eprintln!( "{} tracks in {} iters / min {} / avg {} / max {}", track_count, iters, min_time, avg_time, max_time ); Ok(()) }
function_block-function_prefix_line
[ { "content": "#[cfg(feature = \"alloc\")]\n\nfn validate_smf(header: &Header, track_count_hint: u16, track_count: usize) -> Result<()> {\n\n if cfg!(feature = \"strict\") {\n\n ensure!(\n\n track_count_hint as usize == track_count,\n\n err_malformed!(\"file has a different amount of tracks than declared\")\n\n );\n\n ensure!(\n\n header.format != Format::SingleTrack || track_count == 1,\n\n err_malformed!(\"singletrack format file has multiple tracks\")\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/smf.rs", "rank": 4, "score": 148738.21408986306 }, { "content": "/// Parse a raw MIDI file lazily, yielding its header and a lazy track iterator.\n\n/// No allocations are made.\n\n///\n\n/// The track iterator that is returned yields event iterators, which in turn yield concrete events.\n\n///\n\n/// This function is always available, even in `no_std` environments.\n\npub fn parse(raw: &[u8]) -> Result<(Header, TrackIter)> {\n\n let raw = match raw.get(..4) {\n\n Some(b\"RIFF\") => riff::unwrap(raw)?,\n\n Some(b\"MThd\") => raw,\n\n _ => bail!(err_invalid!(\"not a midi file\")),\n\n };\n\n let mut chunks = ChunkIter::new(raw);\n\n let (header, track_count) = match chunks.next() {\n\n Some(maybe_chunk) => match maybe_chunk.context(err_invalid!(\"invalid midi header\"))? 
{\n\n Chunk::Header(header, track_count) => Ok((header, track_count)),\n\n Chunk::Track(_) => Err(err_invalid!(\"expected header, found track\")),\n\n },\n\n None => Err(err_invalid!(\"no midi header chunk\")),\n\n }?;\n\n let tracks = chunks.as_tracks(track_count);\n\n Ok((header, tracks))\n\n}\n\n\n", "file_path": "src/smf.rs", "rank": 5, "score": 125643.12136929375 }, { "content": "fn fs_vs_cpu(path: &Path) -> Result<(), Box<dyn Error>> {\n\n let deadline = Instant::now() + Duration::from_millis(1000);\n\n let mut first = None;\n\n let mut total_fs = Duration::from_secs(0);\n\n let mut total_cpu = Duration::from_secs(0);\n\n let mut iters = 0;\n\n loop {\n\n let a = Instant::now();\n\n let data = fs::read(path)?;\n\n let b = Instant::now();\n\n let smf = Smf::parse(&data)?;\n\n let c = Instant::now();\n\n if let Some((_, _, track_count)) = first {\n\n total_fs += b - a;\n\n total_cpu += c - b;\n\n assert_eq!(track_count, smf.tracks.len());\n\n iters += 1;\n\n } else {\n\n first = Some((b - a, c - b, smf.tracks.len()));\n\n }\n", "file_path": "collectinfo/src/main.rs", "rank": 6, "score": 125308.75425705369 }, { "content": "fn list_midis(dir: &Path) -> Vec<PathBuf> {\n\n let mut midis = Vec::new();\n\n for entry in fs::read_dir(dir).unwrap() {\n\n let path = entry.unwrap().path();\n\n if MIDI_EXT\n\n .iter()\n\n .any(|ext| path.extension() == Some(ext.as_ref()))\n\n {\n\n midis.push(path);\n\n }\n\n }\n\n midis\n\n}\n\n\n", "file_path": "collectinfo/src/main.rs", "rank": 7, "score": 111606.92514900217 }, { "content": "fn bytes_per_event(path: &Path) -> Result<(), Box<dyn Error>> {\n\n let file = fs::read(path)?;\n\n let (_header, tracks) = midly::parse(&file)?;\n\n let mut total_bytes = 0;\n\n let mut total_events = 0;\n\n let mut min_bpe = std::f64::INFINITY;\n\n let mut max_bpe = 0.0;\n\n let bpe = tracks\n\n .map(|track| {\n\n let track = track?;\n\n let track_bytes = track.unread().len();\n\n let track_events = track.count();\n\n total_bytes += 
track_bytes;\n\n total_events += track_events;\n\n let bpe = track_bytes as f64 / track_events as f64;\n\n if bpe < min_bpe {\n\n min_bpe = bpe;\n\n }\n\n if bpe > max_bpe {\n\n max_bpe = bpe;\n", "file_path": "collectinfo/src/main.rs", "rank": 9, "score": 105731.3740186067 }, { "content": "#[cfg(feature = \"std\")]\n\n#[inline]\n\npub fn write_std<'a, T, E, W>(header: &Header, tracks: T, out: W) -> io::Result<()>\n\nwhere\n\n T: IntoIterator<Item = E>,\n\n T::IntoIter: ExactSizeIterator + Clone + Send,\n\n E: IntoIterator<Item = &'a TrackEvent<'a>>,\n\n E::IntoIter: Clone + Send,\n\n W: io::Write,\n\n{\n\n write(header, tracks, &mut IoWrap(out))\n\n}\n\n\n", "file_path": "src/smf.rs", "rank": 10, "score": 101100.73271379135 }, { "content": "/// Encode and write a generic MIDI file into the given generic writer.\n\n/// The MIDI file is represented by a header and a list of tracks.\n\n///\n\n/// # Errors\n\n///\n\n/// The MIDI writer raises almost no errors by itself, it only bubbles errors from the underlying\n\n/// writer.\n\n/// The only exception to this rule are extreme cases that break the limits of the MIDI spec: if\n\n/// there are more than 65535 tracks, if the data for a single event is 256MB or larger, or if the\n\n/// total size of any track is 4GB or larger.\n\n///\n\n/// # Implementation notes\n\n///\n\n/// Currently this function will attempt to use multiple threads to encode the file if possible and\n\n/// the file is large enough to make it worth it.\n\n///\n\n/// Otherwise, each track will be written to an in-memory buffer before writing to disk.\n\n///\n\n/// If allocation is disabled, but the writer is seekable, the file will be written once and it\n\n/// will be seeked back in order to write down the chunk sizes.\n\n///\n\n/// Otherwise, encoding will happen twice: once to determine the size of the chunks and once again\n\n/// to actually write down the file.\n\npub fn write<'a, T, E, W>(header: &Header, tracks: T, out: &mut W) -> 
WriteResult<W>\n\nwhere\n\n T: IntoIterator<Item = E>,\n\n T::IntoIter: ExactSizeIterator + Clone + Send,\n\n E: IntoIterator<Item = &'a TrackEvent<'a>>,\n\n E::IntoIter: Clone + Send,\n\n W: Write,\n\n{\n\n let tracks = tracks.into_iter().map(|events| events.into_iter());\n\n //Write the header first\n\n Chunk::write_header(header, tracks.len(), out)?;\n\n\n\n //Try to write the file in parallel\n\n #[cfg(feature = \"parallel\")]\n\n {\n\n //Figure out whether multithreading is worth it\n\n let event_count = tracks\n\n .clone()\n\n .map(|track| track.into_iter().size_hint().0)\n\n .sum::<usize>();\n", "file_path": "src/smf.rs", "rank": 11, "score": 98470.4446558391 }, { "content": "/// Take note of how long it takes to parse.\n\nfn time<F: FnOnce() -> R, R>(activity: &str, op: F) -> R {\n\n let start = Instant::now();\n\n let result = op();\n\n let took = Instant::now() - start;\n\n println!(\"{}: {}ms\", activity, (took * 1000).as_secs());\n\n result\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 12, "score": 96729.57596779455 }, { "content": "fn test_rewrite(filename: &str) {\n\n println!(\"parsing...\");\n\n open! {smf: filename};\n\n open! 
{smf: [parse_collect] smf};\n\n println!(\"rewriting...\");\n\n let mut file = Vec::with_capacity(16 * 1024);\n\n time(&format!(\"{} (rewrite)\", filename), || {\n\n smf.write(&mut file).expect(\"failed to rewrite midi file\");\n\n });\n\n println!(\"reparsing...\");\n\n let clone_smf = time(&format!(\"{} (reparse)\", filename), || {\n\n parse_collect::Smf::parse(&file).expect(\"failed to reparse midi file\")\n\n });\n\n assert_eq!(\n\n smf, clone_smf,\n\n \"reparsed midi file is not identical to the original\"\n\n );\n\n}\n\n\n\nmod parse_collect {\n", "file_path": "src/test.rs", "rank": 13, "score": 79854.88201545233 }, { "content": "fn test_stream_api(file: &str) {\n\n use crate::{\n\n live::{LiveEvent, SystemCommon, SystemRealtime},\n\n num::u7,\n\n stream::MidiStream,\n\n TrackEventKind,\n\n };\n\n\n\n #[derive(Debug)]\n\n struct EventData<'a> {\n\n fired_at: usize,\n\n event: Result<LiveEvent<'a>, (usize, usize)>,\n\n }\n\n\n\n open! {file: file};\n\n open! {smf: [parse_bytemap] file};\n\n //Holds data bytes for sysex expected events\n\n let mut sysex_bytes = Vec::new();\n\n //Holds expected events\n\n let mut expected_evs = Vec::new();\n", "file_path": "src/test.rs", "rank": 14, "score": 77441.83625576962 }, { "content": "fn test_live_api(file: &str) {\n\n open! {file: file};\n\n open! 
{smf: [parse_bytemap] file};\n\n #[cfg(feature = \"alloc\")]\n\n let arena = crate::Arena::new();\n\n for (bytes, ev) in smf.tracks.iter().flat_map(|track| track.iter()) {\n\n use crate::{\n\n live::{LiveEvent, SystemCommon},\n\n num::u7,\n\n TrackEventKind,\n\n };\n\n match ev.kind {\n\n TrackEventKind::Midi { channel, message } => {\n\n let mut raw_bytes;\n\n let stream_ev = if bytes.first().map(|&b| b < 0x80).unwrap_or(true) {\n\n raw_bytes = vec![message.status_nibble() << 4 | channel.as_int()];\n\n raw_bytes.extend_from_slice(bytes);\n\n LiveEvent::parse(&raw_bytes[..]).unwrap()\n\n } else {\n\n LiveEvent::parse(bytes).unwrap()\n", "file_path": "src/test.rs", "rank": 15, "score": 77441.83625576962 }, { "content": "pub fn unwrap(raw: &[u8]) -> Result<&[u8]> {\n\n let (id, mut riff) = ChunkIter(raw)\n\n .next()\n\n .ok_or(err_invalid!(\"no main riff chunk\"))?;\n\n if &id != b\"RIFF\" {\n\n bail!(err_invalid!(\"invalid main riff chunk\"));\n\n }\n\n let formtype = riff\n\n .split_checked(4)\n\n .ok_or(err_invalid!(\"failed to read riff formtype\"))?;\n\n if formtype != b\"RMID\" {\n\n bail!(err_invalid!(\"not an rmid riff file\"));\n\n }\n\n for (id, chunk) in ChunkIter(riff) {\n\n if &id == b\"data\" {\n\n return Ok(chunk);\n\n }\n\n }\n\n bail!(err_invalid!(\"no rmid data chunk\"))\n\n}\n", "file_path": "src/riff.rs", "rank": 16, "score": 68124.85830290604 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ChunkIter<'a> {\n\n /// Starts at the current index, ends at EOF.\n\n raw: &'a [u8],\n\n}\n\nimpl<'a> ChunkIter<'a> {\n\n #[inline]\n\n fn new(raw: &'a [u8]) -> ChunkIter {\n\n ChunkIter { raw }\n\n }\n\n\n\n #[inline]\n\n fn as_tracks(self, track_count_hint: u16) -> TrackIter<'a> {\n\n TrackIter {\n\n chunks: self,\n\n track_count_hint,\n\n }\n\n }\n\n}\n\nimpl<'a> Iterator for ChunkIter<'a> {\n\n type Item = Result<Chunk<'a>>;\n", "file_path": "src/smf.rs", "rank": 17, "score": 55517.06333475305 }, { "content": "#[derive(Clone, Debug)]\n\nstruct 
EventIterGeneric<'a, T> {\n\n raw: &'a [u8],\n\n running_status: Option<u8>,\n\n _kind: PhantomData<T>,\n\n}\n\nimpl<'a, T: EventKind<'a>> EventIterGeneric<'a, T> {\n\n #[inline]\n\n fn new(raw: &[u8]) -> EventIterGeneric<T> {\n\n EventIterGeneric {\n\n raw,\n\n running_status: None,\n\n _kind: PhantomData,\n\n }\n\n }\n\n\n\n /// Get the remaining unread bytes.\n\n #[inline]\n\n fn unread(&self) -> &'a [u8] {\n\n self.raw\n\n }\n", "file_path": "src/smf.rs", "rank": 18, "score": 49946.93727223502 }, { "content": "fn main() {\n\n let midi_filter = env::args().nth(1).unwrap_or_default().to_lowercase();\n\n let info_filter = env::args().nth(2).unwrap_or_default().to_lowercase();\n\n let midi_dir = env::args().nth(3).unwrap_or(MIDI_DIR.to_string());\n\n\n\n let collectors = INFO_COLLECTORS\n\n .iter()\n\n .filter(|(name, _)| name.contains(&info_filter))\n\n .collect::<Vec<_>>();\n\n if collectors.is_empty() {\n\n eprintln!(\"no info collectors match the pattern \\\"{}\\\"\", info_filter);\n\n eprint!(\"available info colectors: \");\n\n for (i, (name, _)) in INFO_COLLECTORS.iter().enumerate() {\n\n if i > 0 {\n\n eprint!(\", \");\n\n }\n\n eprint!(\"{}\", name);\n\n }\n\n eprintln!();\n\n }\n", "file_path": "collectinfo/src/main.rs", "rank": 19, "score": 43045.75886602106 }, { "content": "#[test]\n\nfn live_system() {\n\n use crate::{\n\n live::{LiveEvent, MtcQuarterFrameMessage, SystemCommon, SystemRealtime},\n\n num::u7,\n\n };\n\n\n\n // System common\n\n assert_eq!(\n\n LiveEvent::parse(&[0xF0, b'h', b'e', b'l', b'l', b'o', 0xF7]).unwrap(),\n\n LiveEvent::Common(SystemCommon::SysEx(u7::slice_from_int(b\"hello\")))\n\n );\n\n assert_eq!(\n\n LiveEvent::parse(&[0xF1, 0x36]).unwrap(),\n\n LiveEvent::Common(SystemCommon::MidiTimeCodeQuarterFrame(\n\n MtcQuarterFrameMessage::SecondsHigh,\n\n 6.into()\n\n ))\n\n );\n\n assert_eq!(\n\n LiveEvent::parse(&[0xF2, 0x08, 0x01]).unwrap(),\n", "file_path": "src/test.rs", "rank": 20, "score": 43045.75886602106 }, { "content": "# 
Midly\n\n\n\nMidly is a MIDI decoder and encoder designed for efficiency and completeness, supporting both\n\n`.mid` files and real-time MIDI streams while making minimal allocations.\n\n\n\nSee the crate-level documentation for the available features and `no_std` support.\n\n\n\n## Getting started\n\n\n\nFirst add the following line to your `Cargo.toml` file, under the\n\n`[dependencies]` section:\n\n\n\n```toml\n\nmidly = \"0.5\"\n\n```\n\n\n\nThen use the `Smf` type in the crate root:\n\n\n\n```rust\n\n// Load bytes first\n\nlet data = std::fs::read(\"Pi.mid\").unwrap();\n\n\n\n// Parse the raw bytes\n\nlet mut smf = midly::Smf::parse(&data).unwrap();\n\n\n\n// Use the information\n\nprintln!(\"midi file has {} tracks!\", smf.tracks.len());\n\n\n\n// Modify the file\n\nsmf.header.format = midly::Format::Sequential;\n\n\n\n// Save it back\n\nsmf.save(\"PiRewritten.mid\").unwrap();\n\n```\n\n\n\nOr use the `LiveEvent` type to parse real-time MIDI events:\n\n\n\n```rust\n\nuse midly::{live::LiveEvent, MidiMessage};\n\n\n\nfn on_midi(event: &[u8]) {\n\n let event = LiveEvent::parse(event).unwrap();\n\n match event {\n\n LiveEvent::Midi { channel, message } => match message {\n\n MidiMessage::NoteOn { key, vel } => {\n\n println!(\"hit note {} on channel {}\", key, channel);\n\n }\n\n _ => {}\n\n },\n\n _ => {}\n\n }\n\n}\n\n```\n\n\n\nMost types to be imported are on the crate root and are documented in-place.\n\nCheck the [crate documentation](https://docs.rs/midly) for more information.\n\n\n", "file_path": "README.md", "rank": 22, "score": 39070.581260079445 }, { "content": "\n\n# Version changelog\n\n\n\n### 0.5.3\n\n\n\n\n\n\n\n### 0.5.2\n\n\n\n- Fix parsing of `LiveEvent::Realtime`.\n\n- Add implementations of `PartialEq` between restricted ints and primitive ints.\n\n\n\n### 0.5.1\n\n\n\n- Added `new` constructors everywhere they made sense.\n\n- Exposed `EventBytemapIter`, which was accidentally private.\n\n- Fixed `DefaultBuffer::max_cap` not being a `const 
fn` if the `alloc` feature was disabled.\n\n- Implement `Send` for `Arena`.\n\n\n\n## 0.5\n\n\n\n- Rename `number` module to `num`.\n\n- Simplify generic `Smf<T>` to `Smf`, `SmfBytemap` and generic `parse`/`write` functions.\n\n- Add the `alloc` feature, which can be disabled to make the crate fully `no_std` and make no\n\n allocations.\n\n- Add the `parallel` feature to disable multithreading and the `rayon` dependency without dropping\n\n integration with `std`.\n\n- Move error framework from `failure` to `std::error::Error` when `std` is enabled, and no error\n\n trait when disabled.\n\n- Errors are always a thin pointer.\n\n- Writing now supports `no_std` with an auxiliary `Write` trait.\n\n- Event bytes no longer include delta-time.\n\n- Optimized allocations by guessing the amount of bytes per event.\n\n- Files without a correct header now fail early.\n\n- Added a `PitchBend` newtype to manipulate pitch bend values.\n\n- Added a `live` module that allows parsing standalone MIDI events.\n\n- Added a `stream` module to support raw MIDI stream decoding.\n\n- All types now implement `Debug`, and all data types implement `Hash`.\n\n- `Smf::new` no longer returns an error, and creates an empty `Smf` with no tracks. 
To create an\n\n `Smf` with prebuilt tracks use `Smf { header, tracks }` construction.\n\n- Added `Arena` to make track construction more ergonomic.\n\n\n\n### 0.4.1\n\n\n\n- Add support for the `.rmi` RIFF wrapper for MIDI files.\n\n- Errors are now a thin pointer in release mode.\n\n- Add a `TrackIter::running_status_mut` method.\n\n- Update `README.md` to match `0.4`.\n\n\n\n## 0.4\n\n\n\n- `EventKind::parse` and `Event::read` no longer return event bytes.\n\n- Simplify `lenient` and `strict` features to a simple `strict` feature.\n\n\n", "file_path": "CHANGELOG.md", "rank": 23, "score": 39055.178809275654 }, { "content": "## Speed\n\n\n\nAlthough performance is not critical in a MIDI library, it still is an important objective of the\n\n`midly` library, providing automatic multithreading and minimal allocations.\n\nThe following chart presents benchmark results against other MIDI libraries in the ecosystem capable\n\nof reading `.mid` files. The benchmarks were done on a warm file cache.\n\n\n\n| File name | File size | `rimd 0.0.1` | `nom-midi 0.5.1` | `midly 0.5.2` |\n\n| --------------- | --------- | ------------ | ---------------- | ------------- |\n\n| `Clementi.mid` | 4 KB | 11 ms | Error | 0.15 ms |\n\n| `CrabRave.mid` | 53 KB | 145 ms | 0.55 ms | 0.26 ms |\n\n| `Beethoven.rmi` | 90 KB | Error | Error | 0.48 ms |\n\n| `Pi.mid` | 24 MB | 66700 ms | 358 ms | 85 ms |\n\n| `PiDamaged.mid` | 64 KB | Freeze | Error | 0.55 ms |\n\n\n\nThe above results are only referential, actual performance depends on the hardware and operating\n\nsystem.\n", "file_path": "README.md", "rank": 24, "score": 39052.80000757174 }, { "content": "## 0.3\n\n\n\n- Add support for writing MIDI files and events.\n\n- Handle running status more correctly.\n\n\n\n### 0.2.2\n\n\n\n- Fix pitch bend messages being read with the wrong endianness.\n\n\n\n### 0.2.1\n\n\n\n- Update `README.md` to match the API of `0.2`.\n\n- Added an `ErrorKind::mesage` method.\n\n\n\n## 0.2\n\n\n\n- Move error 
framework from `error-chain` to `failure`.\n\n- Renamed `Varlen` to `u28`.\n\n- Added `lenient` and `strict` crate features to configure how eager should the parser be to reject\n\n files.\n\n- Give meaningful names to MIDI message fields.\n\n- Replace the `SmfBuffer` convenience type with three `parse` variants.\n\n- Default the `Smf<T>` type to `Smf<Vec<Event>>`.\n\n- No longer tries to parallelize lazy parsing.\n\n- Renamed `EventKind::read` to `EventKind::parse` to match the rest of the parse methods.\n\n- Added an optional (enabled by default) `std` feature to make the crate `no_std + alloc`.\n\n\n\n### 0.1.3\n\n\n\n- Rename `Fps::as_u8` and `Fps::from_u8` to `Fps::as_int` and `Fps::from_int`.\n\n\n\n### 0.1.2\n\n\n\n- Make MIDI primitives public.\n\n\n\n### 0.1.1\n\n\n\n- Add `as_int` method to convert MIDI integers to primitives.\n\n\n\n## 0.1.0\n\n\n\n- Initial release.\n\n\n\n# Planned changes\n\n\n\n- Move to a cursor approach instead of an advancing slice, for performance.\n", "file_path": "CHANGELOG.md", "rank": 25, "score": 39044.37173628303 }, { "content": " let mut bytes = [0; 6];\n\n bytes[0..2].copy_from_slice(&self.format.encode()[..]);\n\n bytes[2..4].copy_from_slice(&track_count.to_be_bytes()[..]);\n\n bytes[4..6].copy_from_slice(&self.timing.encode()[..]);\n\n bytes\n\n }\n\n}\n\n\n\n/// An iterator over all *tracks* in a Standard Midi File.\n\n/// Created by the [`parse`](fn.parse.html) function.\n\n///\n\n/// This type is always available, even in `no_std` environments.\n\n#[derive(Clone, Debug)]\n\npub struct TrackIter<'a> {\n\n chunks: ChunkIter<'a>,\n\n track_count_hint: u16,\n\n}\n\nimpl<'a> TrackIter<'a> {\n\n /// Create an event iterator from raw SMF bytes, excluding the header.\n\n ///\n", "file_path": "src/smf.rs", "rank": 26, "score": 30717.540602158428 }, { "content": " pub fn parse(raw: &[u8]) -> Result<Smf> {\n\n let (header, tracks) = parse(raw)?;\n\n let track_count_hint = tracks.track_count_hint;\n\n let tracks = 
tracks.collect_tracks()?;\n\n validate_smf(&header, track_count_hint, tracks.len())?;\n\n Ok(Smf { header, tracks })\n\n }\n\n\n\n /// Encodes and writes the file to the given generic writer.\n\n ///\n\n /// Note that this function requires a `midly::io::Write` writer, not a `std::io::Write` writer.\n\n /// This makes it possible to support `no_std` environments, as well as custom writer errors.\n\n /// If you're looking to write to a `File`, see the [`save`](#method.save) method.\n\n /// If you're looking to write to a `std::io::Write` writer, see the\n\n /// [`write_std`](#method.write_std) method.\n\n ///\n\n /// This function is always available, even in `no_std` environments.\n\n #[inline]\n\n pub fn write<W: Write>(&self, out: &mut W) -> WriteResult<W> {\n\n write(&self.header, &self.tracks, out)\n", "file_path": "src/smf.rs", "rank": 27, "score": 30717.074059414437 }, { "content": " /// The header of this MIDI file, indicating tempo information and track format.\n\n pub header: Header,\n\n /// A list of tracks within this MIDI file.\n\n ///\n\n /// Each track consists simply of a list of events (ie. 
there is no track metadata).\n\n pub tracks: Vec<Track<'a>>,\n\n}\n\n#[cfg(feature = \"alloc\")]\n\nimpl<'a> Smf<'a> {\n\n /// Create a new empty `Smf` with zero tracks, using the given header.\n\n #[inline]\n\n pub fn new(header: Header) -> Smf<'a> {\n\n Smf {\n\n header,\n\n tracks: vec![],\n\n }\n\n }\n\n\n\n /// Parse a `.mid` Standard Midi File from its raw bytes.\n\n /// If you casually want to parse `.mid` files, this is the function you're looking for.\n", "file_path": "src/smf.rs", "rank": 28, "score": 30716.98405536489 }, { "content": " /// A non-generic, non-inline function.\n\n /// This means that this function will be compiled and monomorphized once, and reused for\n\n /// every call to `save`.\n\n fn save_impl(smf: &Smf, path: &Path) -> io::Result<()> {\n\n smf.write(&mut IoWrap(File::create(path)?))\n\n }\n\n save_impl(self, path.as_ref())\n\n }\n\n}\n\n\n\n/// A track, represented as a `Vec` of events along with their originating bytes.\n\n///\n\n/// This type alias is only available with the `alloc` feature enabled.\n\n#[cfg(feature = \"alloc\")]\n\npub type BytemappedTrack<'a> = Vec<(&'a [u8], TrackEvent<'a>)>;\n\n\n\n/// A `.mid` Standard Midi File, but keeps a mapping to the raw bytes that make up each event.\n\n///\n\n/// This type is only available with the `alloc` feature enabled.\n\n#[cfg(feature = \"alloc\")]\n", "file_path": "src/smf.rs", "rank": 29, "score": 30715.69650755545 }, { "content": " pub fn parse(raw: &[u8]) -> Result<SmfBytemap> {\n\n let (header, tracks) = parse(raw)?;\n\n let track_count_hint = tracks.track_count_hint;\n\n let tracks = tracks.collect_bytemapped()?;\n\n validate_smf(&header, track_count_hint, tracks.len())?;\n\n Ok(SmfBytemap { header, tracks })\n\n }\n\n\n\n /// Encodes and writes the *events* (not the bytemap) to the given generic writer.\n\n #[inline]\n\n pub fn write<W: Write>(&self, out: &mut W) -> WriteResult<W> {\n\n write(\n\n &self.header,\n\n self.tracks\n\n .iter()\n\n .map(|bytemapped| 
bytemapped.iter().map(|(_b, ev)| ev)),\n\n out,\n\n )\n\n }\n\n\n", "file_path": "src/smf.rs", "rank": 30, "score": 30714.919654638208 }, { "content": " /// Usually it's not possible to determine the timing of a file with just this field, the first\n\n /// few events of the first track must be parsed in the best case, and in the worst case the\n\n /// file might have changing tempos along the song.\n\n pub timing: Timing,\n\n}\n\nimpl Header {\n\n /// Create a new header from its raw parts.\n\n #[inline]\n\n pub fn new(format: Format, timing: Timing) -> Header {\n\n Header { format, timing }\n\n }\n\n\n\n /// Read the contents of a header chunk, including the `Header` and the track count.\n\n fn read(mut raw: &[u8]) -> Result<(Header, u16)> {\n\n let format = Format::read(&mut raw)?;\n\n let track_count = u16::read(&mut raw)?;\n\n let timing = Timing::read(&mut raw)?;\n\n Ok((Header::new(format, timing), track_count))\n\n }\n\n fn encode(&self, track_count: u16) -> [u8; 6] {\n", "file_path": "src/smf.rs", "rank": 31, "score": 30714.907049312827 }, { "content": "/// How many bytes must a MIDI body have in order to enable multithreading.\n\n///\n\n/// When writing, the MIDI body size is estimated from the event count using `BYTES_PER_EVENT`.\n\n#[cfg(feature = \"parallel\")]\n\nconst PARALLEL_ENABLE_THRESHOLD: usize = 3 * 1024;\n\n\n\n/// A single track: simply a list of track events.\n\n///\n\n/// Only available with the `alloc` feature enabled.\n\n#[cfg(feature = \"alloc\")]\n\npub type Track<'a> = Vec<TrackEvent<'a>>;\n\n\n\n/// Represents a single `.mid` Standard Midi File.\n\n/// If you're casually looking to parse a `.mid` file, this is the type you're looking for.\n\n///\n\n/// This type is only available with the `alloc` feature enabled.\n\n/// If you're looking for a fully `no_std` alternative, see the [`parse`](fn.parse.html) function.\n\n#[cfg(feature = \"alloc\")]\n\n#[derive(Clone, PartialEq, Eq, Debug, Hash)]\n\npub struct Smf<'a> {\n", "file_path": 
"src/smf.rs", "rank": 32, "score": 30714.8829948331 }, { "content": " if (event_count as f32 * EVENTS_TO_BYTES) > PARALLEL_ENABLE_THRESHOLD as f32 {\n\n use rayon::prelude::*;\n\n\n\n //Write out the tracks in parallel into several different buffers\n\n let mut track_chunks = Vec::new();\n\n tracks\n\n .collect::<Vec<_>>()\n\n .into_par_iter()\n\n .map(|track| {\n\n let mut track_chunk = Vec::new();\n\n Chunk::write_to_vec(track, &mut track_chunk)?;\n\n Ok(track_chunk)\n\n })\n\n .collect_into_vec(&mut track_chunks);\n\n\n\n //Write down the tracks sequentially and in order\n\n for result in track_chunks {\n\n let track_chunk = result.map_err(W::invalid_input)?;\n\n out.write(&track_chunk)?;\n\n }\n", "file_path": "src/smf.rs", "rank": 33, "score": 30714.368238949934 }, { "content": " fn size_hint(&self) -> (usize, Option<usize>) {\n\n (\n\n self.track_count_hint as usize,\n\n Some(self.track_count_hint as usize),\n\n )\n\n }\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Result<EventIter<'a>>> {\n\n loop {\n\n if let Some(chunk) = self.chunks.next() {\n\n self.track_count_hint = self.track_count_hint.saturating_sub(1);\n\n match chunk {\n\n Ok(Chunk::Track(track)) => break Some(Ok(EventIter::new(track))),\n\n //Read another header (?)\n\n Ok(Chunk::Header(..)) => {\n\n if cfg!(feature = \"strict\") {\n\n break Some(Err(err_malformed!(\"found duplicate header\").into()));\n\n } else {\n\n //Ignore duplicate header\n", "file_path": "src/smf.rs", "rank": 34, "score": 30713.61244720255 }, { "content": " }\n\n\n\n /// Encodes and writes the file to the given `std::io::Write` writer.\n\n ///\n\n /// This function is similar to the [`write`](#method.write) method, but writes to a\n\n /// `std::io::Write` writer instead of a `midly::io::Write` writer.\n\n ///\n\n /// This function is only available with the `std` feature enabled.\n\n #[cfg(feature = \"std\")]\n\n #[inline]\n\n pub fn write_std<W: io::Write>(&self, out: W) -> io::Result<()> {\n\n 
write_std(&self.header, &self.tracks, out)\n\n }\n\n\n\n /// Encodes and writes the file to the given path.\n\n ///\n\n /// This function is only available with the `std` feature enabled.\n\n #[cfg(feature = \"std\")]\n\n #[inline]\n\n pub fn save<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {\n", "file_path": "src/smf.rs", "rank": 35, "score": 30712.284636864824 }, { "content": "}\n\n\n\n/// An iterator over the events of a single track.\n\n/// Yielded by the [`TrackIter`](struct.TrackIter.html) iterator.\n\n///\n\n/// This iterator is lazy, it parses events as it goes, and therefore produces `Result<TrackEvent>>`\n\n/// rather than `TrackEvent`.\n\n///\n\n/// This type is always available, even in `no_std` environments.\n\n#[derive(Clone, Debug)]\n\npub struct EventIter<'a> {\n\n inner: EventIterGeneric<'a, Self>,\n\n}\n\nimpl<'a> EventKind<'a> for EventIter<'a> {\n\n type Event = TrackEvent<'a>;\n\n #[inline]\n\n fn read_ev(raw: &mut &'a [u8], rs: &mut Option<u8>) -> Result<TrackEvent<'a>> {\n\n TrackEvent::read(raw, rs)\n\n }\n\n}\n", "file_path": "src/smf.rs", "rank": 36, "score": 30711.77724136603 }, { "content": " track: impl Iterator<Item = &'a TrackEvent<'a>> + Clone,\n\n out: &mut W,\n\n ) -> WriteResult<W> {\n\n let mut counter = WriteCounter(0);\n\n Self::write_raw(track.clone(), &mut counter).map_err(W::invalid_input)?;\n\n let len = Self::check_len::<W, _>(counter.0)?;\n\n let mut head = [b'M', b'T', b'r', b'k', 0, 0, 0, 0];\n\n head[4..8].copy_from_slice(&len);\n\n out.write(&head)?;\n\n Self::write_raw(track, out)?;\n\n Ok(())\n\n }\n\n\n\n /// Write a single chunk using the seek method.\n\n ///\n\n /// The chunk is written once, then the writer is seeked back and the chunk length is written\n\n /// last.\n\n fn write_seek<W: Write + Seek>(\n\n track: impl Iterator<Item = &'a TrackEvent<'a>>,\n\n out: &mut W,\n", "file_path": "src/smf.rs", "rank": 37, "score": 30711.42801366548 }, { "content": " }\n\n\n\n /// Write a header chunk into a 
writer.\n\n fn write_header<W: Write>(header: &Header, track_count: usize, out: &mut W) -> WriteResult<W> {\n\n let mut header_chunk = [0; 4 + 4 + 6];\n\n let track_count = u16::try_from(track_count)\n\n .map_err(|_| W::invalid_input(\"track count exceeds 16 bit range\"))?;\n\n let header = header.encode(track_count);\n\n header_chunk[0..4].copy_from_slice(&b\"MThd\"[..]);\n\n header_chunk[4..8].copy_from_slice(&(header.len() as u32).to_be_bytes()[..]);\n\n header_chunk[8..].copy_from_slice(&header[..]);\n\n out.write(&header_chunk[..])?;\n\n Ok(())\n\n }\n\n\n\n /// Write a single track chunk using the probe method.\n\n ///\n\n /// When probing, the chunk is written twice: one to find out the length of the chunk and again\n\n /// to actually write the chunk contents.\n\n fn write_probe<W: Write>(\n", "file_path": "src/smf.rs", "rank": 38, "score": 30710.948204339686 }, { "content": "//! Specific to the SMF packaging of MIDI streams.\n\n\n\nuse crate::{\n\n event::TrackEvent,\n\n prelude::*,\n\n primitive::{Format, Timing},\n\n riff,\n\n};\n\n\n\n/// How many events per byte to estimate when allocating memory for events while parsing.\n\n///\n\n/// A value that is too large (ie. too few bytes/event), will overallocate, while a value that is\n\n/// too small (ie. 
too many bytes/event) will underallocate.\n\n///\n\n/// Usually, since memory is cheap it's better to overallocate, since reallocating the buffer may\n\n/// result in costly memory moves.\n\n/// This means that it's better to err on the large side (too few bytes/event).\n\n///\n\n/// Real-world tests show that without running status, the average is a little above 4 bytes/event,\n\n/// and with running status enabled it's a little above 3 bytes/event.\n", "file_path": "src/smf.rs", "rank": 39, "score": 30710.671640024302 }, { "content": " }\n\n}\n\n\n\n/// An iterator over the events of a single track that keeps track of the raw bytes that make up\n\n/// each event.\n\n/// Created by the [`EventIter::bytemapped`](struct.EventIter.html#method.bytemapped) method.\n\n///\n\n/// This iterator is lazy, it parses events as it goes, and therefore produces\n\n/// `Result<(&[u8], TrackEvent)>>` rather than just `(&[u8], TrackEvent)`.\n\n///\n\n/// This type is always available, even in `no_std` environments.\n\n#[derive(Clone, Debug)]\n\npub struct EventBytemapIter<'a> {\n\n inner: EventIterGeneric<'a, Self>,\n\n}\n\nimpl<'a> EventKind<'a> for EventBytemapIter<'a> {\n\n type Event = (&'a [u8], TrackEvent<'a>);\n\n #[inline]\n\n fn read_ev(raw: &mut &'a [u8], rs: &mut Option<u8>) -> Result<Self::Event> {\n\n TrackEvent::read_bytemap(raw, rs)\n", "file_path": "src/smf.rs", "rank": 40, "score": 30710.57380076656 }, { "content": " /// The main way to obtain raw SMF without a header is the [`unread`](#method.unread) method.\n\n #[inline]\n\n pub fn new(raw: &[u8]) -> TrackIter {\n\n TrackIter {\n\n chunks: ChunkIter::new(raw),\n\n track_count_hint: 0,\n\n }\n\n }\n\n\n\n /// Peek at the remaining unparsed bytes in the file.\n\n #[inline]\n\n pub fn unread(&self) -> &'a [u8] {\n\n self.chunks.raw\n\n }\n\n\n\n /// Parse and collect the remaining unparsed tracks into a `Vec` of tracks.\n\n ///\n\n /// This function is only available with the `alloc` feature enabled.\n\n 
#[cfg(feature = \"alloc\")]\n\n pub fn collect_tracks(self) -> Result<Vec<Track<'a>>> {\n", "file_path": "src/smf.rs", "rank": 41, "score": 30709.745631936697 }, { "content": " /// Encodes and writes the *events* (not the bytemap) to the given `std::io::Write` writer.\n\n ///\n\n /// This function is only available with the `std` feature enabled.\n\n #[cfg(feature = \"std\")]\n\n #[inline]\n\n pub fn write_std<W: io::Write>(&self, out: W) -> io::Result<()> {\n\n write_std(\n\n &self.header,\n\n self.tracks\n\n .iter()\n\n .map(|bytemapped| bytemapped.iter().map(|(_b, ev)| ev)),\n\n out,\n\n )\n\n }\n\n\n\n /// Creates/overwrites the file at the given path and writes the *events* (not the bytemap) to\n\n /// it.\n\n ///\n\n /// This function is only available with the `std` feature enabled.\n\n #[cfg(feature = \"std\")]\n", "file_path": "src/smf.rs", "rank": 42, "score": 30709.722704570475 }, { "content": " _kind: PhantomData,\n\n },\n\n }\n\n }\n\n\n\n /// Collects the remaining unparsed events into a `Vec<(&[u8], TrackEvent)>`.\n\n ///\n\n /// This function is a smarter version of `Iterator::collect`, as it guesses allocations and\n\n /// is usually optimized better than its naive counterpart.\n\n ///\n\n /// This function is only available with the `alloc` feature enabled.\n\n #[cfg(feature = \"alloc\")]\n\n #[inline]\n\n pub fn into_vec(self) -> Result<Vec<(&'a [u8], TrackEvent<'a>)>> {\n\n self.inner.into_vec()\n\n }\n\n}\n\nimpl<'a> Iterator for EventBytemapIter<'a> {\n\n type Item = Result<(&'a [u8], TrackEvent<'a>)>;\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n}\n", "file_path": "src/smf.rs", "rank": 43, "score": 30709.65300770449 }, { "content": " ) -> WriteResult<W> {\n\n out.write(b\"MTrk\\0\\0\\0\\0\")?;\n\n let start = out.tell()?;\n\n Self::write_raw(track, out)?;\n\n let len = Self::check_len::<W, _>(out.tell()? 
- start)?;\n\n out.write_at(&len, start - 4)?;\n\n Ok(())\n\n }\n\n\n\n /// Write a chunk to an in-memory `Vec`.\n\n ///\n\n /// Because the output is in-memory, the chunk can simply wind back and write the chunk length\n\n /// last.\n\n #[cfg(feature = \"alloc\")]\n\n fn write_to_vec(\n\n track: impl Iterator<Item = &'a TrackEvent<'a>>,\n\n out: &mut Vec<u8>,\n\n ) -> WriteResult<Vec<u8>> {\n\n let cap = (track.size_hint().0 as f32 * EVENTS_TO_BYTES) as usize;\n\n out.clear();\n", "file_path": "src/smf.rs", "rank": 44, "score": 30709.621114946545 }, { "content": " #[inline]\n\n pub fn save<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {\n\n /// A non-generic, non-inline function.\n\n /// This means that this function will be compiled and monomorphized once, and reused for\n\n /// every call to `save`.\n\n fn save_impl(smf: &SmfBytemap, path: &Path) -> io::Result<()> {\n\n smf.write(&mut IoWrap(File::create(path)?))\n\n }\n\n save_impl(self, path.as_ref())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n", "file_path": "src/smf.rs", "rank": 45, "score": 30709.17456107919 }, { "content": " self.generic_collect(EventIter::into_vec)\n\n }\n\n\n\n /// Parse and collect the remaining unparsed tracks into a `Vec` of tracks, keeping a mapping\n\n /// to the original bytes that make up each event.\n\n ///\n\n /// This function is only available with the `alloc` feature enabled.\n\n #[cfg(feature = \"alloc\")]\n\n pub fn collect_bytemapped(self) -> Result<Vec<BytemappedTrack<'a>>> {\n\n self.generic_collect(|events| events.bytemapped().into_vec())\n\n }\n\n\n\n #[cfg(feature = \"alloc\")]\n\n #[inline]\n\n fn generic_collect<T: Send + 'a>(\n\n self,\n\n collect: impl Fn(EventIter<'a>) -> Result<Vec<T>> + Send + Sync,\n\n ) -> Result<Vec<Vec<T>>> {\n\n //Attempt to use multiple threads if possible and advantageous\n\n #[cfg(feature = \"parallel\")]\n", "file_path": "src/smf.rs", "rank": 46, "score": 30709.08877288866 }, { "content": "impl<'a, T: EventKind<'a>> Iterator 
for EventIterGeneric<'a, T> {\n\n type Item = Result<T::Event>;\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if !self.raw.is_empty() {\n\n match T::read_ev(&mut self.raw, &mut self.running_status) {\n\n Ok(ev) => Some(Ok(ev)),\n\n Err(err) => {\n\n self.raw = &[];\n\n if cfg!(feature = \"strict\") {\n\n Some(Err(err).context(err_malformed!(\"malformed event\")))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n } else {\n\n None\n\n }\n\n }\n", "file_path": "src/smf.rs", "rank": 47, "score": 30708.682253699466 }, { "content": " {\n\n if self.unread().len() >= PARALLEL_ENABLE_THRESHOLD {\n\n use rayon::prelude::*;\n\n\n\n let chunk_vec = self.collect::<Result<Vec<_>>>()?;\n\n return chunk_vec\n\n .into_par_iter()\n\n .map(collect)\n\n .collect::<Result<Vec<Vec<T>>>>();\n\n }\n\n }\n\n //Fall back to single-threaded\n\n self.map(|r| r.and_then(&collect))\n\n .collect::<Result<Vec<Vec<T>>>>()\n\n }\n\n}\n\nimpl<'a> Iterator for TrackIter<'a> {\n\n type Item = Result<EventIter<'a>>;\n\n\n\n #[inline]\n", "file_path": "src/smf.rs", "rank": 48, "score": 30708.29775693062 }, { "content": " out.reserve(8 + cap);\n\n out.extend_from_slice(b\"MTrk\\0\\0\\0\\0\");\n\n Self::write_raw(track, out)?;\n\n let len = Self::check_len::<Vec<u8>, _>(out.len() - 8)?;\n\n out[4..8].copy_from_slice(&len);\n\n Ok(())\n\n }\n\n\n\n /// Auxiliary method. 
Iterate over the events of a track and write them out.\n\n fn write_raw<W: Write>(\n\n track: impl Iterator<Item = &'a TrackEvent<'a>>,\n\n out: &mut W,\n\n ) -> WriteResult<W> {\n\n let mut running_status = None;\n\n for ev in track {\n\n ev.write(&mut running_status, out)?;\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "src/smf.rs", "rank": 49, "score": 30708.21106003476 }, { "content": " }\n\n }\n\n\n\n /// Collects the remaining unparsed events into a `Track`.\n\n ///\n\n /// This function is a smarter version of `Iterator::collect`, as it guesses allocations and\n\n /// is usually optimized better than its naive counterpart.\n\n ///\n\n /// This function is only available with the `alloc` feature enabled.\n\n #[cfg(feature = \"alloc\")]\n\n #[inline]\n\n pub fn into_vec(self) -> Result<Track<'a>> {\n\n self.inner.into_vec()\n\n }\n\n}\n\nimpl<'a> Iterator for EventIter<'a> {\n\n type Item = Result<TrackEvent<'a>>;\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n", "file_path": "src/smf.rs", "rank": 50, "score": 30708.195174433506 }, { "content": " #[inline]\n\n pub fn running_status(&self) -> Option<u8> {\n\n self.inner.running_status()\n\n }\n\n\n\n /// Modify the current running status of the track.\n\n #[inline]\n\n pub fn running_status_mut(&mut self) -> &mut Option<u8> {\n\n self.inner.running_status_mut()\n\n }\n\n\n\n /// Make this event iterator keep track of the raw bytes that make up each event.\n\n #[inline]\n\n pub fn bytemapped(self) -> EventBytemapIter<'a> {\n\n EventBytemapIter {\n\n inner: EventIterGeneric {\n\n raw: self.inner.raw,\n\n running_status: self.inner.running_status,\n\n _kind: PhantomData,\n\n },\n", "file_path": "src/smf.rs", "rank": 51, "score": 30708.112221701187 }, { "content": " #[inline]\n\n fn next(&mut self) -> Option<Result<Chunk<'a>>> {\n\n //Flip around option and result\n\n match Chunk::read(&mut self.raw) {\n\n Ok(Some(chunk)) => Some(Ok(chunk)),\n\n Ok(None) => None,\n\n Err(err) => 
{\n\n //Ensure `Chunk::read` isn't called again, by setting read pointer to EOF (len 0)\n\n //This is to prevent use of corrupted state (such as reading a new Chunk from the\n\n //middle of a malformed message)\n\n self.raw = &[];\n\n Some(Err(err))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/smf.rs", "rank": 52, "score": 30708.098074038164 }, { "content": " /// Auxiliary method. Given an arbitrary-width length, fit it into a 32-bit big-endian integer,\n\n /// reporting an error if it does not fit.\n\n fn check_len<W, T>(len: T) -> StdResult<[u8; 4], W::Error>\n\n where\n\n u32: TryFrom<T>,\n\n W: Write,\n\n {\n\n let len = u32::try_from(len)\n\n .map_err(|_| W::invalid_input(\"midi chunk size exceeds 32 bit range\"))?;\n\n Ok(len.to_be_bytes())\n\n }\n\n}\n\n\n\n/// A MIDI file header, indicating metadata about the file.\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]\n\npub struct Header {\n\n /// Information about how should the tracks be laid out when playing them back.\n\n pub format: Format,\n\n /// Tempo information about the file.\n\n ///\n", "file_path": "src/smf.rs", "rank": 53, "score": 30708.010150901962 }, { "content": " #[inline]\n\n fn into_vec(mut self) -> Result<Vec<T::Event>> {\n\n let mut events = Vec::with_capacity(self.estimate_events());\n\n while !self.raw.is_empty() {\n\n match T::read_ev(&mut self.raw, &mut self.running_status) {\n\n Ok(ev) => events.push(ev),\n\n Err(err) => {\n\n self.raw = &[];\n\n if cfg!(feature = \"strict\") {\n\n Err(err).context(err_malformed!(\"malformed event\"))?;\n\n } else {\n\n //Stop reading track silently on failure\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n Ok(events)\n\n }\n\n}\n", "file_path": "src/smf.rs", "rank": 54, "score": 30707.896433186736 }, { "content": " return Ok(());\n\n }\n\n }\n\n\n\n #[cfg(feature = \"alloc\")]\n\n {\n\n //Write the tracks into a buffer before writing out to the file\n\n let mut buf = Vec::new();\n\n for track in tracks {\n\n Chunk::write_to_vec(track, &mut 
buf).map_err(|msg| W::invalid_input(msg))?;\n\n out.write(&buf)?;\n\n }\n\n return Ok(());\n\n }\n\n\n\n #[allow(unreachable_code)]\n\n {\n\n if let Some(out) = out.make_seekable() {\n\n //Write down using seeks if the writer is seekable\n\n for track in tracks {\n", "file_path": "src/smf.rs", "rank": 55, "score": 30707.798814072972 }, { "content": " Chunk::write_seek(track, out)?;\n\n }\n\n return Ok(());\n\n }\n\n\n\n //Last resort: do probe-writing.\n\n //Two passes are done: one to find out the size of the chunk and another to actually\n\n //write the chunk.\n\n for track in tracks {\n\n Chunk::write_probe(track, out)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Similar to [`write`](fn.write.html), but writes to a `std::io::Write` writer instead of a\n\n/// `midly::io::Write` writer.\n\n///\n\n/// This function is only available with the `std` feature enabled.\n", "file_path": "src/smf.rs", "rank": 56, "score": 30707.64774723129 }, { "content": "#[derive(Clone, PartialEq, Eq, Debug, Hash)]\n\npub struct SmfBytemap<'a> {\n\n /// The header of this file.\n\n pub header: Header,\n\n /// A list of tracks, along with the bytemap of their events.\n\n pub tracks: Vec<BytemappedTrack<'a>>,\n\n}\n\n#[cfg(feature = \"alloc\")]\n\nimpl<'a> SmfBytemap<'a> {\n\n /// Create a new empty `SmfBytemap` with zero tracks, using the given header.\n\n #[inline]\n\n pub fn new(header: Header) -> SmfBytemap<'a> {\n\n SmfBytemap {\n\n header,\n\n tracks: vec![],\n\n }\n\n }\n\n\n\n /// Parse a Standard Midi File from its raw bytes, keeping a map to the original bytes that\n\n /// make up each event.\n", "file_path": "src/smf.rs", "rank": 57, "score": 30707.525365859234 }, { "content": " if cfg!(feature = \"strict\") {\n\n bail!(err_malformed!(\"reached eof before chunk ended\"));\n\n } else {\n\n //Just use the remainder of the file\n\n mem::replace(raw, &[])\n\n }\n\n }\n\n };\n\n match id {\n\n b\"MThd\" => {\n\n let (header, track_count) = Header::read(chunkdata)?;\n\n break 
Some(Chunk::Header(header, track_count));\n\n }\n\n b\"MTrk\" => {\n\n break Some(Chunk::Track(chunkdata));\n\n }\n\n //Unknown chunk, just ignore and read the next one\n\n _ => (),\n\n }\n\n })\n", "file_path": "src/smf.rs", "rank": 58, "score": 30707.350514512116 }, { "content": "\n\n /// Get the current running status of the track.\n\n #[inline]\n\n pub fn running_status(&self) -> Option<u8> {\n\n self.inner.running_status()\n\n }\n\n\n\n /// Modify the current running status of the track.\n\n #[inline]\n\n pub fn running_status_mut(&mut self) -> &mut Option<u8> {\n\n self.inner.running_status_mut()\n\n }\n\n\n\n /// Stop collecting bytemap information for any remaining events.\n\n #[inline]\n\n pub fn not_bytemapped(self) -> EventIter<'a> {\n\n EventIter {\n\n inner: EventIterGeneric {\n\n raw: self.inner.raw,\n\n running_status: self.inner.running_status,\n", "file_path": "src/smf.rs", "rank": 59, "score": 30705.717854534403 }, { "content": "/// This makes sense, since it's DeltaTime [+ Status] + Key + Velocity for NoteOn and NoteOff\n\n/// events, which should make up the bulk of most MIDI files.\n\n///\n\n/// Erring on the large side for events/byte (erring on the small side for bytes/event), we can\n\n/// approximate to 3 bytes/event.\n\n#[cfg(feature = \"alloc\")]\n\nconst BYTES_TO_EVENTS: f32 = 1.0 / 3.0;\n\n\n\n/// How many bytes per event to estimate when allocating memory when writing.\n\n///\n\n/// A value that is too large will overallocate space for bytes, while a value that's too small\n\n/// will underallocate bytes.\n\n///\n\n/// Since the writer uses running status by default, a value a bit over `3` will allocate enough for\n\n/// almost all cases (Except for eg. info tracks, which usually have a high byte/event count\n\n/// because they contain text. 
However these tracks are small enough that reallocating doesn't\n\n/// matter too much).\n\n#[cfg(feature = \"alloc\")]\n\nconst EVENTS_TO_BYTES: f32 = 3.4;\n\n\n", "file_path": "src/smf.rs", "rank": 60, "score": 30705.044375671398 }, { "content": "impl<'a> EventIter<'a> {\n\n /// Create an event iterator from raw track bytes.\n\n ///\n\n /// It can be hard to obtain raw track bytes.\n\n /// Usually these raw track bytes are obtained from the [`unread`](#method.unread) method on an\n\n /// event iterator.\n\n #[inline]\n\n pub fn new(raw: &[u8]) -> EventIter {\n\n EventIter {\n\n inner: EventIterGeneric::new(raw),\n\n }\n\n }\n\n\n\n /// Get the remaining unparsed event bytes.\n\n #[inline]\n\n pub fn unread(&self) -> &'a [u8] {\n\n self.inner.unread()\n\n }\n\n\n\n /// Get the current running status of the track.\n", "file_path": "src/smf.rs", "rank": 61, "score": 30703.527116351946 }, { "content": " }\n\n}\n\nimpl<'a> EventBytemapIter<'a> {\n\n /// Create an event iterator from raw track bytes.\n\n ///\n\n /// It can be hard to obtain raw track bytes.\n\n /// Usually these raw track bytes are obtained from the [`unread`](#method.unread) method on an\n\n /// event iterator.\n\n #[inline]\n\n pub fn new(raw: &[u8]) -> EventBytemapIter {\n\n EventBytemapIter {\n\n inner: EventIterGeneric::new(raw),\n\n }\n\n }\n\n\n\n /// Get the remaining unparsed event bytes.\n\n #[inline]\n\n pub fn unread(&self) -> &'a [u8] {\n\n self.inner.unread()\n\n }\n", "file_path": "src/smf.rs", "rank": 62, "score": 30703.286750714244 }, { "content": "\n\n /// Get the current running status of the track.\n\n #[inline]\n\n fn running_status(&self) -> Option<u8> {\n\n self.running_status\n\n }\n\n\n\n /// Modify the current running status of the track.\n\n #[inline]\n\n fn running_status_mut(&mut self) -> &mut Option<u8> {\n\n &mut self.running_status\n\n }\n\n\n\n #[cfg(feature = \"alloc\")]\n\n #[inline]\n\n fn estimate_events(&self) -> usize {\n\n (self.raw.len() as f32 * 
BYTES_TO_EVENTS) as usize\n\n }\n\n\n\n #[cfg(feature = \"alloc\")]\n", "file_path": "src/smf.rs", "rank": 63, "score": 30702.516376190826 }, { "content": " }\n\n }\n\n //Failed to read chunk\n\n Err(err) => {\n\n if cfg!(feature = \"strict\") {\n\n break Some(Err(err).context(err_malformed!(\"invalid chunk\")));\n\n } else {\n\n //Ignore invalid chunk\n\n }\n\n }\n\n }\n\n } else {\n\n break None;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/smf.rs", "rank": 64, "score": 30700.091779361857 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum Chunk<'a> {\n\n Header(Header, u16),\n\n Track(&'a [u8]),\n\n}\n\nimpl<'a> Chunk<'a> {\n\n /// Should be called with a byte slice at least as large as the chunk (ideally until EOF).\n\n /// The slice will be modified to point to the next chunk.\n\n /// If we're *exactly* at EOF (slice length 0), returns a None signalling no more chunks.\n\n fn read(raw: &mut &'a [u8]) -> Result<Option<Chunk<'a>>> {\n\n Ok(loop {\n\n if raw.is_empty() {\n\n break None;\n\n }\n\n let id = raw\n\n .split_checked(4)\n\n .ok_or(err_invalid!(\"failed to read chunkid\"))?;\n\n let len = u32::read(raw).context(err_invalid!(\"failed to read chunklen\"))?;\n\n let chunkdata = match raw.split_checked(len as usize) {\n\n Some(chunkdata) => chunkdata,\n\n None => {\n", "file_path": "src/smf.rs", "rank": 65, "score": 28882.84270173781 }, { "content": "trait EventKind<'a> {\n\n type Event: 'a;\n\n fn read_ev(raw: &mut &'a [u8], running_status: &mut Option<u8>) -> Result<Self::Event>;\n\n}\n\n\n", "file_path": "src/smf.rs", "rank": 66, "score": 27272.322781210158 }, { "content": "struct ChunkIter<'a>(&'a [u8]);\n\nimpl<'a> Iterator for ChunkIter<'a> {\n\n type Item = ([u8; 4], &'a [u8]);\n\n fn next(&mut self) -> Option<([u8; 4], &'a [u8])> {\n\n if self.0.len() >= 8 {\n\n let mut id = [0; 4];\n\n let mut len = [0; 4];\n\n id.copy_from_slice(&self.0[..4]);\n\n len.copy_from_slice(&self.0[4..8]);\n\n self.0 = &self.0[8..];\n\n let len = 
u32::from_le_bytes(len);\n\n let data = match self.0.split_checked(len as usize) {\n\n Some(data) => data,\n\n None => mem::replace(&mut self.0, &[]),\n\n };\n\n if len % 2 == 1 {\n\n let _pad = self.0.split_checked(1);\n\n }\n\n Some((id, data))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/riff.rs", "rank": 67, "score": 26752.982564268164 }, { "content": "use midly::Smf;\n\nuse std::{\n\n env,\n\n error::Error,\n\n fs,\n\n path::{Path, PathBuf},\n\n time::{Duration, Instant},\n\n};\n\n\n\nconst MIDI_DIR: &str = \"../test-asset\";\n\n\n\nconst MIDI_EXT: &[&str] = &[\"mid\", \"midi\", \"rmi\"];\n\n\n\nconst INFO_COLLECTORS: &[(&str, fn(&Path) -> Result<(), Box<dyn Error>>)] = &[\n\n (\"fs_vs_cpu\", fs_vs_cpu),\n\n (\"bytes_per_event\", bytes_per_event),\n\n];\n\n\n", "file_path": "collectinfo/src/main.rs", "rank": 69, "score": 40.0811218667938 }, { "content": "use crate::{EventIter, Result as MidlyResult, TrackEvent};\n\nuse std::{fs, path::Path, time::Instant};\n\n\n\n/// Open and read the content of a file.\n\nmacro_rules! 
open {\n\n {$name:ident : $file:expr} => {\n\n let $name = fs::read(AsRef::<Path>::as_ref(\"test-asset\").join($file)).unwrap();\n\n };\n\n {$name:ident : [$parse:ident] $file:expr} => {\n\n let $name = match $parse::Smf::parse(&$file[..]) {\n\n Ok(smf) => smf,\n\n Err(err) => {\n\n eprintln!(\"failed to parse test file: {:?}\", err);\n\n panic!()\n\n },\n\n };\n\n };\n\n}\n\n\n\n/// Macro for parsing a MIDI file.\n", "file_path": "src/test.rs", "rank": 70, "score": 33.788538186623626 }, { "content": "pub use crate::smf::write_std;\n\n#[cfg(feature = \"alloc\")]\n\npub use crate::{\n\n arena::Arena,\n\n smf::{BytemappedTrack, Smf, SmfBytemap, Track},\n\n};\n\npub use crate::{\n\n error::{Error, ErrorKind, Result},\n\n event::{MetaMessage, MidiMessage, PitchBend, TrackEvent, TrackEventKind},\n\n primitive::{Format, Fps, SmpteTime, Timing},\n\n smf::{parse, write, EventBytemapIter, EventIter, Header, TrackIter},\n\n};\n\n\n\n/// Exotically-sized integers used by the MIDI standard.\n\npub mod num {\n\n pub use crate::primitive::{u14, u15, u24, u28, u4, u7};\n\n}\n\n\n\n#[cfg(test)]\n\nmod test;\n", "file_path": "src/lib.rs", "rank": 71, "score": 27.5367835772447 }, { "content": "//! # Overview\n\n//!\n\n//! `midly` is a full-featured MIDI parser and writer, focused on performance.\n\n//!\n\n//! Parsing a `.mid` file can be as simple as:\n\n//!\n\n//! ```rust\n\n//! # #[cfg(feature = \"alloc\")] {\n\n//! use midly::Smf;\n\n//!\n\n//! let smf = Smf::parse(include_bytes!(\"../test-asset/Clementi.mid\")).unwrap();\n\n//!\n\n//! for (i, track) in smf.tracks.iter().enumerate() {\n\n//! println!(\"track {} has {} events\", i, track.len());\n\n//! }\n\n//! # }\n\n//! ```\n\n//!\n\n//! # Parsing Standard Midi Files (`.mid` files)\n\n//!\n", "file_path": "src/lib.rs", "rank": 72, "score": 26.974083783667652 }, { "content": "//! ```rust\n\n//! # #[cfg(feature = \"std\")] {\n\n//! # use std::fs;\n\n//! # use midly::Smf;\n\n//! 
# let bytes = fs::read(\"test-asset/Clementi.mid\").unwrap();\n\n//! # let smf = Smf::parse(&bytes).unwrap();\n\n//! let mut in_memory = Vec::new();\n\n//! smf.write(&mut in_memory).unwrap();\n\n//!\n\n//! println!(\"midi file fits in {} bytes!\", in_memory.len());\n\n//! # }\n\n//! ```\n\n//!\n\n//! # Parsing standalone MIDI messages\n\n//!\n\n//! When using an OS API such as [`midir`](https://docs.rs/midir),\n\n//! [`LiveEvent`](live/enum.LiveEvent.html) can be used to parse the raw MIDI bytes:\n\n//!\n\n//! ```rust\n\n//! use midly::{live::LiveEvent, MidiMessage};\n", "file_path": "src/lib.rs", "rank": 74, "score": 26.643545550111323 }, { "content": " pub fn parse(raw: &[u8]) -> MidlyResult<Smf> {\n\n let (header, tracks) = crate::parse(raw)?;\n\n Ok(Smf {\n\n header,\n\n tracks: tracks\n\n .map(|events| {\n\n events.and_then(|evs| evs.bytemapped().collect::<MidlyResult<Vec<_>>>())\n\n })\n\n .collect::<MidlyResult<Vec<_>>>()?,\n\n })\n\n }\n\n }\n\n pub fn len(mut raw: &[u8], track: Vec<(&[u8], TrackEvent)>) -> usize {\n\n //Quick and dirty test to make sure events bytes are present in the source in order, and\n\n //NOT consecutive (because delta times must interrupt every single event)\n\n for (bytes, _ev) in track.iter() {\n\n let mut advanced = false;\n\n while !raw.starts_with(*bytes) {\n\n advanced = true;\n\n match raw.get(1..) {\n", "file_path": "src/test.rs", "rank": 75, "score": 25.40829412168429 }, { "content": "//!\n\n//! # Writing Standard Midi Files\n\n//!\n\n//! Saving `.mid` files is as simple as using the `Smf::save` method:\n\n//!\n\n//! ```rust\n\n//! # #[cfg(feature = \"std\")] {\n\n//! # use std::fs;\n\n//! # use midly::Smf;\n\n//! // Parse file\n\n//! let bytes = fs::read(\"test-asset/Clementi.mid\").unwrap();\n\n//! let smf = Smf::parse(&bytes).unwrap();\n\n//!\n\n//! // Rewrite file\n\n//! smf.save(\"test-asset/ClementiRewritten.mid\").unwrap();\n\n//! # }\n\n//! ```\n\n//!\n\n//! 
SMF files can also be written to an arbitrary writer:\n\n//!\n", "file_path": "src/lib.rs", "rank": 76, "score": 24.53199788061503 }, { "content": "macro_rules! test {\n\n {$file:expr => $parse_method:ident} => {{\n\n let counts = time(&$file.to_string(), ||->Vec<_> {\n\n open!{file: $file};\n\n open!{smf: [$parse_method] file};\n\n smf.tracks.into_iter().map(|track| $parse_method::len(&file[..], track)).collect()\n\n });\n\n for (i,count) in counts.iter().enumerate() {\n\n println!(\"track {} has {} events\", i, count);\n\n }\n\n }};\n\n}\n\n\n\n#[cfg(not(feature = \"alloc\"))]\n\nimpl crate::io::Write for Vec<u8> {\n\n type Error = &'static str;\n\n type Seekable = crate::io::NotSeekable<Self>;\n\n fn write(&mut self, buf: &[u8]) -> Result<(), &'static str> {\n\n self.extend_from_slice(buf);\n\n Ok(())\n\n }\n\n fn invalid_input(msg: &'static str) -> &'static str {\n\n msg\n\n }\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 77, "score": 24.338111802877687 }, { "content": " }\n\n }\n\n pub fn len(_raw: &[u8], track: MidlyResult<EventIter>) -> usize {\n\n match track {\n\n Ok(track) => track.count(),\n\n Err(err) => panic!(\"failed to parse track: {}\", err),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 78, "score": 23.762385908209687 }, { "content": " use super::*;\n\n #[cfg(feature = \"alloc\")]\n\n pub use crate::Smf;\n\n #[cfg(not(feature = \"alloc\"))]\n\n #[derive(Clone, PartialEq, Eq, Debug)]\n\n pub struct Smf<'a> {\n\n pub header: crate::Header,\n\n pub tracks: Vec<Vec<TrackEvent<'a>>>,\n\n }\n\n #[cfg(not(feature = \"alloc\"))]\n\n impl<'a> Smf<'a> {\n\n pub fn parse(raw: &[u8]) -> MidlyResult<Smf> {\n\n let (header, tracks) = crate::parse(raw)?;\n\n Ok(Smf {\n\n header,\n\n tracks: tracks\n\n .map(|events| events.and_then(|evs| evs.collect::<MidlyResult<Vec<_>>>()))\n\n .collect::<MidlyResult<Vec<_>>>()?,\n\n })\n\n }\n", "file_path": "src/test.rs", "rank": 79, "score": 21.222701523839735 }, { "content": "//! 
Parsing Standard Midi Files is usually done through the [`Smf`](struct.Smf.html) struct (or if\n\n//! working in a `no_std` environment without an allocator, through the [`parse`](fn.parse.html)\n\n//! function).\n\n//!\n\n//! Note that most types in this crate have a lifetime parameter, because they reference the bytes\n\n//! in the original file (in order to avoid allocations).\n\n//! For this reason, reading a file and parsing it must be done in two separate steps:\n\n//!\n\n//! ```rust\n\n//! # #[cfg(feature = \"alloc\")] {\n\n//! use std::fs;\n\n//! use midly::Smf;\n\n//!\n\n//! // Load bytes into a buffer\n\n//! let bytes = fs::read(\"test-asset/Clementi.mid\").unwrap();\n\n//!\n\n//! // Parse bytes in a separate step\n\n//! let smf = Smf::parse(&bytes).unwrap();\n\n//! # }\n\n//! ```\n", "file_path": "src/lib.rs", "rank": 81, "score": 20.78945079235139 }, { "content": "//! All sort of events and their parsers.\n\n\n\nuse crate::{\n\n live::{LiveEvent, SystemCommon},\n\n prelude::*,\n\n primitive::{read_varlen_slice, write_varlen_slice, SmpteTime},\n\n};\n\n\n\n/// Represents a parsed SMF track event.\n\n///\n\n/// Consists of a delta time (in MIDI ticks relative to the previous event) and the actual track\n\n/// event.\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]\n\npub struct TrackEvent<'a> {\n\n /// How many MIDI ticks after the previous event should this event fire.\n\n pub delta: u28,\n\n /// The type of event along with event-specific data.\n\n pub kind: TrackEventKind<'a>,\n\n}\n\nimpl<'a> TrackEvent<'a> {\n", "file_path": "src/event.rs", "rank": 82, "score": 19.715202357347856 }, { "content": " #[test]\n\n #[cfg_attr(feature = \"strict\", should_panic)]\n\n fn pidamaged() {\"PiDamaged.mid\"}\n\n\n\n #[test]\n\n fn levels() {\"Levels.mid\"}\n\n\n\n #[test]\n\n fn beethoven() {\"Beethoven.rmi\"}\n\n\n\n #[test]\n\n fn sysex() {\"SysExTest.mid\"}\n\n }\n\n\n\n #[test]\n\n fn not_midi() {\n\n open! 
{file: \"colorlist.txt\"};\n\n let result = parse_collect::Smf::parse(&file);\n\n match result {\n\n Ok(_) => panic!(\"parsed an invalid midi file\"),\n", "file_path": "src/test.rs", "rank": 83, "score": 19.21954902430085 }, { "content": " Some(new_raw) => raw = new_raw,\n\n None => panic!(\"event bytes are not present in the raw bytes\"),\n\n }\n\n }\n\n assert!(advanced, \"event bytes cannot be consecutive\");\n\n raw = &raw[bytes.len()..];\n\n }\n\n track.len()\n\n }\n\n}\n\nmod parse_lazy {\n\n use super::*;\n\n pub struct Smf<'a> {\n\n pub header: crate::Header,\n\n pub tracks: crate::TrackIter<'a>,\n\n }\n\n impl Smf<'_> {\n\n pub fn parse(raw: &[u8]) -> MidlyResult<Smf> {\n\n let (header, tracks) = crate::parse(raw)?;\n\n Ok(Smf { header, tracks })\n", "file_path": "src/test.rs", "rank": 84, "score": 18.902711661208222 }, { "content": "#![cfg(feature = \"alloc\")]\n\n\n\nuse crate::prelude::*;\n\nuse core::cell::UnsafeCell;\n\n\n\n/// Helps overcome limitations of the lifetime system when constructing MIDI events and files.\n\n///\n\n/// Because many events contain references to data that outlives them, it can be hard to build a\n\n/// MIDI file programatically.\n\n///\n\n/// Consider the following code:\n\n///\n\n/// ```rust,compile_fail\n\n/// use midly::{TrackEvent, TrackEventKind, MetaMessage};\n\n///\n\n/// let mut track = Vec::new();\n\n/// for i in 0..64 {\n\n/// let marker_name = format!(\"Marker {}\", i);\n\n/// let marker_ref = marker_name.as_bytes();\n\n/// track.push(TrackEvent {\n", "file_path": "src/arena.rs", "rank": 85, "score": 18.456303887203127 }, { "content": " }};\n\n}\n\n\n\npub(crate) trait ResultExt<T> {\n\n fn context(self, ctx: &'static ErrorKind) -> StdResult<T, Error>;\n\n}\n\nimpl<T> ResultExt<T> for StdResult<T, Error> {\n\n #[inline]\n\n fn context(self, ctx: &'static ErrorKind) -> StdResult<T, Error> {\n\n self.map_err(|err| err.chain_ctx(ctx))\n\n }\n\n}\n\nimpl<T> ResultExt<T> for StdResult<T, &'static ErrorKind> {\n\n 
#[inline]\n\n fn context(self, ctx: &'static ErrorKind) -> StdResult<T, Error> {\n\n self.map_err(|errkind| Error::from(errkind).chain_ctx(ctx))\n\n }\n\n}\n\n\n\n/// The result type used by the MIDI parser.\n\npub type Result<T> = StdResult<T, Error>;\n\npub(crate) use core::result::Result as StdResult;\n", "file_path": "src/error.rs", "rank": 86, "score": 18.0984322945471 }, { "content": " /// Parse a complete MIDI message from its raw bytes.\n\n ///\n\n /// This method can be used to parse raw MIDI bytes coming from an OS API (ie. a status byte\n\n /// in the range `0x80 ..= 0xFF` followed by data bytes in the range `0x00 ..= 0x7F`).\n\n ///\n\n /// Note that this function will not read the \"meta messages\" present in `.mid` files, since\n\n /// those cannot appear in a live MIDI connection, only in offline files.\n\n ///\n\n /// Also see the example in the root crate documentation.\n\n pub fn parse(mut raw: &'a [u8]) -> Result<LiveEvent<'a>> {\n\n let status = raw\n\n .split_checked(1)\n\n .ok_or_else(|| err_invalid!(\"no status byte\"))?[0];\n\n let data = u7::slice_from_int(raw);\n\n Self::read(status, data)\n\n }\n\n\n\n pub(crate) fn read(status: u8, data: &[u7]) -> Result<LiveEvent> {\n\n match status {\n\n 0x80..=0xEF => {\n", "file_path": "src/live.rs", "rank": 87, "score": 17.532529442441884 }, { "content": " pub(crate) use std::{fs::File, io, path::Path};\n\n\n\n macro_rules! 
bit_range {\n\n ($val:expr, $range:expr) => {{\n\n let mask = (1 << ($range.end - $range.start)) - 1;\n\n ($val >> $range.start) & mask\n\n }};\n\n }\n\n}\n\n\n\nmod arena;\n\nmod event;\n\npub mod io;\n\npub mod live;\n\nmod primitive;\n\nmod riff;\n\nmod smf;\n\npub mod stream;\n\n\n\n#[cfg(feature = \"std\")]\n", "file_path": "src/lib.rs", "rank": 88, "score": 17.40590575121082 }, { "content": " Sequential,\n\n}\n\nimpl Format {\n\n pub(crate) fn read(raw: &mut &[u8]) -> Result<Format> {\n\n let format = u16::read(raw)?;\n\n Ok(match format {\n\n 0 => Format::SingleTrack,\n\n 1 => Format::Parallel,\n\n 2 => Format::Sequential,\n\n _ => bail!(err_invalid!(\"invalid smf format\")),\n\n })\n\n }\n\n\n\n pub(crate) fn encode(&self) -> [u8; 2] {\n\n let code: u16 = match self {\n\n Format::SingleTrack => 0,\n\n Format::Parallel => 1,\n\n Format::Sequential => 2,\n\n };\n\n code.to_be_bytes()\n", "file_path": "src/primitive.rs", "rank": 89, "score": 17.26728517199491 }, { "content": " //Holds the raw reconstructed bytes to be re-parsed\n\n let mut byte_stream = Vec::new();\n\n for (bytes, ev) in smf.tracks.iter().flat_map(|track| track.iter()) {\n\n match ev.kind {\n\n TrackEventKind::Midi { channel, message } => {\n\n //Write down the message bytes, directly from the source bytes\n\n byte_stream.extend_from_slice(bytes);\n\n //Add an expected event\n\n expected_evs.push(EventData {\n\n //Midi messages are fired as soon as the last data byte arrives, therefore the\n\n //length-1\n\n fired_at: byte_stream.len() - 1,\n\n event: Ok(LiveEvent::Midi { channel, message }),\n\n });\n\n }\n\n TrackEventKind::SysEx(data) => {\n\n assert!(\n\n data.iter()\n\n .enumerate()\n\n .all(|(i, &b)| (i == data.len() - 1 && b == 0xF7) || b < 0x80),\n", "file_path": "src/test.rs", "rank": 90, "score": 17.25474852814579 }, { "content": " }\n\n $(#[$attr])*\n\n fn rewrite() {\n\n test_rewrite($filename);\n\n }\n\n }\n\n )*}\n\n}\n\n\n\n/// Test the MIDI parser on several files.\n\nmod parse 
{\n\n use super::*;\n\n\n\n def_tests! {\n\n #[test]\n\n fn clementi() {\"Clementi.mid\"}\n\n\n\n #[test]\n\n fn sandstorm() {\"Sandstorm.mid\"}\n\n\n", "file_path": "src/test.rs", "rank": 91, "score": 16.771785591904376 }, { "content": "}\n\nimpl<'a> TrackEventKind<'a> {\n\n fn read(raw: &mut &'a [u8], running_status: &mut Option<u8>) -> Result<TrackEventKind<'a>> {\n\n //Read status\n\n let mut status = *raw.get(0).ok_or(err_invalid!(\"failed to read status\"))?;\n\n if status < 0x80 {\n\n //Running status!\n\n status = running_status.ok_or(err_invalid!(\n\n \"event missing status with no running status active\"\n\n ))?;\n\n } else {\n\n //Advance slice 1 byte to consume status. Note that because we already did `get()`, we\n\n //can use panicking index here\n\n *raw = &raw[1..];\n\n }\n\n //Delegate further parsing depending on status\n\n let kind = match status {\n\n 0x80..=0xEF => {\n\n *running_status = Some(status);\n\n let data = MidiMessage::read_data_u8(status, raw)?;\n", "file_path": "src/event.rs", "rank": 92, "score": 16.712360637166906 }, { "content": " pub fn write<W: crate::io::Write>(&self, out: &mut W) -> Result<(), W::Error> {\n\n crate::write(&self.header, self.tracks.iter(), out)\n\n }\n\n }\n\n pub fn len(_raw: &[u8], track: Vec<TrackEvent>) -> usize {\n\n track.len()\n\n }\n\n}\n\nmod parse_bytemap {\n\n use super::*;\n\n #[cfg(feature = \"alloc\")]\n\n pub use crate::SmfBytemap as Smf;\n\n #[cfg(not(feature = \"alloc\"))]\n\n #[derive(Clone, PartialEq, Eq, Debug)]\n\n pub struct Smf<'a> {\n\n pub header: crate::Header,\n\n pub tracks: Vec<Vec<(&'a [u8], TrackEvent<'a>)>>,\n\n }\n\n #[cfg(not(feature = \"alloc\"))]\n\n impl<'a> Smf<'a> {\n", "file_path": "src/test.rs", "rank": 93, "score": 16.540564277362456 }, { "content": " 0x05 => MetaMessage::Lyric(data),\n\n 0x06 => MetaMessage::Marker(data),\n\n 0x07 => MetaMessage::CuePoint(data),\n\n 0x08 => MetaMessage::ProgramName(data),\n\n 0x09 => MetaMessage::DeviceName(data),\n\n 0x20 if 
data.len() >= 1 => MetaMessage::MidiChannel(u4::read(&mut data)?),\n\n 0x21 if data.len() >= 1 => MetaMessage::MidiPort(u7::read(&mut data)?),\n\n 0x2F => MetaMessage::EndOfTrack,\n\n 0x51 if data.len() >= 3 => MetaMessage::Tempo(u24::read(&mut data)?),\n\n 0x54 if data.len() >= 5 => MetaMessage::SmpteOffset(\n\n SmpteTime::read(&mut data).context(err_invalid!(\"failed to read smpte time\"))?,\n\n ),\n\n 0x58 if data.len() >= 4 => MetaMessage::TimeSignature(\n\n u8::read(&mut data)?,\n\n u8::read(&mut data)?,\n\n u8::read(&mut data)?,\n\n u8::read(&mut data)?,\n\n ),\n\n 0x59 => {\n\n MetaMessage::KeySignature(u8::read(&mut data)? as i8, u8::read(&mut data)? != 0)\n", "file_path": "src/event.rs", "rank": 94, "score": 16.378902273223403 }, { "content": "/// delta: 0.into(),\n\n/// kind: TrackEventKind::Meta(MetaMessage::Marker(marker_ref)),\n\n/// });\n\n/// }\n\n/// ```\n\n///\n\n/// Looks pretty good, but it fails to compile with\n\n/// `error[E0597]: \"marker_name\" does not live long enough`, with a rightful reason: `marker_name`\n\n/// is dropped before the next iteration of the `for` loop.\n\n///\n\n/// Instead, use the [`Arena`](struct.Arena.html) type like the following code:\n\n///\n\n/// ```rust\n\n/// use midly::{TrackEvent, TrackEventKind, MetaMessage};\n\n///\n\n/// let arena = midly::Arena::new();\n\n/// let mut track = Vec::new();\n\n/// for i in 0..64 {\n\n/// let marker_name = format!(\"Marker {}\", i);\n\n/// let marker_ref = arena.add(marker_name.as_bytes());\n", "file_path": "src/arena.rs", "rank": 95, "score": 16.278923762516882 }, { "content": " }\n\n 0x7F => MetaMessage::SequencerSpecific(data),\n\n _ => MetaMessage::Unknown(type_byte, data),\n\n })\n\n }\n\n fn write<W: Write>(&self, out: &mut W) -> WriteResult<W> {\n\n let mut write_msg = |type_byte: u8, data: &[u8]| {\n\n out.write(&[type_byte])?;\n\n write_varlen_slice(data, out)?;\n\n Ok(())\n\n };\n\n match self {\n\n MetaMessage::TrackNumber(track_num) => match track_num {\n\n None => 
write_msg(0x00, &[]),\n\n Some(track_num) => write_msg(0x00, &track_num.to_be_bytes()[..]),\n\n },\n\n MetaMessage::Text(data) => write_msg(0x01, data),\n\n MetaMessage::Copyright(data) => write_msg(0x02, data),\n\n MetaMessage::TrackName(data) => write_msg(0x03, data),\n\n MetaMessage::InstrumentName(data) => write_msg(0x04, data),\n", "file_path": "src/event.rs", "rank": 96, "score": 16.008051498281873 }, { "content": " io::Write::write_all(&mut self.0, buf)\n\n }\n\n #[inline]\n\n fn invalid_input(msg: &'static str) -> io::Error {\n\n io::Error::new(io::ErrorKind::InvalidInput, msg)\n\n }\n\n #[inline]\n\n fn make_seekable(&mut self) -> Option<&mut Self> {\n\n Some(self)\n\n }\n\n}\n\n#[cfg(feature = \"std\")]\n\nimpl<T: io::Write + io::Seek> Seek for SeekableWrap<T> {\n\n #[inline]\n\n fn tell(&mut self) -> io::Result<u64> {\n\n io::Seek::seek(&mut self.0, io::SeekFrom::Current(0))\n\n }\n\n #[inline]\n\n fn write_at(&mut self, buf: &[u8], pos: u64) -> io::Result<()> {\n\n io::Seek::seek(&mut self.0, io::SeekFrom::Start(pos))?;\n", "file_path": "src/io.rs", "rank": 97, "score": 15.820967083421234 }, { "content": " Ok(())\n\n }\n\n }\n\n #[inline]\n\n fn invalid_input(msg: &'static str) -> CursorError {\n\n CursorError::InvalidInput(msg)\n\n }\n\n}\n\n\n\n/// Bridge between a `midly::io::Write` type and a `std::io::Write` type.\n\n///\n\n/// Always available, but only implements `midly::io::Write` when the `std` feature is enabled.\n\n#[derive(Debug, Clone, Default)]\n\npub struct IoWrap<T>(pub T);\n\n#[cfg(feature = \"std\")]\n\nimpl<T: io::Write> Write for IoWrap<T> {\n\n type Error = io::Error;\n\n type Seekable = NotSeekable<Self>;\n\n #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<()> {\n", "file_path": "src/io.rs", "rank": 98, "score": 15.701065352665577 }, { "content": " io::Write::write_all(&mut self.0, buf)\n\n }\n\n #[inline]\n\n fn invalid_input(msg: &'static str) -> io::Error {\n\n io::Error::new(io::ErrorKind::InvalidInput, msg)\n\n 
}\n\n}\n\n\n\n/// Bridge between a `midly::io::{Write, Seek}` type and a `std::io::{Write, Seek}` type.\n\n///\n\n/// Always available, but only implements `midly::io::{Write, Seek}` when the `std` feature is\n\n/// enabled.\n\n#[derive(Debug, Clone, Default)]\n\npub struct SeekableWrap<T>(pub T);\n\n#[cfg(feature = \"std\")]\n\nimpl<T: io::Write + io::Seek> Write for SeekableWrap<T> {\n\n type Error = io::Error;\n\n type Seekable = Self;\n\n #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<()> {\n", "file_path": "src/io.rs", "rank": 99, "score": 15.491212116528327 } ]
Rust
crates/rust-analyzer/src/config.rs
theHamsta/rust-analyzer
f7f01dd5f0a8254abeefb0156e2c4ebe1447bfb6
use std::{ffi::OsString, path::PathBuf}; use lsp_types::ClientCapabilities; use ra_flycheck::FlycheckConfig; use ra_ide::{AssistConfig, CompletionConfig, InlayHintsConfig}; use ra_project_model::CargoConfig; use serde::Deserialize; #[derive(Debug, Clone)] pub struct Config { pub client_caps: ClientCapsConfig, pub with_sysroot: bool, pub publish_diagnostics: bool, pub lru_capacity: Option<usize>, pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>, pub files: FilesConfig, pub notifications: NotificationsConfig, pub cargo: CargoConfig, pub rustfmt: RustfmtConfig, pub check: Option<FlycheckConfig>, pub inlay_hints: InlayHintsConfig, pub completion: CompletionConfig, pub assist: AssistConfig, pub call_info_full: bool, pub lens: LensConfig, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct LensConfig { pub run: bool, pub debug: bool, pub impementations: bool, } impl Default for LensConfig { fn default() -> Self { Self { run: true, debug: true, impementations: true } } } impl LensConfig { pub const NO_LENS: LensConfig = Self { run: false, debug: false, impementations: false }; pub fn any(&self) -> bool { self.impementations || self.runnable() } pub fn none(&self) -> bool { !self.any() } pub fn runnable(&self) -> bool { self.run || self.debug } } #[derive(Debug, Clone)] pub struct FilesConfig { pub watcher: FilesWatcher, pub exclude: Vec<String>, } #[derive(Debug, Clone)] pub enum FilesWatcher { Client, Notify, } #[derive(Debug, Clone)] pub struct NotificationsConfig { pub cargo_toml_not_found: bool, } #[derive(Debug, Clone)] pub enum RustfmtConfig { Rustfmt { extra_args: Vec<String>, }, #[allow(unused)] CustomCommand { command: String, args: Vec<String>, }, } #[derive(Debug, Clone, Default)] pub struct ClientCapsConfig { pub location_link: bool, pub line_folding_only: bool, pub hierarchical_symbols: bool, pub code_action_literals: bool, pub work_done_progress: bool, pub code_action_group: bool, } impl Default for Config { fn default() -> Self { Config { client_caps: 
ClientCapsConfig::default(), with_sysroot: true, publish_diagnostics: true, lru_capacity: None, proc_macro_srv: None, files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() }, notifications: NotificationsConfig { cargo_toml_not_found: true }, cargo: CargoConfig::default(), rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() }, check: Some(FlycheckConfig::CargoCommand { command: "check".to_string(), all_targets: true, all_features: true, extra_args: Vec::new(), }), inlay_hints: InlayHintsConfig { type_hints: true, parameter_hints: true, chaining_hints: true, max_length: None, }, completion: CompletionConfig { enable_postfix_completions: true, add_call_parenthesis: true, add_call_argument_snippets: true, ..CompletionConfig::default() }, assist: AssistConfig::default(), call_info_full: true, lens: LensConfig::default(), } } } impl Config { #[rustfmt::skip] pub fn update(&mut self, value: &serde_json::Value) { log::info!("Config::update({:#})", value); let client_caps = self.client_caps.clone(); *self = Default::default(); self.client_caps = client_caps; set(value, "/withSysroot", &mut self.with_sysroot); set(value, "/diagnostics/enable", &mut self.publish_diagnostics); set(value, "/lruCapacity", &mut self.lru_capacity); self.files.watcher = match get(value, "/files/watcher") { Some("client") => FilesWatcher::Client, Some("notify") | _ => FilesWatcher::Notify }; set(value, "/notifications/cargoTomlNotFound", &mut self.notifications.cargo_toml_not_found); set(value, "/cargo/noDefaultFeatures", &mut self.cargo.no_default_features); set(value, "/cargo/allFeatures", &mut self.cargo.all_features); set(value, "/cargo/features", &mut self.cargo.features); set(value, "/cargo/loadOutDirsFromCheck", &mut self.cargo.load_out_dirs_from_check); set(value, "/cargo/target", &mut self.cargo.target); match get(value, "/procMacro/enable") { Some(true) => { if let Ok(path) = std::env::current_exe() { self.proc_macro_srv = Some((path, vec!["proc-macro".into()])); } } 
_ => self.proc_macro_srv = None, } match get::<Vec<String>>(value, "/rustfmt/overrideCommand") { Some(mut args) if !args.is_empty() => { let command = args.remove(0); self.rustfmt = RustfmtConfig::CustomCommand { command, args, } } _ => { if let RustfmtConfig::Rustfmt { extra_args } = &mut self.rustfmt { set(value, "/rustfmt/extraArgs", extra_args); } } }; if let Some(false) = get(value, "/checkOnSave/enable") { self.check = None; } else { match get::<Vec<String>>(value, "/checkOnSave/overrideCommand") { Some(mut args) if !args.is_empty() => { let command = args.remove(0); self.check = Some(FlycheckConfig::CustomCommand { command, args, }); } _ => { if let Some(FlycheckConfig::CargoCommand { command, extra_args, all_targets, all_features }) = &mut self.check { set(value, "/checkOnSave/extraArgs", extra_args); set(value, "/checkOnSave/command", command); set(value, "/checkOnSave/allTargets", all_targets); set(value, "/checkOnSave/allFeatures", all_features); } } }; } set(value, "/inlayHints/typeHints", &mut self.inlay_hints.type_hints); set(value, "/inlayHints/parameterHints", &mut self.inlay_hints.parameter_hints); set(value, "/inlayHints/chainingHints", &mut self.inlay_hints.chaining_hints); set(value, "/inlayHints/maxLength", &mut self.inlay_hints.max_length); set(value, "/completion/postfix/enable", &mut self.completion.enable_postfix_completions); set(value, "/completion/addCallParenthesis", &mut self.completion.add_call_parenthesis); set(value, "/completion/addCallArgumentSnippets", &mut self.completion.add_call_argument_snippets); set(value, "/callInfo/full", &mut self.call_info_full); let mut lens_enabled = true; set(value, "/lens/enable", &mut lens_enabled); if lens_enabled { set(value, "/lens/run", &mut self.lens.run); set(value, "/lens/debug", &mut self.lens.debug); set(value, "/lens/implementations", &mut self.lens.impementations); } else { self.lens = LensConfig::NO_LENS; } log::info!("Config::update() = {:#?}", self); fn get<'a, T: 
Deserialize<'a>>(value: &'a serde_json::Value, pointer: &str) -> Option<T> { value.pointer(pointer).and_then(|it| T::deserialize(it).ok()) } fn set<'a, T: Deserialize<'a>>(value: &'a serde_json::Value, pointer: &str, slot: &mut T) { if let Some(new_value) = get(value, pointer) { *slot = new_value } } } pub fn update_caps(&mut self, caps: &ClientCapabilities) { if let Some(doc_caps) = caps.text_document.as_ref() { if let Some(value) = doc_caps.definition.as_ref().and_then(|it| it.link_support) { self.client_caps.location_link = value; } if let Some(value) = doc_caps.folding_range.as_ref().and_then(|it| it.line_folding_only) { self.client_caps.line_folding_only = value } if let Some(value) = doc_caps .document_symbol .as_ref() .and_then(|it| it.hierarchical_document_symbol_support) { self.client_caps.hierarchical_symbols = value } if let Some(value) = doc_caps.code_action.as_ref().map(|it| it.code_action_literal_support.is_some()) { self.client_caps.code_action_literals = value; } self.completion.allow_snippets(false); if let Some(completion) = &doc_caps.completion { if let Some(completion_item) = &completion.completion_item { if let Some(value) = completion_item.snippet_support { self.completion.allow_snippets(value); } } } } if let Some(window_caps) = caps.window.as_ref() { if let Some(value) = window_caps.work_done_progress { self.client_caps.work_done_progress = value; } } self.assist.allow_snippets(false); if let Some(experimental) = &caps.experimental { let snippet_text_edit = experimental.get("snippetTextEdit").and_then(|it| it.as_bool()) == Some(true); self.assist.allow_snippets(snippet_text_edit); let code_action_group = experimental.get("codeActionGroup").and_then(|it| it.as_bool()) == Some(true); self.client_caps.code_action_group = code_action_group } } }
use std::{ffi::OsString, path::PathBuf}; use lsp_types::ClientCapabilities; use ra_flycheck::FlycheckConfig; use ra_ide::{AssistConfig, CompletionConfig, InlayHintsConfig}; use ra_project_model::CargoConfig; use serde::Deserialize; #[derive(Debug, Clone)] pub struct Config { pub client_caps: ClientCapsConfig, pub with_sysroot: bool, pub publish_diagnostics: bool, pub lru_capacity: Option<usize>, pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>, pub files: FilesConfig, pub notifications: NotificationsConfig, pub cargo: CargoConfig, pub rustfmt: RustfmtConfig, pub check: Option<FlycheckConfig>, pub inlay_hints: InlayHintsConfig, pub completion: CompletionConfig, pub assist: AssistConfig, pub call_info_full: bool, pub lens: LensConfig, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct LensConfig { pub run: bool, pub debug: bool, pub impementations: bool, } impl Default for LensConfig { fn default() -> Self { Self { run: true, debug: true, impementations: true } } } impl LensConfig { pub const NO_LENS: LensConfig = Self { run: false, debug: false, impementations: false }; pub fn any(&self) -> bool { self.impementations || self.runnable() } pub fn none(&self) -> bool { !self.any() } pub fn runnable(&self) -> bool { self.run || self.debug } } #[derive(Debug, Clone)] pub struct FilesConfig { pub watcher: FilesWatcher, pub exclude: Vec<String>, } #[derive(Debug, Clone)] pub enum FilesWatcher { Client, Notify, } #[derive(Debug, Clone)] pub struct NotificationsConfig { pub cargo_toml_not_found: bool, } #[derive(Debug, Clone)] pub enum RustfmtConfig { Rustfmt { extra_args: Vec<String>, }, #[allow(unused)] CustomCommand { command: String, args: Vec<String>, }, } #[derive(Debug, Clone, Default)] pub struct ClientCapsConfig { pub location_link: bool, pub line_folding_only: bool, pub hierarchical_symbols: bool, pub code_action_literals: bool, pub work_done_progress: bool, pub code_action_group: bool, } impl Default for Config { fn default() -> Self { Config { client_caps: 
ClientCapsConfig::default(), with_sysroot: true, publish_diagnostics: true, lru_capacity: None, proc_macro_srv: None, files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() }, notifications: NotificationsConfig { cargo_toml_not_found: true },
} impl Config { #[rustfmt::skip] pub fn update(&mut self, value: &serde_json::Value) { log::info!("Config::update({:#})", value); let client_caps = self.client_caps.clone(); *self = Default::default(); self.client_caps = client_caps; set(value, "/withSysroot", &mut self.with_sysroot); set(value, "/diagnostics/enable", &mut self.publish_diagnostics); set(value, "/lruCapacity", &mut self.lru_capacity); self.files.watcher = match get(value, "/files/watcher") { Some("client") => FilesWatcher::Client, Some("notify") | _ => FilesWatcher::Notify }; set(value, "/notifications/cargoTomlNotFound", &mut self.notifications.cargo_toml_not_found); set(value, "/cargo/noDefaultFeatures", &mut self.cargo.no_default_features); set(value, "/cargo/allFeatures", &mut self.cargo.all_features); set(value, "/cargo/features", &mut self.cargo.features); set(value, "/cargo/loadOutDirsFromCheck", &mut self.cargo.load_out_dirs_from_check); set(value, "/cargo/target", &mut self.cargo.target); match get(value, "/procMacro/enable") { Some(true) => { if let Ok(path) = std::env::current_exe() { self.proc_macro_srv = Some((path, vec!["proc-macro".into()])); } } _ => self.proc_macro_srv = None, } match get::<Vec<String>>(value, "/rustfmt/overrideCommand") { Some(mut args) if !args.is_empty() => { let command = args.remove(0); self.rustfmt = RustfmtConfig::CustomCommand { command, args, } } _ => { if let RustfmtConfig::Rustfmt { extra_args } = &mut self.rustfmt { set(value, "/rustfmt/extraArgs", extra_args); } } }; if let Some(false) = get(value, "/checkOnSave/enable") { self.check = None; } else { match get::<Vec<String>>(value, "/checkOnSave/overrideCommand") { Some(mut args) if !args.is_empty() => { let command = args.remove(0); self.check = Some(FlycheckConfig::CustomCommand { command, args, }); } _ => { if let Some(FlycheckConfig::CargoCommand { command, extra_args, all_targets, all_features }) = &mut self.check { set(value, "/checkOnSave/extraArgs", extra_args); set(value, 
"/checkOnSave/command", command); set(value, "/checkOnSave/allTargets", all_targets); set(value, "/checkOnSave/allFeatures", all_features); } } }; } set(value, "/inlayHints/typeHints", &mut self.inlay_hints.type_hints); set(value, "/inlayHints/parameterHints", &mut self.inlay_hints.parameter_hints); set(value, "/inlayHints/chainingHints", &mut self.inlay_hints.chaining_hints); set(value, "/inlayHints/maxLength", &mut self.inlay_hints.max_length); set(value, "/completion/postfix/enable", &mut self.completion.enable_postfix_completions); set(value, "/completion/addCallParenthesis", &mut self.completion.add_call_parenthesis); set(value, "/completion/addCallArgumentSnippets", &mut self.completion.add_call_argument_snippets); set(value, "/callInfo/full", &mut self.call_info_full); let mut lens_enabled = true; set(value, "/lens/enable", &mut lens_enabled); if lens_enabled { set(value, "/lens/run", &mut self.lens.run); set(value, "/lens/debug", &mut self.lens.debug); set(value, "/lens/implementations", &mut self.lens.impementations); } else { self.lens = LensConfig::NO_LENS; } log::info!("Config::update() = {:#?}", self); fn get<'a, T: Deserialize<'a>>(value: &'a serde_json::Value, pointer: &str) -> Option<T> { value.pointer(pointer).and_then(|it| T::deserialize(it).ok()) } fn set<'a, T: Deserialize<'a>>(value: &'a serde_json::Value, pointer: &str, slot: &mut T) { if let Some(new_value) = get(value, pointer) { *slot = new_value } } } pub fn update_caps(&mut self, caps: &ClientCapabilities) { if let Some(doc_caps) = caps.text_document.as_ref() { if let Some(value) = doc_caps.definition.as_ref().and_then(|it| it.link_support) { self.client_caps.location_link = value; } if let Some(value) = doc_caps.folding_range.as_ref().and_then(|it| it.line_folding_only) { self.client_caps.line_folding_only = value } if let Some(value) = doc_caps .document_symbol .as_ref() .and_then(|it| it.hierarchical_document_symbol_support) { self.client_caps.hierarchical_symbols = value } if let 
Some(value) = doc_caps.code_action.as_ref().map(|it| it.code_action_literal_support.is_some()) { self.client_caps.code_action_literals = value; } self.completion.allow_snippets(false); if let Some(completion) = &doc_caps.completion { if let Some(completion_item) = &completion.completion_item { if let Some(value) = completion_item.snippet_support { self.completion.allow_snippets(value); } } } } if let Some(window_caps) = caps.window.as_ref() { if let Some(value) = window_caps.work_done_progress { self.client_caps.work_done_progress = value; } } self.assist.allow_snippets(false); if let Some(experimental) = &caps.experimental { let snippet_text_edit = experimental.get("snippetTextEdit").and_then(|it| it.as_bool()) == Some(true); self.assist.allow_snippets(snippet_text_edit); let code_action_group = experimental.get("codeActionGroup").and_then(|it| it.as_bool()) == Some(true); self.client_caps.code_action_group = code_action_group } } }
cargo: CargoConfig::default(), rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() }, check: Some(FlycheckConfig::CargoCommand { command: "check".to_string(), all_targets: true, all_features: true, extra_args: Vec::new(), }), inlay_hints: InlayHintsConfig { type_hints: true, parameter_hints: true, chaining_hints: true, max_length: None, }, completion: CompletionConfig { enable_postfix_completions: true, add_call_parenthesis: true, add_call_argument_snippets: true, ..CompletionConfig::default() }, assist: AssistConfig::default(), call_info_full: true, lens: LensConfig::default(), } }
function_block-function_prefix_line
[ { "content": "pub fn run_dist(nightly: bool, client_version: Option<String>) -> Result<()> {\n\n let dist = project_root().join(\"dist\");\n\n rm_rf(&dist)?;\n\n fs2::create_dir_all(&dist)?;\n\n\n\n if let Some(version) = client_version {\n\n let release_tag = if nightly { \"nightly\".to_string() } else { date_iso()? };\n\n dist_client(&version, &release_tag)?;\n\n }\n\n dist_server(nightly)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "xtask/src/dist.rs", "rank": 0, "score": 464079.02891305735 }, { "content": "#[doc(hidden)]\n\npub fn run_process(cmd: String, echo: bool, stdin: Option<&[u8]>) -> Result<String> {\n\n run_process_inner(&cmd, echo, stdin).with_context(|| format!(\"process `{}` failed\", cmd))\n\n}\n\n\n", "file_path": "xtask/src/not_bash.rs", "rank": 1, "score": 413012.8171194886 }, { "content": "pub fn reparse(&self, edit: &AtomTextEdit) -> File {\n\n <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))\n\n}\n\n\",\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_join_lines_block() {\n\n check_join_lines(\n\n r\"\n", "file_path": "crates/ra_ide/src/join_lines.rs", "rank": 2, "score": 367461.37701848336 }, { "content": "fn make_const_compl_syntax(const_: &ast::ConstDef) -> String {\n\n let const_ = edit::remove_attrs_and_docs(const_);\n\n\n\n let const_start = const_.syntax().text_range().start();\n\n let const_end = const_.syntax().text_range().end();\n\n\n\n let start =\n\n const_.syntax().first_child_or_token().map_or(const_start, |f| f.text_range().start());\n\n\n\n let end = const_\n\n .syntax()\n\n .children_with_tokens()\n\n .find(|s| s.kind() == T![;] || s.kind() == T![=])\n\n .map_or(const_end, |f| f.text_range().start());\n\n\n\n let len = end - start;\n\n let range = TextRange::new(0.into(), len);\n\n\n\n let syntax = const_.syntax().text().slice(range).to_string();\n\n\n", "file_path": "crates/ra_ide/src/completion/complete_trait_impl.rs", "rank": 3, "score": 367263.19498643914 }, { "content": "pub fn run_release(dry_run: 
bool) -> Result<()> {\n\n if !dry_run {\n\n run!(\"git switch release\")?;\n\n run!(\"git fetch upstream --tags --force\")?;\n\n run!(\"git reset --hard tags/nightly\")?;\n\n run!(\"git push\")?;\n\n }\n\n\n\n let website_root = project_root().join(\"../rust-analyzer.github.io\");\n\n let changelog_dir = website_root.join(\"./thisweek/_posts\");\n\n\n\n let today = date_iso()?;\n\n let commit = run!(\"git rev-parse HEAD\")?;\n\n let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count();\n\n\n\n let contents = format!(\n\n \"\\\n\n= Changelog #{}\n\n:sectanchors:\n\n:page-layout: post\n", "file_path": "xtask/src/lib.rs", "rank": 4, "score": 358896.89935574925 }, { "content": " pub trait Trait { fn the_method(&self); }\n\n }\n\n use m::Trait;\n\n impl Trait for A {}\n\n fn foo(a: A) {\n\n a.<|>\n\n }\n\n \",\n\n ),\n\n @r###\"\n\n [\n\n CompletionItem {\n\n label: \"the_method()\",\n\n source_range: 219..219,\n\n delete: 219..219,\n\n insert: \"the_method()$0\",\n\n kind: Method,\n\n lookup: \"the_method\",\n\n detail: \"fn the_method(&self)\",\n\n },\n", "file_path": "crates/ra_ide/src/completion/complete_dot.rs", "rank": 5, "score": 352153.74003186036 }, { "content": " pub trait Fn<Args> { type Output; }\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 6, "score": 342433.4877663456 }, { "content": "//- /main.rs\n\ntrait Clone { fn clone(&self) -> Self; }\n", "file_path": "crates/ra_hir_ty/src/tests/method_resolution.rs", "rank": 7, "score": 336608.4364135792 }, { "content": "// Generates the surrounding `impl Type { <code> }` including type and lifetime\n\n// parameters\n\nfn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {\n\n let type_params = strukt.type_param_list();\n\n let mut buf = String::with_capacity(code.len());\n\n buf.push_str(\"\\n\\nimpl\");\n\n if let Some(type_params) = &type_params {\n\n format_to!(buf, \"{}\", type_params.syntax());\n\n }\n\n buf.push_str(\" \");\n\n 
buf.push_str(strukt.name().unwrap().text().as_str());\n\n if let Some(type_params) = type_params {\n\n let lifetime_params = type_params\n\n .lifetime_params()\n\n .filter_map(|it| it.lifetime_token())\n\n .map(|it| it.text().clone());\n\n let type_params =\n\n type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());\n\n format_to!(buf, \"<{}>\", lifetime_params.chain(type_params).sep_by(\", \"))\n\n }\n\n\n\n format_to!(buf, \" {{\\n{}\\n}}\\n\", code);\n\n\n\n buf\n\n}\n\n\n", "file_path": "crates/ra_assists/src/handlers/add_new.rs", "rank": 8, "score": 323986.8646536578 }, { "content": "fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {\n\n if receiver_is_ambiguous_float_literal {\n\n let text = receiver.syntax().text();\n\n let without_dot = ..text.len() - TextSize::of('.');\n\n text.slice(without_dot).to_string()\n\n } else {\n\n receiver.to_string()\n\n }\n\n}\n\n\n", "file_path": "crates/ra_ide/src/completion/complete_postfix.rs", "rank": 9, "score": 321278.20917749393 }, { "content": "fn is_deprecated(node: impl HasAttrs, db: &RootDatabase) -> bool {\n\n node.attrs(db).by_key(\"deprecated\").exists()\n\n}\n\n\n", "file_path": "crates/ra_ide/src/completion/presentation.rs", "rank": 10, "score": 321202.49542413955 }, { "content": "pub fn rust_files(path: &Path) -> impl Iterator<Item = PathBuf> {\n\n let iter = WalkDir::new(path);\n\n return iter\n\n .into_iter()\n\n .filter_entry(|e| !is_hidden(e))\n\n .map(|e| e.unwrap())\n\n .filter(|e| !e.file_type().is_dir())\n\n .map(|e| e.into_path())\n\n .filter(|path| path.extension().map(|it| it == \"rs\").unwrap_or(false));\n\n\n\n fn is_hidden(entry: &DirEntry) -> bool {\n\n entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)\n\n }\n\n}\n\n\n", "file_path": "xtask/src/lib.rs", "rank": 11, "score": 319077.7551702206 }, { "content": "fn fn_arg_name(fn_arg: &ast::Expr) -> Option<String> {\n\n match fn_arg {\n\n 
ast::Expr::CastExpr(cast_expr) => fn_arg_name(&cast_expr.expr()?),\n\n _ => Some(\n\n fn_arg\n\n .syntax()\n\n .descendants()\n\n .filter(|d| ast::NameRef::can_cast(d.kind()))\n\n .last()?\n\n .to_string(),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "crates/ra_assists/src/handlers/add_function.rs", "rank": 12, "score": 313712.92802158836 }, { "content": "#[inline(always)]\n\npub fn is_ci() -> bool {\n\n option_env!(\"CI\").is_some()\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! eprintln {\n\n ($($tt:tt)*) => {{\n\n if $crate::is_ci() {\n\n panic!(\"Forgot to remove debug-print?\")\n\n }\n\n std::eprintln!($($tt)*)\n\n }}\n\n}\n\n\n\n/// Appends formatted string to a `String`.\n\n#[macro_export]\n\nmacro_rules! format_to {\n\n ($buf:expr) => ();\n\n ($buf:expr, $lit:literal $($arg:tt)*) => {\n\n { use ::std::fmt::Write as _; let _ = ::std::write!($buf, $lit $($arg)*); }\n\n };\n\n}\n\n\n", "file_path": "crates/stdx/src/lib.rs", "rank": 13, "score": 312032.86839206686 }, { "content": "pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {\n\n let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(\", \");\n\n ast_from_text(&format!(\"use {{{}}};\", use_trees))\n\n}\n\n\n", "file_path": "crates/ra_syntax/src/ast/make.rs", "rank": 14, "score": 311198.87150900066 }, { "content": "pub fn run_rustfmt(mode: Mode) -> Result<()> {\n\n let _dir = pushd(project_root());\n\n let _e = pushenv(\"RUSTUP_TOOLCHAIN\", \"stable\");\n\n ensure_rustfmt()?;\n\n match mode {\n\n Mode::Overwrite => run!(\"cargo fmt\"),\n\n Mode::Verify => run!(\"cargo fmt -- --check\"),\n\n }?;\n\n Ok(())\n\n}\n\n\n", "file_path": "xtask/src/lib.rs", "rank": 15, "score": 306723.22926228045 }, { "content": "pub fn cargo() -> PathBuf {\n\n get_path_for_executable(\"cargo\")\n\n}\n\n\n", "file_path": "crates/ra_toolchain/src/lib.rs", "rank": 16, "score": 306715.9550424544 }, { "content": "fn check_disjoint(indels: &mut [impl std::borrow::Borrow<Indel>]) -> 
bool {\n\n indels.sort_by_key(|indel| (indel.borrow().delete.start(), indel.borrow().delete.end()));\n\n indels\n\n .iter()\n\n .zip(indels.iter().skip(1))\n\n .all(|(l, r)| l.borrow().delete.end() <= r.borrow().delete.start())\n\n}\n", "file_path": "crates/ra_text_edit/src/lib.rs", "rank": 17, "score": 305899.3087888552 }, { "content": "fn run_process_inner(cmd: &str, echo: bool, stdin: Option<&[u8]>) -> Result<String> {\n\n let mut args = shelx(cmd);\n\n let binary = args.remove(0);\n\n let current_dir = Env::with(|it| it.cwd().to_path_buf());\n\n\n\n if echo {\n\n println!(\"> {}\", cmd)\n\n }\n\n\n\n let mut command = Command::new(binary);\n\n command.args(args).current_dir(current_dir).stderr(Stdio::inherit());\n\n let output = match stdin {\n\n None => command.stdin(Stdio::null()).output(),\n\n Some(stdin) => {\n\n command.stdin(Stdio::piped()).stdout(Stdio::piped());\n\n let mut process = command.spawn()?;\n\n process.stdin.take().unwrap().write_all(stdin)?;\n\n process.wait_with_output()\n\n }\n\n }?;\n", "file_path": "xtask/src/not_bash.rs", "rank": 18, "score": 304638.9390846086 }, { "content": "/// Returns `false` if slow tests should not run, otherwise returns `true` and\n\n/// also creates a file at `./target/.slow_tests_cookie` which serves as a flag\n\n/// that slow tests did run.\n\npub fn skip_slow_tests() -> bool {\n\n let should_skip = std::env::var(\"CI\").is_err() && std::env::var(\"RUN_SLOW_TESTS\").is_err();\n\n if should_skip {\n\n eprintln!(\"ignoring slow test\")\n\n } else {\n\n let path = project_dir().join(\"./target/.slow_tests_cookie\");\n\n fs::write(&path, \".\").unwrap();\n\n }\n\n should_skip\n\n}\n\n\n\nconst REWRITE: bool = false;\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 19, "score": 303918.3521004054 }, { "content": "fn has_new_fn(imp: &ast::ImplDef) -> bool {\n\n if let Some(il) = imp.item_list() {\n\n for item in il.assoc_items() {\n\n if let ast::AssocItem::FnDef(f) = item {\n\n if let Some(name) = 
f.name() {\n\n if name.text().eq_ignore_ascii_case(\"new\") {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};\n\n\n", "file_path": "crates/ra_assists/src/handlers/add_new.rs", "rank": 20, "score": 302946.65657946945 }, { "content": "pub fn date_iso() -> Result<String> {\n\n run!(\"date --iso --utc\")\n\n}\n\n\n", "file_path": "xtask/src/not_bash.rs", "rank": 21, "score": 301530.51105014014 }, { "content": "/// Makes duplicate argument names unique by appending incrementing numbers.\n\n///\n\n/// ```\n\n/// let mut names: Vec<String> =\n\n/// vec![\"foo\".into(), \"foo\".into(), \"bar\".into(), \"baz\".into(), \"bar\".into()];\n\n/// deduplicate_arg_names(&mut names);\n\n/// let expected: Vec<String> =\n\n/// vec![\"foo_1\".into(), \"foo_2\".into(), \"bar_1\".into(), \"baz\".into(), \"bar_2\".into()];\n\n/// assert_eq!(names, expected);\n\n/// ```\n\nfn deduplicate_arg_names(arg_names: &mut Vec<String>) {\n\n let arg_name_counts = arg_names.iter().fold(FxHashMap::default(), |mut m, name| {\n\n *m.entry(name).or_insert(0) += 1;\n\n m\n\n });\n\n let duplicate_arg_names: FxHashSet<String> = arg_name_counts\n\n .into_iter()\n\n .filter(|(_, count)| *count >= 2)\n\n .map(|(name, _)| name.clone())\n\n .collect();\n\n\n\n let mut counter_per_name = FxHashMap::default();\n\n for arg_name in arg_names.iter_mut() {\n\n if duplicate_arg_names.contains(arg_name) {\n\n let counter = counter_per_name.entry(arg_name.clone()).or_insert(1);\n\n arg_name.push('_');\n\n arg_name.push_str(&counter.to_string());\n\n *counter += 1;\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/ra_assists/src/handlers/add_function.rs", "rank": 22, "score": 300764.0519007085 }, { "content": "/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.\n\npub fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {\n\n paths\n\n 
.iter()\n\n .flat_map(|path| {\n\n let path = root_dir.to_owned().join(path);\n\n rust_files_in_dir(&path).into_iter()\n\n })\n\n .map(|path| {\n\n let text = read_text(&path);\n\n (path, text)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 23, "score": 299806.8460692765 }, { "content": "trait Foo { fn foo(&self); }\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 24, "score": 297386.184051551 }, { "content": "fn strange() -> bool { let _x: bool = return true; }\n\n\n", "file_path": "crates/ra_syntax/test_data/parser/ok/0035_weird_exprs.rs", "rank": 25, "score": 294018.9183989896 }, { "content": "pub fn run() -> io::Result<()> {\n\n let mut srv = ProcMacroSrv::default();\n\n\n\n while let Some(req) = read_request()? {\n\n let res = match req {\n\n msg::Request::ListMacro(task) => srv.list_macros(&task).map(msg::Response::ListMacro),\n\n msg::Request::ExpansionMacro(task) => {\n\n srv.expand(&task).map(msg::Response::ExpansionMacro)\n\n }\n\n };\n\n\n\n let msg = res.unwrap_or_else(|err| {\n\n msg::Response::Error(msg::ResponseError {\n\n code: msg::ErrorCode::ExpansionError,\n\n message: err,\n\n })\n\n });\n\n\n\n if let Err(err) = write_response(msg) {\n\n eprintln!(\"Write message error: {}\", err);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/ra_proc_macro_srv/src/cli.rs", "rank": 26, "score": 293872.10471775057 }, { "content": "fn add_const_impl(\n\n const_def_node: &SyntaxNode,\n\n acc: &mut Completions,\n\n ctx: &CompletionContext,\n\n const_: hir::Const,\n\n) {\n\n let const_name = const_.name(ctx.db).map(|n| n.to_string());\n\n\n\n if let Some(const_name) = const_name {\n\n let snippet = make_const_compl_syntax(&const_.source(ctx.db).value);\n\n\n\n let range = TextRange::new(const_def_node.text_range().start(), ctx.source_range().end());\n\n\n\n CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone())\n\n 
.text_edit(TextEdit::replace(range, snippet))\n\n .lookup_by(const_name)\n\n .kind(CompletionItemKind::Const)\n\n .set_documentation(const_.docs(ctx.db))\n\n .add_to(acc);\n\n }\n\n}\n\n\n", "file_path": "crates/ra_ide/src/completion/complete_trait_impl.rs", "rank": 27, "score": 293301.67676252074 }, { "content": "pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {\n\n let mut res = Vec::new();\n\n let mut stack = Vec::new();\n\n\n\n for event in file.syntax().preorder() {\n\n match event {\n\n WalkEvent::Enter(node) => {\n\n if let Some(mut symbol) = structure_node(&node) {\n\n symbol.parent = stack.last().copied();\n\n stack.push(res.len());\n\n res.push(symbol);\n\n }\n\n }\n\n WalkEvent::Leave(node) => {\n\n if structure_node(&node).is_some() {\n\n stack.pop().unwrap();\n\n }\n\n }\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "crates/ra_ide/src/display/structure.rs", "rank": 28, "score": 291624.08763286076 }, { "content": "pub fn param(name: String, ty: String) -> ast::Param {\n\n ast_from_text(&format!(\"fn f({}: {}) {{ }}\", name, ty))\n\n}\n\n\n", "file_path": "crates/ra_syntax/src/ast/make.rs", "rank": 29, "score": 290432.59221180284 }, { "content": " trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 30, "score": 290090.43205681164 }, { "content": "pub fn parse(no_dump: bool) -> Result<()> {\n\n let _p = profile(\"parsing\");\n\n let file = file()?;\n\n if !no_dump {\n\n println!(\"{:#?}\", file.syntax());\n\n }\n\n std::mem::forget(file);\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/rust-analyzer/src/cli.rs", "rank": 31, "score": 289357.9226434904 }, { "content": "pub fn highlight(rainbow: bool) -> Result<()> {\n\n let (analysis, file_id) = Analysis::from_single_file(read_stdin()?);\n\n let html = analysis.highlight_as_html(file_id, rainbow).unwrap();\n\n println!(\"{}\", html);\n\n Ok(())\n\n}\n\n\n", "file_path": 
"crates/rust-analyzer/src/cli.rs", "rank": 32, "score": 289357.9226434904 }, { "content": "pub fn to_lower_snake_case(s: &str) -> String {\n\n let mut buf = String::with_capacity(s.len());\n\n let mut prev = false;\n\n for c in s.chars() {\n\n if c.is_ascii_uppercase() && prev {\n\n buf.push('_')\n\n }\n\n prev = true;\n\n\n\n buf.push(c.to_ascii_lowercase());\n\n }\n\n buf\n\n}\n\n\n", "file_path": "crates/stdx/src/lib.rs", "rank": 33, "score": 289333.0394846631 }, { "content": "#[salsa::query_group(SourceDatabaseStorage)]\n\npub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {\n\n // Parses the file into the syntax tree.\n\n #[salsa::invoke(parse_query)]\n\n fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;\n\n\n\n /// The crate graph.\n\n #[salsa::input]\n\n fn crate_graph(&self) -> Arc<CrateGraph>;\n\n}\n\n\n", "file_path": "crates/ra_db/src/lib.rs", "rank": 34, "score": 286830.82655102713 }, { "content": "pub fn has_errors(node: &SyntaxNode) -> bool {\n\n node.children().any(|it| it.kind() == SyntaxKind::ERROR)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum InsertPosition<T> {\n\n First,\n\n Last,\n\n Before(T),\n\n After(T),\n\n}\n\n\n\npub struct TreeDiff {\n\n replacements: FxHashMap<SyntaxElement, SyntaxElement>,\n\n}\n\n\n\nimpl TreeDiff {\n\n pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {\n\n for (from, to) in self.replacements.iter() {\n\n builder.replace(from.text_range(), to.to_string())\n\n }\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.replacements.is_empty()\n\n }\n\n}\n\n\n", "file_path": "crates/ra_syntax/src/algo.rs", "rank": 35, "score": 286812.4286352207 }, { "content": "/// Read file and normalize newlines.\n\n///\n\n/// `rustc` seems to always normalize `\\r\\n` newlines to `\\n`:\n\n///\n\n/// ```\n\n/// let s = \"\n\n/// \";\n\n/// assert_eq!(s.as_bytes(), &[10]);\n\n/// ```\n\n///\n\n/// so this should always be correct.\n\npub fn read_text(path: &Path) -> 
String {\n\n fs::read_to_string(path)\n\n .unwrap_or_else(|_| panic!(\"File at {:?} should be valid\", path))\n\n .replace(\"\\r\\n\", \"\\n\")\n\n}\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 36, "score": 286799.3436165664 }, { "content": "trait Test { fn test(&self) -> bool; }\n\nimpl<T: Sized> Test for T {}\n\n\n", "file_path": "crates/ra_hir_ty/src/tests/traits.rs", "rank": 37, "score": 284959.54169066815 }, { "content": "fn existing_from_impl(\n\n sema: &'_ hir::Semantics<'_, RootDatabase>,\n\n variant: &ast::EnumVariant,\n\n) -> Option<()> {\n\n let variant = sema.to_def(variant)?;\n\n let enum_ = variant.parent_enum(sema.db);\n\n let krate = enum_.module(sema.db).krate();\n\n\n\n let from_trait = FamousDefs(sema, krate).core_convert_From()?;\n\n\n\n let enum_type = enum_.ty(sema.db);\n\n\n\n let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db);\n\n\n\n if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_from_impl_for_enum.rs", "rank": 38, "score": 282865.1888998058 }, { "content": " trait Foo { fn foo(&self, bar: Bar); }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 39, "score": 282754.4788415507 }, { "content": "pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {\n\n let syntax = file.syntax();\n\n extend(syntax.borrowed(), range)\n\n}\n\n\n\npub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option<TextRange> {\n\n if range.is_empty() {\n\n let offset = range.start();\n\n let mut leaves = find_leaf_at_offset(root, offset);\n\n if leaves.clone().all(|it| it.kind() == WHITESPACE) {\n\n return Some(extend_ws(root, leaves.next()?, offset));\n\n }\n\n let leaf = match leaves {\n\n LeafAtOffset::None => return None,\n\n LeafAtOffset::Single(l) => l,\n\n LeafAtOffset::Between(l, r) => pick_best(l, r),\n\n };\n\n return 
Some(leaf.range());\n\n };\n\n let node = find_covering_node(root, range);\n", "file_path": "crates/ra_syntax/test_data/parser/fuzz-failures/0001.rs", "rank": 40, "score": 280624.3347388954 }, { "content": "fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {\n\n let mut iter = segments.iter();\n\n if let Some(s) = iter.next() {\n\n buf.push_str(s);\n\n }\n\n for s in iter {\n\n buf.push_str(\"::\");\n\n buf.push_str(s);\n\n }\n\n}\n\n\n", "file_path": "crates/ra_assists/src/utils/insert_use.rs", "rank": 41, "score": 279870.1536943349 }, { "content": "pub fn foo<S: Iterator>() -> String\n\nwhere\n\n <S as Iterator>::Item: Eq,\n\n{\n\n \"\".to_owned()\n\n}\n", "file_path": "crates/ra_syntax/test_data/parser/ok/0036_fully_qualified.rs", "rank": 42, "score": 279598.4129253256 }, { "content": "fn punch_card() -> impl std::fmt::Debug {\n\n ..=..=.. .. .. .. .. .. .. .. .. .. .. ..=.. ..\n\n ..=.. ..=.. .. .. .. .. .. .. .. .. ..=..=..=..\n\n ..=.. ..=.. ..=.. ..=.. .. ..=..=.. .. ..=.. ..\n\n ..=..=.. .. ..=.. ..=.. ..=.. .. .. .. ..=.. ..\n\n ..=.. ..=.. ..=.. ..=.. .. ..=.. .. .. ..=.. ..\n\n ..=.. ..=.. ..=.. ..=.. .. .. ..=.. .. ..=.. ..\n\n ..=.. ..=.. .. ..=..=.. ..=..=.. .. .. ..=.. 
..\n\n}\n\n\n", "file_path": "crates/ra_syntax/test_data/parser/ok/0035_weird_exprs.rs", "rank": 43, "score": 278029.7542526249 }, { "content": "pub fn pushd(path: impl Into<PathBuf>) -> Pushd {\n\n Env::with(|env| env.pushd(path.into()));\n\n Pushd { _p: () }\n\n}\n\n\n\nimpl Drop for Pushd {\n\n fn drop(&mut self) {\n\n Env::with(|env| env.popd())\n\n }\n\n}\n\n\n\npub struct Pushenv {\n\n _p: (),\n\n}\n\n\n", "file_path": "xtask/src/not_bash.rs", "rank": 44, "score": 277532.41575290734 }, { "content": "fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {\n\n if let Some(kb) = b.kind() {\n\n match kb {\n\n ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(),\n\n ast::PathSegmentKind::SelfKw => a == \"self\",\n\n ast::PathSegmentKind::SuperKw => a == \"super\",\n\n ast::PathSegmentKind::CrateKw => a == \"crate\",\n\n ast::PathSegmentKind::Type { .. } => false, // not allowed in imports\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "crates/ra_assists/src/utils/insert_use.rs", "rank": 45, "score": 276250.6892813849 }, { "content": "fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {\n\n a == b.text()\n\n}\n\n\n", "file_path": "crates/ra_assists/src/utils/insert_use.rs", "rank": 46, "score": 276250.6892813849 }, { "content": " trait Foo<T> { fn foo(&self, bar: T); }\n\n pub struct Param;\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 47, "score": 276182.4832678007 }, { "content": " trait Foo { fn foo(&self, bar: Bar<u32>); }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 48, "score": 276182.4832678007 }, { "content": " trait Foo { fn foo(&self, bar: Bar<Baz>); }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 49, "score": 276182.4832678007 }, { "content": "/// Infallible version of `try_extract_range()`.\n\npub fn extract_range(text: &str) -> (TextRange, String) {\n\n match 
try_extract_range(text) {\n\n None => panic!(\"text should contain cursor marker\"),\n\n Some(result) => result,\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 50, "score": 275753.25967988354 }, { "content": "/// Infallible version of `try_extract_offset()`.\n\npub fn extract_offset(text: &str) -> (TextSize, String) {\n\n match try_extract_offset(text) {\n\n None => panic!(\"text should contain cursor marker\"),\n\n Some(result) => result,\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 51, "score": 275753.25967988354 }, { "content": "pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {\n\n let range = if range.is_empty() {\n\n let syntax = file.syntax();\n\n let text = syntax.text().slice(range.start()..);\n\n let pos = match text.find_char('\\n') {\n\n None => return TextEditBuilder::default().finish(),\n\n Some(pos) => pos,\n\n };\n\n TextRange::at(range.start() + pos, TextSize::of('\\n'))\n\n } else {\n\n range\n\n };\n\n\n\n let node = match find_covering_element(file.syntax(), range) {\n\n NodeOrToken::Node(node) => node,\n\n NodeOrToken::Token(token) => token.parent(),\n\n };\n\n let mut edit = TextEditBuilder::default();\n\n for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {\n\n let range = match range.intersect(token.text_range()) {\n", "file_path": "crates/ra_ide/src/join_lines.rs", "rank": 52, "score": 275574.96135140304 }, { "content": "fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool {\n\n Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n pub(super) use insta::assert_snapshot;\n\n pub(super) use ra_db::fixture::WithFixture;\n\n\n\n pub(super) use crate::{diagnostics::MissingMatchArms, test_db::TestDB};\n\n\n\n pub(super) fn check_diagnostic_message(content: &str) -> String {\n\n 
TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().0\n\n }\n\n\n\n pub(super) fn check_diagnostic(content: &str) {\n\n let diagnostic_count =\n\n TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1;\n\n\n\n assert_eq!(1, diagnostic_count, \"no diagnostic reported\");\n", "file_path": "crates/ra_hir_ty/src/_match.rs", "rank": 53, "score": 275031.1111462101 }, { "content": "pub fn timeit(label: &'static str) -> impl Drop {\n\n struct Guard {\n\n label: &'static str,\n\n start: Instant,\n\n }\n\n\n\n impl Drop for Guard {\n\n fn drop(&mut self) {\n\n eprintln!(\"{}: {:?}\", self.label, self.start.elapsed())\n\n }\n\n }\n\n\n\n Guard { label, start: Instant::now() }\n\n}\n\n\n", "file_path": "crates/stdx/src/lib.rs", "rank": 54, "score": 274988.89520780795 }, { "content": "pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {\n\n let path = path.as_ref();\n\n if !path.exists() {\n\n return Ok(());\n\n }\n\n if path.is_file() {\n\n fs2::remove_file(path)\n\n } else {\n\n fs2::remove_dir_all(path)\n\n }\n\n}\n\n\n", "file_path": "xtask/src/not_bash.rs", "rank": 55, "score": 274988.8952078079 }, { "content": "pub fn load_cargo(\n\n root: &Path,\n\n load_out_dirs_from_check: bool,\n\n with_proc_macro: bool,\n\n) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> {\n\n let root = std::env::current_dir()?.join(root);\n\n let root = ProjectRoot::discover_single(&root)?;\n\n let ws = ProjectWorkspace::load(\n\n root,\n\n &CargoConfig { load_out_dirs_from_check, ..Default::default() },\n\n true,\n\n )?;\n\n\n\n let mut extern_dirs = FxHashSet::default();\n\n extern_dirs.extend(ws.out_dirs());\n\n\n\n let mut project_roots = ws.to_roots();\n\n project_roots.extend(extern_dirs.iter().cloned().map(PackageRoot::new_non_member));\n\n\n\n let (sender, receiver) = unbounded();\n", "file_path": "crates/rust-analyzer/src/cli/load_cargo.rs", "rank": 56, "score": 274672.7313422619 }, { "content": "fn infer_with_mismatches(content: &str, 
include_mismatches: bool) -> String {\n\n let (db, file_id) = TestDB::with_single_file(content);\n\n\n\n let mut buf = String::new();\n\n\n\n let mut infer_def = |inference_result: Arc<InferenceResult>,\n\n body_source_map: Arc<BodySourceMap>| {\n\n let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();\n\n let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();\n\n\n\n for (pat, ty) in inference_result.type_of_pat.iter() {\n\n let syntax_ptr = match body_source_map.pat_syntax(pat) {\n\n Ok(sp) => {\n\n let root = db.parse_or_expand(sp.file_id).unwrap();\n\n sp.map(|ptr| {\n\n ptr.either(\n\n |it| it.to_node(&root).syntax().clone(),\n\n |it| it.to_node(&root).syntax().clone(),\n\n )\n\n })\n", "file_path": "crates/ra_hir_ty/src/tests.rs", "rank": 57, "score": 273964.5737873977 }, { "content": "/// Extracts `TextRange` or `TextSize` depending on the amount of `<|>` markers\n\n/// found in `text`.\n\n///\n\n/// # Panics\n\n/// Panics if no `<|>` marker is present in the `text`.\n\npub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {\n\n if let Some((range, text)) = try_extract_range(text) {\n\n return (RangeOrOffset::Range(range), text);\n\n }\n\n let (offset, text) = extract_offset(text);\n\n (RangeOrOffset::Offset(offset), text)\n\n}\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 58, "score": 273427.4825574022 }, { "content": "fn check_file_invariants(file: &SourceFile) {\n\n let root = file.syntax();\n\n validation::validate_block_structure(root);\n\n}\n\n\n", "file_path": "crates/ra_syntax/src/fuzz.rs", "rank": 59, "score": 273394.79633799766 }, { "content": "trait Foo<T> { fn foo(&self, t: T) -> &T; }\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 60, "score": 272872.94545204035 }, { "content": "pub fn reindent(text: &str, indent: &str) -> String {\n\n let indent = format!(\"\\n{}\", indent);\n\n text.lines().intersperse(&indent).collect()\n\n}\n\n\n", "file_path": 
"crates/ra_fmt/src/lib.rs", "rank": 61, "score": 272571.1711341989 }, { "content": "fn bar(arg: fn() -> Baz) {\n\n ${0:todo!()}\n\n}\n\n\",\n\n )\n\n }\n\n\n\n #[test]\n\n #[ignore]\n\n // FIXME Fix closure type printing to make this test pass\n\n fn add_function_with_closure_arg() {\n\n check_assist(\n\n add_function,\n\n r\"\n", "file_path": "crates/ra_assists/src/handlers/add_function.rs", "rank": 62, "score": 272407.1480102954 }, { "content": "pub trait From<T> {\n\n fn from(T) -> Self;\n\n}\"#,\n\n );\n\n }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_from_impl_for_enum.rs", "rank": 63, "score": 272301.4595544344 }, { "content": "pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection) -> Result<()> {\n\n log::info!(\"initial config: {:#?}\", config);\n\n\n\n // Windows scheduler implements priority boosts: if thread waits for an\n\n // event (like a condvar), and event fires, priority of the thread is\n\n // temporary bumped. This optimization backfires in our case: each time the\n\n // `main_loop` schedules a task to run on a threadpool, the worker threads\n\n // gets a higher priority, and (on a machine with fewer cores) displaces the\n\n // main loop! 
We work-around this by marking the main loop as a\n\n // higher-priority thread.\n\n //\n\n // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities\n\n // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts\n\n // https://github.com/rust-analyzer/rust-analyzer/issues/2835\n\n #[cfg(windows)]\n\n unsafe {\n\n use winapi::um::processthreadsapi::*;\n\n let thread = GetCurrentThread();\n\n let thread_priority_above_normal = 1;\n\n SetThreadPriority(thread, thread_priority_above_normal);\n", "file_path": "crates/rust-analyzer/src/main_loop.rs", "rank": 64, "score": 271884.05793177575 }, { "content": "fn reformat(text: impl std::fmt::Display) -> Result<String> {\n\n let _e = pushenv(\"RUSTUP_TOOLCHAIN\", \"stable\");\n\n ensure_rustfmt()?;\n\n let stdout = run!(\n\n \"rustfmt --config-path {} --config fn_single_line=true\", project_root().join(\"rustfmt.toml\").display();\n\n <text.to_string().as_bytes()\n\n )?;\n\n let preamble = \"Generated file, do not edit by hand, see `xtask/src/codegen`\";\n\n Ok(format!(\"//! 
{}\\n\\n{}\\n\", preamble, stdout))\n\n}\n\n\n", "file_path": "xtask/src/lib.rs", "rank": 65, "score": 270406.8862336074 }, { "content": "/// Compare a line with an expected pattern.\n\n/// - Use `[..]` as a wildcard to match 0 or more characters on the same line\n\n/// (similar to `.*` in a regex).\n\npub fn lines_match(expected: &str, actual: &str) -> bool {\n\n // Let's not deal with / vs \\ (windows...)\n\n // First replace backslash-escaped backslashes with forward slashes\n\n // which can occur in, for example, JSON output\n\n let expected = expected.replace(r\"\\\\\", \"/\").replace(r\"\\\", \"/\");\n\n let mut actual: &str = &actual.replace(r\"\\\\\", \"/\").replace(r\"\\\", \"/\");\n\n for (i, part) in expected.split(\"[..]\").enumerate() {\n\n match actual.find(part) {\n\n Some(j) => {\n\n if i == 0 && j != 0 {\n\n return false;\n\n }\n\n actual = &actual[j + part.len()..];\n\n }\n\n None => return false,\n\n }\n\n }\n\n actual.is_empty() || expected.ends_with(\"[..]\")\n\n}\n\n\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 66, "score": 270206.1645596876 }, { "content": " trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 67, "score": 270026.45624403056 }, { "content": " trait Foo<T> { fn foo(&self, bar: Bar<T>); }\n\n}\n", "file_path": "crates/ra_assists/src/handlers/add_missing_impl_members.rs", "rank": 68, "score": 270026.45624403056 }, { "content": "pub fn get_rustc_cfg_options(target: Option<&String>) -> CfgOptions {\n\n let mut cfg_options = CfgOptions::default();\n\n\n\n // Some nightly-only cfgs, which are required for stdlib\n\n {\n\n cfg_options.insert_atom(\"target_thread_local\".into());\n\n for &target_has_atomic in [\"8\", \"16\", \"32\", \"64\", \"cas\", \"ptr\"].iter() {\n\n cfg_options.insert_key_value(\"target_has_atomic\".into(), target_has_atomic.into());\n\n cfg_options\n\n .insert_key_value(\"target_has_atomic_load_store\".into(), 
target_has_atomic.into());\n\n }\n\n }\n\n\n\n let rustc_cfgs = || -> Result<String> {\n\n // `cfg(test)` and `cfg(debug_assertion)` are handled outside, so we suppress them here.\n\n let mut cmd = Command::new(ra_toolchain::rustc());\n\n cmd.args(&[\"--print\", \"cfg\", \"-O\"]);\n\n if let Some(target) = target {\n\n cmd.args(&[\"--target\", target.as_str()]);\n\n }\n", "file_path": "crates/ra_project_model/src/lib.rs", "rank": 69, "score": 268973.37088097027 }, { "content": "fn check_unsize_impl_prerequisites(db: &dyn HirDatabase, krate: CrateId) -> bool {\n\n // the Unsize trait needs to exist and have two type parameters (Self and T)\n\n let unsize_trait = match get_unsize_trait(db, krate) {\n\n Some(t) => t,\n\n None => return false,\n\n };\n\n let generic_params = generics(db.upcast(), unsize_trait.into());\n\n generic_params.len() == 2\n\n}\n\n\n", "file_path": "crates/ra_hir_ty/src/traits/builtin.rs", "rank": 70, "score": 268568.3437174527 }, { "content": "pub fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {\n\n const BRACES: &[SyntaxKind] =\n\n &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];\n\n let (brace_node, brace_idx) = file\n\n .syntax()\n\n .token_at_offset(offset)\n\n .filter_map(|node| {\n\n let idx = BRACES.iter().position(|&brace| brace == node.kind())?;\n\n Some((node, idx))\n\n })\n\n .next()?;\n\n let parent = brace_node.parent();\n\n let matching_kind = BRACES[brace_idx ^ 1];\n\n let matching_node = parent.children_with_tokens().find(|node| node.kind() == matching_kind)?;\n\n Some(matching_node.text_range().start())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use test_utils::{add_cursor, assert_eq_text, extract_offset};\n", "file_path": "crates/ra_ide/src/matching_brace.rs", "rank": 71, "score": 268555.0075058618 }, { "content": "fn process(map: HashMap<String, String>) {}\n\n\"#####,\n\n )\n\n}\n\n\n", "file_path": "crates/ra_assists/src/tests/generated.rs", "rank": 72, "score": 
268075.393680837 }, { "content": "/// Inserts `<|>` marker into the `text` at `offset`.\n\npub fn add_cursor(text: &str, offset: TextSize) -> String {\n\n let offset: usize = offset.into();\n\n let mut res = String::new();\n\n res.push_str(&text[..offset]);\n\n res.push_str(\"<|>\");\n\n res.push_str(&text[offset..]);\n\n res\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct FixtureEntry {\n\n pub meta: FixtureMeta,\n\n pub text: String,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum FixtureMeta {\n\n Root { path: RelativePathBuf },\n\n File(FileMeta),\n\n}\n", "file_path": "crates/test_utils/src/lib.rs", "rank": 73, "score": 267849.854464627 }, { "content": "#[test]\n\nfn doctest_add_impl_default_members() {\n\n check_doc_test(\n\n \"add_impl_default_members\",\n\n r#####\"\n", "file_path": "crates/ra_assists/src/tests/generated.rs", "rank": 74, "score": 266621.37261144136 }, { "content": "struct Ctx<T: Clone> {\n\n data: T,\n\n}\n\n\n\nimpl<T: Clone> Ctx<T> {\n\n fn $0new(data: T) -> Self { Self { data } }\n\n}\n\n\n\n\"#####,\n\n )\n\n}\n\n\n", "file_path": "crates/ra_assists/src/tests/generated.rs", "rank": 75, "score": 266330.55777424265 }, { "content": "fn traverse(node: &SyntaxNode, go: &mut impl FnMut(&SyntaxNode) -> bool) {\n\n if !go(node) {\n\n return;\n\n }\n\n for ref child in node.children() {\n\n traverse(child, go);\n\n }\n\n}\n\n\n", "file_path": "crates/ra_ide/src/ssr.rs", "rank": 76, "score": 265632.92962764465 }, { "content": "pub fn replace(buf: &mut String, from: char, to: &str) {\n\n if !buf.contains(from) {\n\n return;\n\n }\n\n // FIXME: do this in place.\n\n *buf = buf.replace(from, to)\n\n}\n", "file_path": "crates/stdx/src/lib.rs", "rank": 77, "score": 265352.0730860721 }, { "content": "//! Settings for tweaking assists.\n\n//!\n\n//! The fun thing here is `SnippetCap` -- this type can only be created in this\n\n//! module, and we use to statically check that we only produce snippet\n\n//! 
assists if we are allowed to.\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct AssistConfig {\n\n pub snippet_cap: Option<SnippetCap>,\n\n}\n\n\n\nimpl AssistConfig {\n\n pub fn allow_snippets(&mut self, yes: bool) {\n\n self.snippet_cap = if yes { Some(SnippetCap { _private: () }) } else { None }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub struct SnippetCap {\n\n _private: (),\n\n}\n\n\n\nimpl Default for AssistConfig {\n\n fn default() -> Self {\n\n AssistConfig { snippet_cap: Some(SnippetCap { _private: () }) }\n\n }\n\n}\n", "file_path": "crates/ra_assists/src/assist_config.rs", "rank": 83, "score": 62.16647091662893 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct CargoConfig {\n\n /// Do not activate the `default` feature.\n\n pub no_default_features: bool,\n\n\n\n /// Activate all available features\n\n pub all_features: bool,\n\n\n\n /// List of features to activate.\n\n /// This will be ignored if `cargo_all_features` is true.\n\n pub features: Vec<String>,\n\n\n\n /// Runs cargo check on launch to figure out the correct values of OUT_DIR\n\n pub load_out_dirs_from_check: bool,\n\n\n\n /// rustc target\n\n pub target: Option<String>,\n", "file_path": "crates/ra_project_model/src/cargo_workspace.rs", "rank": 84, "score": 60.01973629050193 }, { "content": " CargoCommand { command: String, all_targets: bool, all_features: bool, extra_args: Vec<String> },\n\n CustomCommand { command: String, args: Vec<String> },\n\n}\n\n\n\n/// Flycheck wraps the shared state and communication machinery used for\n\n/// running `cargo check` (or other compatible command) and providing\n\n/// diagnostics based on the output.\n\n/// The spawned thread is shut down when this struct is dropped.\n\n#[derive(Debug)]\n\npub struct Flycheck {\n\n // XXX: drop order is significant\n\n cmd_send: Sender<CheckCommand>,\n\n handle: jod_thread::JoinHandle<()>,\n\n pub task_recv: Receiver<CheckTask>,\n\n}\n\n\n\nimpl 
Flycheck {\n\n pub fn new(config: FlycheckConfig, workspace_root: PathBuf) -> Flycheck {\n\n let (task_send, task_recv) = unbounded::<CheckTask>();\n\n let (cmd_send, cmd_recv) = unbounded::<CheckCommand>();\n", "file_path": "crates/ra_flycheck/src/lib.rs", "rank": 85, "score": 55.69913368783119 }, { "content": "}\n\n\n\nimpl Default for CargoConfig {\n\n fn default() -> Self {\n\n CargoConfig {\n\n no_default_features: false,\n\n all_features: true,\n\n features: Vec::new(),\n\n load_out_dirs_from_check: false,\n\n target: None,\n\n }\n\n }\n\n}\n\n\n\npub type Package = Idx<PackageData>;\n\n\n\npub type Target = Idx<TargetData>;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PackageData {\n", "file_path": "crates/ra_project_model/src/cargo_workspace.rs", "rank": 86, "score": 55.69670930703738 }, { "content": "//! Settings for tweaking completion.\n\n//!\n\n//! The fun thing here is `SnippetCap` -- this type can only be created in this\n\n//! module, and we use to statically check that we only produce snippet\n\n//! 
completions if we are allowed to.\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct CompletionConfig {\n\n pub enable_postfix_completions: bool,\n\n pub add_call_parenthesis: bool,\n\n pub add_call_argument_snippets: bool,\n\n pub snippet_cap: Option<SnippetCap>,\n\n}\n\n\n\nimpl CompletionConfig {\n\n pub fn allow_snippets(&mut self, yes: bool) {\n\n self.snippet_cap = if yes { Some(SnippetCap { _private: () }) } else { None }\n\n }\n\n}\n\n\n", "file_path": "crates/ra_ide/src/completion/completion_config.rs", "rank": 88, "score": 53.07853776683423 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub struct SnippetCap {\n\n _private: (),\n\n}\n\n\n\nimpl Default for CompletionConfig {\n\n fn default() -> Self {\n\n CompletionConfig {\n\n enable_postfix_completions: true,\n\n add_call_parenthesis: true,\n\n add_call_argument_snippets: true,\n\n snippet_cap: Some(SnippetCap { _private: () }),\n\n }\n\n }\n\n}\n", "file_path": "crates/ra_ide/src/completion/completion_config.rs", "rank": 89, "score": 52.23669581393577 }, { "content": "\n\nimpl Default for InlayHintsConfig {\n\n fn default() -> Self {\n\n Self { type_hints: true, parameter_hints: true, chaining_hints: true, max_length: None }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum InlayKind {\n\n TypeHint,\n\n ParameterHint,\n\n ChainingHint,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct InlayHint {\n\n pub range: TextRange,\n\n pub kind: InlayKind,\n\n pub label: SmolStr,\n\n}\n", "file_path": "crates/ra_ide/src/inlay_hints.rs", "rank": 91, "score": 49.91539391398823 }, { "content": "use ra_db::FileRange;\n\nuse ra_ide_db::{source_change::SourceChange, RootDatabase};\n\nuse ra_syntax::TextRange;\n\n\n\npub(crate) use crate::assist_context::{AssistContext, Assists};\n\n\n\npub use assist_config::AssistConfig;\n\n\n\n/// Unique identifier of the assist, should not be shown to the user\n\n/// directly.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct 
AssistId(pub &'static str);\n\n\n\n#[derive(Clone, Debug)]\n\npub struct GroupLabel(pub String);\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Assist {\n\n pub id: AssistId,\n\n /// Short description of the assist, as shown in the UI.\n", "file_path": "crates/ra_assists/src/lib.rs", "rank": 92, "score": 49.04385489601968 }, { "content": " } else {\n\n r.args.push(\"--no-run\".into());\n\n }\n\n let debug_lens = CodeLens {\n\n range: r.range,\n\n command: Some(Command {\n\n title: \"Debug\".into(),\n\n command: \"rust-analyzer.debugSingle\".into(),\n\n arguments: Some(vec![to_value(r).unwrap()]),\n\n }),\n\n data: None,\n\n };\n\n lenses.push(debug_lens);\n\n }\n\n }\n\n }\n\n\n\n if world.config.lens.impementations {\n\n // Handle impls\n\n lenses.extend(\n", "file_path": "crates/rust-analyzer/src/main_loop/handlers.rs", "rank": 93, "score": 48.35404257256161 }, { "content": "//! cargo_check provides the functionality needed to run `cargo check` or\n\n//! another compatible command (f.x. clippy) in a background thread and provide\n\n//! 
LSP diagnostics based on the output of the command.\n\n\n\nuse std::{\n\n io::{self, BufReader},\n\n path::PathBuf,\n\n process::{Command, Stdio},\n\n time::Instant,\n\n};\n\n\n\nuse cargo_metadata::Message;\n\nuse crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender};\n\n\n\npub use cargo_metadata::diagnostic::{\n\n Applicability, Diagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion,\n\n};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum FlycheckConfig {\n", "file_path": "crates/ra_flycheck/src/lib.rs", "rank": 94, "score": 47.59368568894453 }, { "content": "impl Request for CodeActionRequest {\n\n type Params = lsp_types::CodeActionParams;\n\n type Result = Option<Vec<CodeAction>>;\n\n const METHOD: &'static str = \"textDocument/codeAction\";\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]\n\npub struct CodeAction {\n\n pub title: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub group: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub kind: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub command: Option<lsp_types::Command>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub edit: Option<SnippetWorkspaceEdit>,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]\n", "file_path": "crates/rust-analyzer/src/lsp_ext.rs", "rank": 97, "score": 45.50162050611191 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub enum Adt {\n\n Struct(Struct),\n\n Union(Union),\n\n Enum(Enum),\n\n}\n\nimpl_froms!(Adt: Struct, Union, Enum);\n\n\n\nimpl Adt {\n\n pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {\n\n let subst = db.generic_defaults(self.into());\n\n subst.iter().any(|ty| ty == &Ty::Unknown)\n\n }\n\n\n\n /// Turns this ADT into a type. 
Any type parameters of the ADT will be\n\n /// turned into unknown types, which is good for e.g. finding the most\n\n /// general set of completions, but will not look very nice when printed.\n\n pub fn ty(self, db: &dyn HirDatabase) -> Type {\n\n let id = AdtId::from(self);\n\n Type::from_def(db, id.module(db.upcast()).krate, id)\n", "file_path": "crates/ra_hir/src/code_model.rs", "rank": 98, "score": 44.94298092257149 }, { "content": " }\n\n };\n\n\n\n let mut r = to_lsp_runnable(&world, file_id, runnable)?;\n\n if world.config.lens.run {\n\n let lens = CodeLens {\n\n range: r.range,\n\n command: Some(Command {\n\n title: run_title.to_string(),\n\n command: \"rust-analyzer.runSingle\".into(),\n\n arguments: Some(vec![to_value(&r).unwrap()]),\n\n }),\n\n data: None,\n\n };\n\n lenses.push(lens);\n\n }\n\n\n\n if debugee && world.config.lens.debug {\n\n if r.args[0] == \"run\" {\n\n r.args[0] = \"build\".into();\n", "file_path": "crates/rust-analyzer/src/main_loop/handlers.rs", "rank": 99, "score": 44.75480816911161 } ]
Rust
wasm/src/context.rs
PoiScript/blog
e6b08531d30bcb41a5fde53bfe07eb20f2a152c8
use chrono::serde::{ts_milliseconds, ts_milliseconds_option}; use chrono::{DateTime, Utc}; use maud::Markup; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use wasm_bindgen::prelude::*; #[derive(Debug, Serialize, Deserialize)] pub struct OrgMeta { pub slug: String, pub title: String, #[serde(with = "ts_milliseconds")] pub published: DateTime<Utc>, #[serde(default, with = "ts_milliseconds_option")] pub updated: Option<DateTime<Utc>>, pub tags: Vec<String>, } #[derive(Debug, Serialize, Deserialize)] pub struct ImgMeta { pub slug: String, pub width: u32, pub height: u32, } #[wasm_bindgen(typescript_custom_section)] const HIGHLIGHTER_STYLE: &'static str = r#" interface Highlighter { highlight(code: string, lang: string): string; } "#; #[wasm_bindgen] extern "C" { #[wasm_bindgen(typescript_type = "Highlighter")] pub type Highlighter; #[wasm_bindgen(method)] pub fn highlight(this: &Highlighter, code: &str, lang: &str) -> String; } #[wasm_bindgen] pub struct Context { pub(crate) base_url: String, pub(crate) content: Content, pub(crate) highlighter: Highlighter, pub(crate) org_meta: HashMap<String, OrgMeta>, pub(crate) img_meta: HashMap<String, ImgMeta>, } pub enum Content { Txt { status: u32, body: String, }, Amp { status: u32, head: Markup, body: Markup, }, Html { status: u32, head: Markup, body: Markup, }, Rss { status: u32, body: Markup, }, } #[wasm_bindgen] impl Context { #[wasm_bindgen(constructor)] pub fn new(mut base_url: String, highlighter: Highlighter) -> Context { let len = base_url.trim_end_matches('/').len(); base_url.truncate(len); Context { base_url, highlighter, content: Content::Txt { status: 404, body: String::new(), }, org_meta: HashMap::new(), img_meta: HashMap::new(), } } #[wasm_bindgen] pub fn get_type(&self) -> String { match self.content { Content::Txt { .. } => "txt", Content::Amp { .. } => "amp", Content::Html { .. } => "html", Content::Rss { .. 
} => "rss", } .into() } #[wasm_bindgen] pub fn get_status(&self) -> u32 { match self.content { Content::Amp { status, .. } => status, Content::Html { status, .. } => status, Content::Rss { status, .. } => status, Content::Txt { status, .. } => status, } } #[wasm_bindgen] pub fn get_head(&self) -> Option<String> { match &self.content { Content::Amp { head, .. } => Some(head.0.clone()), Content::Html { head, .. } => Some(head.0.clone()), _ => None, } } #[wasm_bindgen] pub fn get_body(&self) -> Option<String> { match &self.content { Content::Amp { body, .. } => Some(body.0.clone()), Content::Html { body, .. } => Some(body.0.clone()), Content::Rss { body, .. } => Some(body.0.clone()), Content::Txt { body, .. } => Some(body.clone()), } } #[wasm_bindgen] pub fn get_version(&self) -> String { concat!( "Solomon ", env!("CARGO_PKG_VERSION"), " (", env!("CARGO_GIT_HASH"), "): ", env!("CARGO_BUILD_TIME") ) .into() } } impl Context { pub fn find_prev_and_next(&self, date: &DateTime<Utc>) -> (Option<&OrgMeta>, Option<&OrgMeta>) { ( self.org_meta .values() .filter(|org| org.slug.starts_with("/post/") && org.published < *date) .min_by(|a, b| b.published.cmp(&a.published)), self.org_meta .values() .filter(|org| org.slug.starts_with("/post/") && org.published > *date) .min_by(|a, b| b.published.cmp(&a.published)), ) } pub async fn load_org(&self, slug: &str) -> Result<String, JsValue> { let url = format!("{}.org", slug.trim_start_matches('/')); let text = self.load(&url).await?; Ok(text) } pub async fn load_org_meta(&mut self) -> Result<(), JsValue> { if !self.org_meta.is_empty() { return Ok(()); } let text = self.load("org-meta.json").await?; self.org_meta = serde_json::from_str(&text) .map_err(|err| JsValue::from_str(&format!("seder error: {}", err)))?; Ok(()) } pub async fn load_img_meta(&mut self) -> Result<(), JsValue> { if !self.img_meta.is_empty() { return Ok(()); } let text = self.load("img-meta.json").await?; self.img_meta = serde_json::from_str(&text) .map_err(|err| 
JsValue::from_str(&format!("seder error: {}", err)))?; Ok(()) } async fn load(&self, path: &str) -> Result<String, JsValue> { if cfg!(feature = "worker") { #[wasm_bindgen] extern "C" { #[wasm_bindgen(catch, js_namespace = SOLOMON_KV, js_name = "get")] async fn kv_get(key: &str) -> Result<JsValue, JsValue>; } let text = kv_get(path).await?; let text = text.as_string().unwrap(); Ok(text) } else { use wasm_bindgen::JsCast; use wasm_bindgen_futures::JsFuture; use web_sys::Response; let window = web_sys::window().unwrap(); let url = format!("{}/{}", self.base_url, path); let response = JsFuture::from(window.fetch_with_str(&url)).await?; assert!(response.is_instance_of::<Response>()); let response: Response = response.dyn_into().unwrap(); let text = JsFuture::from(response.text()?).await?; let text = text.as_string().unwrap(); Ok(text) } } }
use chrono::serde::{ts_milliseconds, ts_milliseconds_option}; use chrono::{DateTime, Utc}; use maud::Markup; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use wasm_bindgen::prelude::*; #[derive(Debug, Serialize, Deserialize)] pub struct OrgMeta { pub slug: String, pub title: String, #[serde(with = "ts_milliseconds")] pub published: DateTime<Utc>, #[serde(default, with = "ts_milliseconds_option")] pub updated: Option<DateTime<Utc>>, pub tags: Vec<String>, } #[derive(Debug, Serialize, Deserialize)] pub struct ImgMeta { pub slug: String, pub width: u32, pub height: u32, } #[wasm_bindgen(typescript_custom_section)] const HIGHLIGHTER_STYLE: &'static str = r#" interface Highlighter { highlight(code: string, lang: string): string; } "#; #[wasm_bindgen] extern "C" { #[wasm_bindgen(typescript_type = "Highlighter")] pub type Highlighter; #[wasm_bindgen(method)] pub fn highlight(this: &Highlighter, code: &str, lang: &str) -> String; } #[wasm_bindgen] pub struct Context { pub(crate) base_url: String, pub(crate) content: Content, pub(crate) highlighter: Highlighter, pub(crate) org_meta: HashMap<String, OrgMeta>, pub(crate) img_meta: HashMap<String, ImgMeta>, } pub enum Content { Txt { status: u32, body: String, }, Amp { status: u32, head: Markup, body: Markup, }, Html { status: u32, head: Markup, body: Markup, }, Rss { status: u32, body: Markup, }, } #[wasm_bindgen] impl Context { #[wasm_bindgen(constructor)] pub fn new(mut base_url: String, highlighter: Highlighter) -> Context { let len = base_url.trim_end_matches('/').len(); base_url.truncate(len); Context { base_url, highlighter, content: Content::Txt { status: 404, body: String::new(), }, org_meta: HashMap::new(), img_meta: HashMap::new(), } } #[wasm_bindgen]
#[wasm_bindgen] pub fn get_status(&self) -> u32 { match self.content { Content::Amp { status, .. } => status, Content::Html { status, .. } => status, Content::Rss { status, .. } => status, Content::Txt { status, .. } => status, } } #[wasm_bindgen] pub fn get_head(&self) -> Option<String> { match &self.content { Content::Amp { head, .. } => Some(head.0.clone()), Content::Html { head, .. } => Some(head.0.clone()), _ => None, } } #[wasm_bindgen] pub fn get_body(&self) -> Option<String> { match &self.content { Content::Amp { body, .. } => Some(body.0.clone()), Content::Html { body, .. } => Some(body.0.clone()), Content::Rss { body, .. } => Some(body.0.clone()), Content::Txt { body, .. } => Some(body.clone()), } } #[wasm_bindgen] pub fn get_version(&self) -> String { concat!( "Solomon ", env!("CARGO_PKG_VERSION"), " (", env!("CARGO_GIT_HASH"), "): ", env!("CARGO_BUILD_TIME") ) .into() } } impl Context { pub fn find_prev_and_next(&self, date: &DateTime<Utc>) -> (Option<&OrgMeta>, Option<&OrgMeta>) { ( self.org_meta .values() .filter(|org| org.slug.starts_with("/post/") && org.published < *date) .min_by(|a, b| b.published.cmp(&a.published)), self.org_meta .values() .filter(|org| org.slug.starts_with("/post/") && org.published > *date) .min_by(|a, b| b.published.cmp(&a.published)), ) } pub async fn load_org(&self, slug: &str) -> Result<String, JsValue> { let url = format!("{}.org", slug.trim_start_matches('/')); let text = self.load(&url).await?; Ok(text) } pub async fn load_org_meta(&mut self) -> Result<(), JsValue> { if !self.org_meta.is_empty() { return Ok(()); } let text = self.load("org-meta.json").await?; self.org_meta = serde_json::from_str(&text) .map_err(|err| JsValue::from_str(&format!("seder error: {}", err)))?; Ok(()) } pub async fn load_img_meta(&mut self) -> Result<(), JsValue> { if !self.img_meta.is_empty() { return Ok(()); } let text = self.load("img-meta.json").await?; self.img_meta = serde_json::from_str(&text) .map_err(|err| 
JsValue::from_str(&format!("seder error: {}", err)))?; Ok(()) } async fn load(&self, path: &str) -> Result<String, JsValue> { if cfg!(feature = "worker") { #[wasm_bindgen] extern "C" { #[wasm_bindgen(catch, js_namespace = SOLOMON_KV, js_name = "get")] async fn kv_get(key: &str) -> Result<JsValue, JsValue>; } let text = kv_get(path).await?; let text = text.as_string().unwrap(); Ok(text) } else { use wasm_bindgen::JsCast; use wasm_bindgen_futures::JsFuture; use web_sys::Response; let window = web_sys::window().unwrap(); let url = format!("{}/{}", self.base_url, path); let response = JsFuture::from(window.fetch_with_str(&url)).await?; assert!(response.is_instance_of::<Response>()); let response: Response = response.dyn_into().unwrap(); let text = JsFuture::from(response.text()?).await?; let text = text.as_string().unwrap(); Ok(text) } } }
pub fn get_type(&self) -> String { match self.content { Content::Txt { .. } => "txt", Content::Amp { .. } => "amp", Content::Html { .. } => "html", Content::Rss { .. } => "rss", } .into() }
function_block-function_prefix_line
[ { "content": "pub fn get_id(n: usize, s: &str) -> String {\n\n let mut hasher = DefaultHasher::new();\n\n\n\n for c in s.chars() {\n\n hasher.write_u32(c as u32);\n\n }\n\n\n\n format!(\"{n:02x}{:.6x}\", hasher.finish() as u32)\n\n}\n", "file_path": "wasm/src/utils.rs", "rank": 0, "score": 119540.61385972479 }, { "content": "fn up_next_next(org: &OrgMeta) -> Markup {\n\n html! {\n\n a.link.end data-router href=(org.slug) {\n\n div {\n\n .label { \"Next\" }\n\n .title { (org.title) }\n\n }\n\n .icon.right aria-hidden=\"true\" {\n\n svg fill=\"currentColor\"\n\n focusable=\"false\"\n\n height=\"100%\"\n\n width=\"100%\"\n\n preserveAspectRatio=\"xMidYMid meet\"\n\n xmlns=\"http://www.w3.org/2000/svg\"\n\n {\n\n path d=\"M8.6 16.3l4.6-4.6-4.6-4.5L10 5.7l6 6-6 6z\" { }\n\n }\n\n }\n\n }\n\n }\n", "file_path": "wasm/src/partials/up_next.rs", "rank": 2, "score": 71888.5040270525 }, { "content": "fn up_next_prev(org: &OrgMeta) -> Markup {\n\n html! {\n\n a.link.start data-router href=(org.slug) {\n\n .icon.left aria-hidden=\"true\" {\n\n svg fill=\"currentColor\"\n\n focusable=\"false\"\n\n height=\"100%\"\n\n preserveAspectRatio=\"xMidYMid meet\"\n\n width=\"100%\"\n\n xmlns=\"http://www.w3.org/2000/svg\"\n\n {\n\n path d=\"M15.4 16.6L10.8 12l4.6-4.6L14 6l-6 6 6 6 1.4-1.4z\" { }\n\n }\n\n }\n\n div {\n\n .label { \"Prev\" }\n\n .title { (org.title) }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "wasm/src/partials/up_next.rs", "rank": 3, "score": 71888.5040270525 }, { "content": "const updateHead = (head: string) => {\n\n const start = document.querySelector('meta[name=\"wasm-head-start\"]');\n\n const end = document.querySelector('meta[name=\"wasm-head-end\"]');\n\n\n\n if (!start || !end) {\n\n console.error(\n\n 'meta[name=\"wasm-head-start\"] or meta[name=\"wasm-head-end\"] not found'\n\n );\n\n return;\n\n }\n\n\n\n let el = start.nextElementSibling;\n\n while (el && el !== end) {\n\n let next = el.nextElementSibling;\n\n el.remove();\n\n el = next;\n\n }\n\n\n\n 
const wrapper = document.createElement(\"div\");\n\n wrapper.innerHTML = head;\n\n [...wrapper.children].forEach((child) => {\n\n start.insertAdjacentElement(\"afterend\", child);\n\n });\n", "file_path": "web/src/main.ts", "rank": 4, "score": 47042.305778941736 }, { "content": "export const html = (ctx: wasm_bindgen.Context): Response => {\n\n return new Response(\n\n `<!DOCTYPE html>\n\n<html lang=\"zh-Hans\">\n\n <head>\n\n <meta charset=\"utf-8\" />\n\n <meta name=\"description\" content=\"PoiScript's Blog\" />\n\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\n <meta name=\"application-name\" content=\"solomon\" />\n\n <meta name=\"theme-color\" content=\"#673ab7\" />\n\n <meta name=\"apple-mobile-web-app-title\" content=\"solomon\" />\n\n <meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n\n <meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black\" />\n\n <link rel=\"apple-touch-icon\" sizes=\"120x120\" href=\"${base}favicon/touch-icon.png\" />\n\n <link rel=\"shortcut icon\" sizes=\"32x32\" href=\"${base}favicon/favicon.ico\" />\n\n <link rel=\"icon\" sizes=\"192x192\" href=\"${base}favicon/favicon-192x192.png\" />\n\n <link rel=\"icon\" sizes=\"any\" type=\"image/svg+xml\" href=\"${base}favicon/favicon.svg\" />\n\n <meta name=\"wasm-head-start\" content=\"\" />\n\n ${ctx.get_head()}\n\n <meta name=\"wasm-head-end\" content=\"\" />\n\n <script type=\"module\" crossorigin src=\"${base}${main.file}\"></script>\n\n ${main.imports\n\n .map(\n\n (i) => `<link rel=\"modulepreload\" href=\"${base}${manifest[i].file}\">`\n\n )\n\n .join(\"\")}\n\n ${main.css\n\n .map((i) => `<link rel=\"stylesheet\" href=\"${base}${i}\">`)\n\n .join(\"\")}\n\n </head>\n\n <body class=\"root\">\n\n ${ctx.get_body()}\n\n ${\n\n base === \"https://blog.poi.cat/\"\n\n ? 
`<script defer src='https://static.cloudflareinsights.com/beacon.min.js' data-cf-beacon='{\"token\": \"3c4b6155b33b47bda87cfbe4184a722e\"}'></script>`\n\n : \"\"\n\n }\n\n </body>\n\n</html>`,\n\n {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/html; charset=utf-8\",\n\n \"cache-control\": \"public, max-age=600\", // 10 minutes\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n }\n\n );\n", "file_path": "worker/src/response.ts", "rank": 5, "score": 43360.770501896644 }, { "content": "export const txt = (ctx: wasm_bindgen.Context): Response => {\n\n return new Response(ctx.get_body(), {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/plain; charset=utf-8\",\n\n },\n\n });\n", "file_path": "worker/src/response.ts", "rank": 6, "score": 43360.770501896644 }, { "content": "export const amp = async (ctx: wasm_bindgen.Context): Promise<Response> => {\n\n return new Response(\n\n `<!DOCTYPE html>\n\n<html ⚡ lang=\"zh-Hans\">\n\n <head>\n\n <meta charset=\"utf-8\" />\n\n ${ctx.get_head()}\n\n <meta name=\"description\" content=\"PoiScript's Blog\" />\n\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\n <meta name=\"application-name\" content=\"solomon\" />\n\n <meta name=\"theme-color\" content=\"#673ab7\" />\n\n <meta name=\"apple-mobile-web-app-title\" content=\"solomon\" />\n\n <meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n\n <meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black\" />\n\n <link rel=\"apple-touch-icon\" sizes=\"120x120\" href=\"${base}favicon/touch-icon.png\" />\n\n <link rel=\"shortcut icon\" sizes=\"32x32\" href=\"${base}favicon/favicon.ico\" />\n\n <link rel=\"icon\" sizes=\"192x192\" href=\"${base}favicon/favicon-192x192.png\" />\n\n <link rel=\"icon\" sizes=\"any\" type=\"image/svg+xml\" href=\"${base}favicon/favicon.svg\" />\n\n <script async src=\"https://cdn.ampproject.org/v0.js\"></script>\n\n <style 
amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>\n\n <style amp-custom>${(\n\n await Promise.all(main.css.map((c) => SOLOMON_KV.get(c)))\n\n ).join(\"\")}</style>\n\n </head>\n\n <body class=\"root\">\n\n ${ctx.get_body()}\n\n </body>\n\n</html>`,\n\n {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/html; charset=utf-8\",\n\n \"cache-control\": \"public, max-age=600\", // 10 minutes\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n }\n\n );\n", "file_path": "worker/src/response.ts", "rank": 7, "score": 43360.770501896644 }, { "content": "export const rss = (ctx: wasm_bindgen.Context): Response => {\n\n return new Response(ctx.get_body(), {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/xml; charset=utf-8\",\n\n \"cache-control\": \"public, max-age=86400\", // 1 day\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n });\n", "file_path": "worker/src/response.ts", "rank": 8, "score": 43172.55578964349 }, { "content": "fn main() {\n\n {\n\n let output = Command::new(\"git\")\n\n .args(&[\"rev-parse\", \"--short\", \"HEAD\"])\n\n .output()\n\n .unwrap();\n\n\n\n let git_hash = String::from_utf8(output.stdout).unwrap();\n\n\n\n println!(\"cargo:rustc-env=CARGO_GIT_HASH={}\", git_hash);\n\n }\n\n\n\n {\n\n let output = 
Command::new(\"date\").args(&[\"-R\"]).output().unwrap();\n\n\n\n let git_hash = String::from_utf8(output.stdout).unwrap();\n\n\n\n println!(\"cargo:rustc-env=CARGO_BUILD_TIME={}\", git_hash);\n\n }\n\n}\n", "file_path": "wasm/build.rs", "rank": 9, "score": 42609.723177004984 }, { "content": "use maud::{html, Markup, PreEscaped, Render};\n\n\n\npub struct Heading<'a> {\n\n pub title: &'a str,\n\n pub subtitle: Option<&'a str>,\n\n}\n\n\n\nimpl<'a> Render for Heading<'a> {\n\n fn render(&self) -> Markup {\n\n html! {\n\n div.\"title-section\" {\n\n h1.title { (self.title) }\n\n\n\n @if let Some(subtitle) = self.subtitle {\n\n h2.subtitle { (PreEscaped(subtitle)) }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "wasm/src/partials/heading.rs", "rank": 10, "score": 27605.071327666516 }, { "content": " posts.sort_by(|a, b| b.published.cmp(&a.published));\n\n\n\n ctx.content = Content::Html {\n\n status: 200,\n\n head: html! {\n\n title { \"#\"(tag)\"☆Solomon\" }\n\n },\n\n body: html! {\n\n (Header)\n\n main.main {\n\n ( Heading { title: &format!(\"#{}\", tag), subtitle: None } )\n\n @for post in posts {\n\n ( PostItem { meta: post } )\n\n }\n\n }\n\n (Footer)\n\n },\n\n };\n\n\n\n Ok(ctx)\n\n}\n", "file_path": "wasm/src/pages/tag.rs", "rank": 11, "score": 27602.643451968343 }, { "content": "use maud::html;\n\nuse wasm_bindgen::JsValue;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::pages::not_found;\n\nuse crate::partials::{Footer, Header, Heading, PostItem};\n\n\n\npub async fn tag(mut ctx: Context, tag: &str) -> Result<Context, JsValue> {\n\n ctx.load_org_meta().await?;\n\n\n\n let mut posts: Vec<_> = ctx\n\n .org_meta\n\n .values()\n\n .filter(|org| org.tags.iter().any(|t| t == tag))\n\n .collect();\n\n\n\n if posts.is_empty() {\n\n return not_found(ctx).await;\n\n }\n\n\n", "file_path": "wasm/src/pages/tag.rs", "rank": 12, "score": 27601.28529958817 }, { "content": "use chrono::Utc;\n\nuse futures_util::future::try_join_all;\n\nuse maud::html;\n\nuse 
wasm_bindgen::prelude::*;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::partials::{Article, Mode};\n\n\n\npub async fn rss(mut ctx: Context) -> Result<Context, JsValue> {\n\n ctx.load_org_meta().await?;\n\n\n\n let mut posts: Vec<_> = ctx\n\n .org_meta\n\n .values()\n\n .filter(|org| org.slug.starts_with(\"/post/\"))\n\n .collect();\n\n\n\n posts.sort_by(|a, b| b.published.cmp(&a.published));\n\n\n\n posts.truncate(5);\n", "file_path": "wasm/src/pages/rss.rs", "rank": 13, "score": 27462.768103604867 }, { "content": " @for (post, org) in posts.iter().zip(orgs.iter()) {\n\n item {\n\n title { (&post.title) }\n\n author { \"PoiScript\" }\n\n link { (ctx.base_url)(post.slug) }\n\n guid isPermaLink=\"false\" { (post.slug) }\n\n @for tag in &post.tags {\n\n category { (tag) }\n\n }\n\n pubDate { (post.published.to_rfc2822()) }\n\n description {\n\n (Article {\n\n mode: Mode::Rss,\n\n org: &orgize::Org::parse(org),\n\n ctx: &ctx,\n\n })\n\n }\n\n }\n\n }\n\n }\n\n }\n\n },\n\n };\n\n\n\n Ok(ctx)\n\n}\n", "file_path": "wasm/src/pages/rss.rs", "rank": 14, "score": 27458.099462003996 }, { "content": "\n\n let orgs = try_join_all(posts.iter().map(|post| ctx.load_org(&post.slug))).await?;\n\n\n\n ctx.content = Content::Rss {\n\n status: 200,\n\n body: html! 
{\n\n rss version=\"2.0\"\n\n xmlns:atom=\"http://www.w3.org/2005/Atom\"\n\n xmlns:content=\"http://purl.org/rss/1.0/modules/content/\"\n\n xmlns:dc=\"http://purl.org/dc/elements/1.1/\"\n\n {\n\n channel {\n\n title { \"solomon\" }\n\n description { \"PoiScript's Blog\" }\n\n link rel=\"self\" href={ (ctx.base_url)(\"/rss\")} {}\n\n link rel=\"alternate\" href={ (ctx.base_url) } {}\n\n generator { \"solomon \"(env!(\"CARGO_PKG_VERSION\")) }\n\n lastBuildDate { (Utc::now().to_rfc2822()) }\n\n language { \"zh-Hans\" }\n\n copyright { \"Content licensed under CC-BY-SA-4.0.\" }\n", "file_path": "wasm/src/pages/rss.rs", "rank": 15, "score": 27457.326642110576 }, { "content": "use maud::Render;\n\nuse orgize::{\n\n export::{DefaultHtmlHandler, HtmlHandler},\n\n Element, Event, Org,\n\n};\n\nuse std::cmp::min;\n\nuse std::io::Write;\n\n\n\nuse crate::utils::get_id;\n\n\n\npub struct TableOfContent<'a> {\n\n pub org: &'a Org<'a>,\n\n}\n\n\n\nimpl<'a> Render for TableOfContent<'a> {\n\n fn render_to(&self, buffer: &mut String) {\n\n let mut handler = DefaultHtmlHandler::default();\n\n let mut in_title = false;\n\n let mut title_n = 0;\n\n\n", "file_path": "wasm/src/partials/table_of_content.rs", "rank": 26, "score": 26611.309241425006 }, { "content": " for event in self.org.iter() {\n\n let w = unsafe { buffer.as_mut_vec() };\n\n\n\n match event {\n\n Event::Start(Element::Title(title)) => {\n\n if title_n == 0 {\n\n let _ = write!(\n\n w,\n\n r#\"<div class=\"toc\"><div class=\"heading\">Table of content</div>\"#\n\n );\n\n }\n\n\n\n title_n += 1;\n\n in_title = true;\n\n\n\n let id = get_id(title_n, &title.raw);\n\n let level = min(title.level, 6);\n\n\n\n let _ = write!(w, r##\"<div class=\"level-{level}\"><a href=\"#{id}\">\"##,);\n\n }\n", "file_path": "wasm/src/partials/table_of_content.rs", "rank": 27, "score": 26602.566512348436 }, { "content": " Event::End(Element::Title(_)) => {\n\n in_title = false;\n\n\n\n let _ = write!(w, r#\"</a></div>\"#);\n\n }\n\n\n\n 
Event::Start(element) => {\n\n if in_title {\n\n let _ = handler.start(w, element);\n\n }\n\n }\n\n Event::End(element) => {\n\n if in_title {\n\n let _ = handler.end(w, element);\n\n }\n\n }\n\n }\n\n }\n\n\n\n if title_n > 0 {\n\n buffer.push_str(r#\"</div>\"#);\n\n }\n\n }\n\n}\n", "file_path": "wasm/src/partials/table_of_content.rs", "rank": 28, "score": 26601.145863642712 }, { "content": "fn should_insert_space(c1: Option<char>, c2: Option<char>) -> bool {\n\n const CJK_CHARACTERS: Range<u32> = 0x4E00..0x9FFF;\n\n\n\n const CJK_PUNCTUATIONS: [char; 14] = [\n\n '。', '?', ',', '、', ';', ':', '“', '”', '「', '」', '(', ')', '《', '》',\n\n ];\n\n\n\n if let (Some(c1), Some(c2)) = (c1, c2) {\n\n (c1.is_ascii_graphic() && c2.is_ascii_graphic())\n\n || (c1.is_ascii_graphic()\n\n && CJK_CHARACTERS.contains(&(c2 as u32))\n\n && !CJK_PUNCTUATIONS.contains(&c2))\n\n || (c2.is_ascii_graphic()\n\n && CJK_CHARACTERS.contains(&(c1 as u32))\n\n && !CJK_PUNCTUATIONS.contains(&c1))\n\n } else {\n\n false\n\n }\n\n}\n", "file_path": "wasm/src/partials/article.rs", "rank": 29, "score": 26506.59364785375 }, { "content": "const updatePage = async (url: string, ctx: Context): Promise<Context> => {\n\n showProgress();\n\n ctx = await render(url, ctx);\n\n updateHead(ctx.get_head());\n\n hideProgress();\n\n document.body.innerHTML = ctx.get_body();\n\n window.scrollTo({ top: 0, behavior: \"smooth\" });\n\n return ctx;\n", "file_path": "web/src/main.ts", "rank": 30, "score": 23521.152889470868 }, { "content": "import { defineConfig } from \"vite\";\n\nimport { readFileSync } from \"fs\";\n\n\n\nconst { WORKER_NAME = \"blogdev\" } = process.env;\n\n\n\nexport default defineConfig(({ command }) => ({\n\n base: command === \"serve\" ? 
\"/\" : `https://${WORKER_NAME}.poi.cat/`,\n\n\n\n publicDir: false,\n\n\n\n build: {\n\n assetsDir: \"\",\n\n emptyOutDir: true,\n\n minify: false,\n\n target: \"es2020\",\n\n\n\n rollupOptions: {\n\n input: \"/src/main.ts\",\n\n\n\n output: {\n\n format: \"module\",\n\n entryFileNames: `[name].js`,\n\n chunkFileNames: `[name].js`,\n\n assetFileNames: `[name].[ext]`,\n\n manualChunks: undefined,\n\n },\n\n },\n\n },\n\n\n\n define: {\n\n WASM_BINDGEN_SCRIPT: readFileSync(\"./pkg/solomon.js\", \"utf-8\"),\n\n },\n\n}));\n", "file_path": "worker/vite.config.ts", "rank": 31, "score": 19135.56176836374 }, { "content": "import Prism from \"prismjs\";\n\nimport \"prismjs/components/prism-typescript\";\n\nimport \"prismjs/components/prism-lisp\";\n\nimport \"prismjs/components/prism-rust\";\n\nimport \"prismjs/components/prism-http\";\n\nimport \"prismjs/components/prism-bash\";\n\nimport \"prismjs/components/prism-yaml\";\n\n\n\nimport mime from \"./mime\";\n\nimport { html, rss, amp, txt } from \"./response\";\n\n\n\naddEventListener(\"fetch\", (event: FetchEvent) => {\n\n try {\n\n event.respondWith(handleRequest(event));\n\n } catch (e: any) {\n\n event.respondWith(new Response(e.toString(), { status: 500 }));\n\n }\n\n});\n\n\n\nasync function handleRequest(event: FetchEvent): Promise<Response> {\n\n // @ts-ignore\n\n const cache = caches.default;\n\n const url = new URL(event.request.url).pathname;\n\n\n\n // cache\n\n {\n\n const response = await cache.match(event.request);\n\n\n\n if (response) {\n\n const headers = new Headers(response.headers);\n\n headers.set(\"cf-cache-status\", \"HIT\");\n\n\n\n return new Response(response.body, {\n\n headers,\n\n status: response.status || 200,\n\n statusText: response.statusText || \"OK\",\n\n });\n\n }\n\n }\n\n\n\n // assets\n\n {\n\n // kv key cannot be empty\n\n if (url.length > 1) {\n\n const { value: buffer, metadata = {} } =\n\n await SOLOMON_KV.getWithMetadata<any>(url.substring(1), \"arrayBuffer\");\n\n\n\n if 
(buffer) {\n\n if (\n\n event.request.headers.has(\"if-none-match\") &&\n\n event.request.headers.get(\"if-none-match\") === metadata?.etag\n\n ) {\n\n return new Response(null, {\n\n status: 304,\n\n headers: {\n\n \"content-type\": mime(url),\n\n etag: metadata?.etag,\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n });\n\n }\n\n\n\n const cache_control =\n\n url.endsWith(\".js\") || url.endsWith(\".css\") || url.endsWith(\".wasm\")\n\n ? \"public, max-age=31536000, immutable\"\n\n : \"public, no-cache\";\n\n\n\n const response = new Response(buffer, {\n\n status: 200,\n\n headers: {\n\n \"content-type\": mime(url),\n\n etag: metadata?.etag,\n\n \"cf-cache-status\": \"MISS\",\n\n \"cache-control\": cache_control,\n\n },\n\n });\n\n\n\n event.waitUntil(cache.put(event.request, response.clone()));\n\n\n\n return response;\n\n }\n\n }\n\n }\n\n\n\n // wasm\n\n {\n\n WASM_BINDGEN_SCRIPT;\n\n\n\n await wasm_bindgen(SOLOMON_WSAM);\n\n\n\n let ctx = new wasm_bindgen.Context(import.meta.env.BASE_URL, {\n\n highlight: (code, lang) => {\n\n if (lang in Prism.languages) {\n\n return Prism.highlight(code, Prism.languages[lang], lang);\n\n }\n\n\n\n return code;\n\n },\n\n });\n\n ctx = await wasm_bindgen.render(url, ctx);\n\n\n\n let response: Response = null;\n\n\n\n switch (ctx.get_type()) {\n\n case \"html\": {\n\n response = html(ctx);\n\n break;\n\n }\n\n case \"rss\": {\n\n response = rss(ctx);\n\n break;\n\n }\n\n case \"amp\": {\n\n response = await amp(ctx);\n\n break;\n\n }\n\n case \"txt\": {\n\n response = txt(ctx);\n\n break;\n\n }\n\n }\n\n\n\n event.waitUntil(cache.put(event.request, response.clone()));\n\n\n\n return response;\n\n }\n\n}\n", "file_path": "worker/src/main.ts", "rank": 32, "score": 19135.56176836374 }, { "content": "import manifest from \"../../web/dist/manifest.json\";\n\n\n\nconst base = import.meta.env.BASE_URL;\n\nconst main = manifest[\"src/main.ts\"];\n\n\n\nexport const html = (ctx: wasm_bindgen.Context): Response => {\n\n return new 
Response(\n\n `<!DOCTYPE html>\n\n<html lang=\"zh-Hans\">\n\n <head>\n\n <meta charset=\"utf-8\" />\n\n <meta name=\"description\" content=\"PoiScript's Blog\" />\n\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\n <meta name=\"application-name\" content=\"solomon\" />\n\n <meta name=\"theme-color\" content=\"#673ab7\" />\n\n <meta name=\"apple-mobile-web-app-title\" content=\"solomon\" />\n\n <meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n\n <meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black\" />\n\n <link rel=\"apple-touch-icon\" sizes=\"120x120\" href=\"${base}favicon/touch-icon.png\" />\n\n <link rel=\"shortcut icon\" sizes=\"32x32\" href=\"${base}favicon/favicon.ico\" />\n\n <link rel=\"icon\" sizes=\"192x192\" href=\"${base}favicon/favicon-192x192.png\" />\n\n <link rel=\"icon\" sizes=\"any\" type=\"image/svg+xml\" href=\"${base}favicon/favicon.svg\" />\n\n <meta name=\"wasm-head-start\" content=\"\" />\n\n ${ctx.get_head()}\n\n <meta name=\"wasm-head-end\" content=\"\" />\n\n <script type=\"module\" crossorigin src=\"${base}${main.file}\"></script>\n\n ${main.imports\n\n .map(\n\n (i) => `<link rel=\"modulepreload\" href=\"${base}${manifest[i].file}\">`\n\n )\n\n .join(\"\")}\n\n ${main.css\n\n .map((i) => `<link rel=\"stylesheet\" href=\"${base}${i}\">`)\n\n .join(\"\")}\n\n </head>\n\n <body class=\"root\">\n\n ${ctx.get_body()}\n\n ${\n\n base === \"https://blog.poi.cat/\"\n\n ? 
`<script defer src='https://static.cloudflareinsights.com/beacon.min.js' data-cf-beacon='{\"token\": \"3c4b6155b33b47bda87cfbe4184a722e\"}'></script>`\n\n : \"\"\n\n }\n\n </body>\n\n</html>`,\n\n {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/html; charset=utf-8\",\n\n \"cache-control\": \"public, max-age=600\", // 10 minutes\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n }\n\n );\n\n};\n\n\n\nexport const amp = async (ctx: wasm_bindgen.Context): Promise<Response> => {\n\n return new Response(\n\n `<!DOCTYPE html>\n\n<html ⚡ lang=\"zh-Hans\">\n\n <head>\n\n <meta charset=\"utf-8\" />\n\n ${ctx.get_head()}\n\n <meta name=\"description\" content=\"PoiScript's Blog\" />\n\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\n <meta name=\"application-name\" content=\"solomon\" />\n\n <meta name=\"theme-color\" content=\"#673ab7\" />\n\n <meta name=\"apple-mobile-web-app-title\" content=\"solomon\" />\n\n <meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n\n <meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black\" />\n\n <link rel=\"apple-touch-icon\" sizes=\"120x120\" href=\"${base}favicon/touch-icon.png\" />\n\n <link rel=\"shortcut icon\" sizes=\"32x32\" href=\"${base}favicon/favicon.ico\" />\n\n <link rel=\"icon\" sizes=\"192x192\" href=\"${base}favicon/favicon-192x192.png\" />\n\n <link rel=\"icon\" sizes=\"any\" type=\"image/svg+xml\" href=\"${base}favicon/favicon.svg\" />\n\n <script async src=\"https://cdn.ampproject.org/v0.js\"></script>\n\n <style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-ms-keyframes 
-amp-start{from{visibility:hidden}to{visibility:visible}}@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>\n\n <style amp-custom>${(\n\n await Promise.all(main.css.map((c) => SOLOMON_KV.get(c)))\n\n ).join(\"\")}</style>\n\n </head>\n\n <body class=\"root\">\n\n ${ctx.get_body()}\n\n </body>\n\n</html>`,\n\n {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/html; charset=utf-8\",\n\n \"cache-control\": \"public, max-age=600\", // 10 minutes\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n }\n\n );\n\n};\n\n\n\nexport const rss = (ctx: wasm_bindgen.Context): Response => {\n\n return new Response(ctx.get_body(), {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/xml; charset=utf-8\",\n\n \"cache-control\": \"public, max-age=86400\", // 1 day\n\n \"cf-cache-status\": \"MISS\",\n\n },\n\n });\n\n};\n\n\n\nexport const txt = (ctx: wasm_bindgen.Context): Response => {\n\n return new Response(ctx.get_body(), {\n\n status: ctx.get_status(),\n\n headers: {\n\n \"content-type\": \"text/plain; charset=utf-8\",\n\n },\n\n });\n\n};\n", "file_path": "worker/src/response.ts", "rank": 33, "score": 19135.56176836374 }, { "content": "import Prism from \"prismjs\";\n\nimport \"prismjs/components/prism-typescript\";\n\nimport \"prismjs/components/prism-lisp\";\n\nimport \"prismjs/components/prism-rust\";\n\nimport \"prismjs/components/prism-http\";\n\nimport \"prismjs/components/prism-bash\";\n\nimport \"prismjs/components/prism-yaml\";\n\n\n\nimport init, { render, Context } from \"../pkg\";\n\n\n\nimport \"prismjs/themes/prism.css\";\n\nimport \"./index.less\";\n\nimport { showProgress, hideProgress } from \"./progress\";\n\n\n\nconst updateHead = (head: string) => {\n\n const start = 
document.querySelector('meta[name=\"wasm-head-start\"]');\n\n const end = document.querySelector('meta[name=\"wasm-head-end\"]');\n\n\n\n if (!start || !end) {\n\n console.error(\n\n 'meta[name=\"wasm-head-start\"] or meta[name=\"wasm-head-end\"] not found'\n\n );\n\n return;\n\n }\n\n\n\n let el = start.nextElementSibling;\n\n while (el && el !== end) {\n\n let next = el.nextElementSibling;\n\n el.remove();\n\n el = next;\n\n }\n\n\n\n const wrapper = document.createElement(\"div\");\n\n wrapper.innerHTML = head;\n\n [...wrapper.children].forEach((child) => {\n\n start.insertAdjacentElement(\"afterend\", child);\n\n });\n\n};\n\n\n\nconst updatePage = async (url: string, ctx: Context): Promise<Context> => {\n\n showProgress();\n\n ctx = await render(url, ctx);\n\n updateHead(ctx.get_head());\n\n hideProgress();\n\n document.body.innerHTML = ctx.get_body();\n\n window.scrollTo({ top: 0, behavior: \"smooth\" });\n\n return ctx;\n\n};\n\n\n\nconst main = async () => {\n\n await init();\n\n let ctx = new Context(import.meta.env.BASE_URL, {\n\n highlight: (code, lang) => {\n\n if (lang in Prism.languages) {\n\n return Prism.highlight(code, Prism.languages[lang], lang);\n\n }\n\n\n\n return code;\n\n },\n\n });\n\n\n\n console.log(ctx.get_version());\n\n\n\n if (import.meta.env.DEV) {\n\n ctx = await updatePage(location.pathname, ctx);\n\n }\n\n\n\n let previousUrl = location.pathname;\n\n\n\n document.addEventListener(\"click\", async (event) => {\n\n if (!(event.target instanceof Element)) return null;\n\n\n\n const anchor = event.target.closest<HTMLAnchorElement>(\n\n \"a[data-router][href]\"\n\n );\n\n\n\n if (!anchor) return;\n\n\n\n const newUrl = new URL(anchor.href);\n\n\n\n // external links\n\n if (newUrl.hostname && newUrl.hostname !== location.hostname) return;\n\n\n\n // prevent reload\n\n event.preventDefault();\n\n\n\n // current url\n\n if (newUrl.pathname !== previousUrl) {\n\n previousUrl = newUrl.pathname;\n\n console.log(\"pushState:\", 
newUrl.pathname);\n\n\n\n window.history.pushState(null, \"\", newUrl.pathname);\n\n ctx = await updatePage(newUrl.pathname, ctx);\n\n }\n\n });\n\n\n\n // back or forward buttons are clicked\n\n window.addEventListener(\"popstate\", async () => {\n\n if (previousUrl !== location.pathname) {\n\n previousUrl = location.pathname;\n\n console.log(\"popState:\", location.pathname);\n\n\n\n ctx = await updatePage(location.pathname, ctx);\n\n }\n\n });\n\n};\n\n\n\nPrism.highlightAll();\n\n\n\nconsole.time(\"init\");\n\n\n\nmain()\n\n .catch(console.error)\n\n .finally(() => console.timeEnd(\"init\"));\n", "file_path": "web/src/main.ts", "rank": 34, "score": 19135.56176836374 }, { "content": "import { defineConfig } from \"vite\";\n\n\n\nconst { WORKER_NAME = \"blogdev\" } = process.env;\n\n\n\nexport default defineConfig(({ command }) => ({\n\n base: command === \"serve\" ? \"/\" : `https://${WORKER_NAME}.poi.cat/`,\n\n\n\n publicDir: \"../public\",\n\n\n\n build: {\n\n assetsDir: \"\",\n\n emptyOutDir: true,\n\n manifest: true,\n\n\n\n rollupOptions: {\n\n // don't include index.html in output bundle\n\n input: command === \"build\" ? 
\"/src/main.ts\" : undefined,\n\n },\n\n },\n\n}));\n", "file_path": "web/vite.config.ts", "rank": 35, "score": 19135.56176836374 }, { "content": "const mime = (filename: string): string => {\n\n const ext = filename.split(\".\").pop();\n\n\n\n switch (ext) {\n\n case \"aac\":\n\n return \"audio/aac\";\n\n case \"abw\":\n\n return \"application/x-abiword\";\n\n case \"arc\":\n\n return \"application/x-freearc\";\n\n case \"avif\":\n\n return \"image/avif\";\n\n case \"avi\":\n\n return \"video/x-msvideo\";\n\n case \"azw\":\n\n return \"application/vnd.amazon.ebook\";\n\n case \"bin\":\n\n return \"application/octet-stream\";\n\n case \"bmp\":\n\n return \"image/bmp\";\n\n case \"bz\":\n\n return \"application/x-bzip\";\n\n case \"bz2\":\n\n return \"application/x-bzip2\";\n\n case \"cda\":\n\n return \"application/x-cdf\";\n\n case \"csh\":\n\n return \"application/x-csh\";\n\n case \"css\":\n\n return \"text/css\";\n\n case \"csv\":\n\n return \"text/csv\";\n\n case \"doc\":\n\n return \"application/msword\";\n\n case \"docx\":\n\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n\n case \"eot\":\n\n return \"application/vnd.ms-fontobject\";\n\n case \"epub\":\n\n return \"application/epub+zip\";\n\n case \"gz\":\n\n return \"application/gzip\";\n\n case \"gif\":\n\n return \"image/gif\";\n\n case \"htm\":\n\n case \"html\":\n\n return \"text/html\";\n\n case \"ico\":\n\n return \"image/vnd.microsoft.icon\";\n\n case \"ics\":\n\n return \"text/calendar\";\n\n case \"jar\":\n\n return \"application/java-archive\";\n\n case \"jpeg\":\n\n case \"jpg\":\n\n return \"image/jpeg\";\n\n case \"js\":\n\n return \"text/javascript\";\n\n case \"json\":\n\n return \"application/json\";\n\n case \"jsonld\":\n\n return \"application/ld+json\";\n\n case \"mjs\":\n\n return \"text/javascript\";\n\n case \"mp3\":\n\n return \"audio/mpeg\";\n\n case \"mp4\":\n\n return \"video/mp4\";\n\n case \"mpeg\":\n\n return \"video/mpeg\";\n\n case 
\"mpkg\":\n\n return \"application/vnd.apple.installer+xml\";\n\n case \"odp\":\n\n return \"application/vnd.oasis.opendocument.presentation\";\n\n case \"ods\":\n\n return \"application/vnd.oasis.opendocument.spreadsheet\";\n\n case \"odt\":\n\n return \"application/vnd.oasis.opendocument.text\";\n\n case \"oga\":\n\n return \"audio/ogg\";\n\n case \"ogv\":\n\n return \"video/ogg\";\n\n case \"ogx\":\n\n return \"application/ogg\";\n\n case \"opus\":\n\n return \"audio/opus\";\n\n case \"otf\":\n\n return \"font/otf\";\n\n case \"png\":\n\n return \"image/png\";\n\n case \"pdf\":\n\n return \"application/pdf\";\n\n case \"php\":\n\n return \"application/x-httpd-php\";\n\n case \"ppt\":\n\n return \"application/vnd.ms-powerpoint\";\n\n case \"pptx\":\n\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n\n case \"rar\":\n\n return \"application/vnd.rar\";\n\n case \"rtf\":\n\n return \"application/rtf\";\n\n case \"sh\":\n\n return \"application/x-sh\";\n\n case \"svg\":\n\n return \"image/svg+xml\";\n\n case \"swf\":\n\n return \"application/x-shockwave-flash\";\n\n case \"tar\":\n\n return \"application/x-tar\";\n\n case \"ttf\":\n\n return \"font/ttf\";\n\n case \"txt\":\n\n case \"org\":\n\n case \"gpg\":\n\n return \"text/plain\";\n\n case \"vsd\":\n\n return \"application/vnd.visio\";\n\n case \"wav\":\n\n return \"audio/wav\";\n\n case \"weba\":\n\n return \"audio/webm\";\n\n case \"webm\":\n\n return \"video/webm\";\n\n case \"webp\":\n\n return \"image/webp\";\n\n case \"woff\":\n\n return \"font/woff\";\n\n case \"woff2\":\n\n return \"font/woff2\";\n\n case \"xhtml\":\n\n return \"application/xhtml+xml\";\n\n case \"xls\":\n\n return \"application/vnd.ms-excel\";\n\n case \"xlsx\":\n\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n\n case \"xml\":\n\n return \"application/xml\";\n\n case \"xul\":\n\n return \"application/vnd.mozilla.xul+xml\";\n\n case \"zip\":\n\n return 
\"application/zip\";\n\n case \"7z\":\n\n return \"application/x-7z-compressed\";\n\n case \"wasm\":\n\n return \"application/wasm\";\n\n default:\n\n return \"application/octet-stream\";\n\n }\n\n};\n\n\n\nexport default mime;\n", "file_path": "worker/src/mime.ts", "rank": 36, "score": 19135.56176836374 }, { "content": "const bar = document.createElement(\"div\");\n\nbar.classList.add(\"progress-bar\");\n\nbar.innerHTML = `<div class=\"fill\"></div>`;\n\n\n\nexport const showProgress = () => document.body.appendChild(bar);\n\n\n\nexport const hideProgress = () => document.body.removeChild(bar);\n", "file_path": "web/src/progress.ts", "rank": 37, "score": 19135.56176836374 }, { "content": "use maud::{html, Markup, Render};\n\n\n\nuse crate::context::OrgMeta;\n\n\n\npub struct PostItem<'a> {\n\n pub meta: &'a OrgMeta,\n\n}\n\n\n\nimpl<'a> Render for PostItem<'a> {\n\n fn render(&self) -> Markup {\n\n html! {\n\n .\"post-item\" {\n\n a.title data-router href=(self.meta.slug) { (self.meta.title) }\n\n .subtitle {\n\n (self.meta.published.format(\"%F\"))\n\n \" ·\"\n\n\n\n @for tag in &self.meta.tags {\n\n \" \"\n\n a.tag data-router href={ \"/tag/\"(tag) } {\n\n \"#\" (tag)\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "wasm/src/partials/post_item.rs", "rank": 38, "score": 22.199376774209288 }, { "content": "use maud::html;\n\nuse wasm_bindgen::JsValue;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::partials::{Footer, Header, Heading};\n\n\n\npub async fn not_found(mut ctx: Context) -> Result<Context, JsValue> {\n\n ctx.content = Content::Html {\n\n status: 404,\n\n head: html! {\n\n title { \"Not Found☆Solomon\" }\n\n meta property=\"og:title\" content=\"Not Found☆Solomon\";\n\n meta property=\"og:type\" content=\"website\";\n\n meta property=\"og:image\" content={ (ctx.base_url)\"/amp-image.jpg\"};\n\n },\n\n body: html! 
{\n\n (Header)\n\n main.main {\n\n (Heading { title: \"Not Found\", subtitle: None })\n\n \"Not Found\"\n\n }\n\n (Footer)\n\n },\n\n };\n\n\n\n Ok(ctx)\n\n}\n", "file_path": "wasm/src/pages/not_found.rs", "rank": 39, "score": 19.6297893127585 }, { "content": "use maud::html;\n\nuse wasm_bindgen::prelude::*;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::partials::{Footer, Header, Heading};\n\n\n\npub struct Link {\n\n id: &'static str,\n\n src: &'static str,\n\n name: &'static str,\n\n}\n\n\n\npub const LINKS: &[Link] = &[\n\n Link {\n\n id: \"petercxy\",\n\n src: \"https://typeblog.net\",\n\n name: \"PeterCxy\",\n\n },\n\n Link {\n\n id: \"fiveyellowmice\",\n", "file_path": "wasm/src/pages/link.rs", "rank": 40, "score": 18.724317697433634 }, { "content": "use json::object;\n\nuse maud::Render;\n\n\n\nuse crate::context::{Context, OrgMeta};\n\n\n\npub struct Schema<'a> {\n\n pub ctx: &'a Context,\n\n pub meta: &'a OrgMeta,\n\n}\n\n\n\nimpl<'a> Render for Schema<'a> {\n\n fn render_to(&self, buffer: &mut String) {\n\n let value = object! 
{\n\n \"@context\": \"http://schema.org\",\n\n \"@type\": \"BlogPosting\",\n\n \"url\": format!(\"{}/amp{}\", self.ctx.base_url, self.meta.slug),\n\n \"name\": \"Solomon\",\n\n \"headline\": format!(\"{}☆Solomon\", self.meta.title),\n\n \"description\": \"PoiScript's Blog\",\n\n \"mainEntityOfPage\": self.ctx.base_url.as_str(),\n", "file_path": "wasm/src/partials/schema.rs", "rank": 41, "score": 17.660770626543783 }, { "content": "use std::fmt::Write;\n\nuse wasm_bindgen::prelude::*;\n\n\n\nuse crate::context::{Content, Context};\n\n\n\npub async fn sitemap(mut ctx: Context) -> Result<Context, JsValue> {\n\n ctx.load_org_meta().await?;\n\n\n\n let mut body = String::new();\n\n\n\n let base_url = ctx.base_url.as_str();\n\n\n\n let _ = writeln!(&mut body, \"{base_url}\");\n\n let _ = writeln!(&mut body, \"{base_url}/link\");\n\n\n\n // posts\n\n {\n\n for post in ctx.org_meta.values() {\n\n let slug = post.slug.as_str();\n\n let _ = writeln!(&mut body, \"{base_url}{slug}\");\n", "file_path": "wasm/src/pages/sitemap.rs", "rank": 42, "score": 17.55314288733719 }, { "content": "use maud::html;\n\nuse wasm_bindgen::prelude::*;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::partials::{Footer, Header, PostItem};\n\n\n\npub async fn home(mut ctx: Context) -> Result<Context, JsValue> {\n\n ctx.load_org_meta().await?;\n\n\n\n let mut posts: Vec<_> = ctx\n\n .org_meta\n\n .values()\n\n .filter(|org| org.slug.starts_with(\"/post/\"))\n\n .collect();\n\n\n\n posts.sort_by(|a, b| b.published.cmp(&a.published));\n\n\n\n ctx.content = Content::Html {\n\n status: 200,\n\n head: html! 
{\n", "file_path": "wasm/src/pages/home.rs", "rank": 43, "score": 17.530657639721493 }, { "content": "use maud::html;\n\nuse wasm_bindgen::JsValue;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::pages::not_found::not_found;\n\nuse crate::partials::{\n\n Article, Footer, Header, Heading, Mode, OgDescription, Schema, TableOfContent, UpNext,\n\n};\n\n\n\npub async fn post(mut ctx: Context, slug: &str, is_amp: bool) -> Result<Context, JsValue> {\n\n ctx.load_org_meta().await?;\n\n ctx.load_img_meta().await?;\n\n\n\n let key = format!(\"/post/{}\", slug);\n\n\n\n if let Some(meta) = ctx.org_meta.get(&key) {\n\n let content = ctx.load_org(&key).await?;\n\n let org = orgize::Org::parse(&content);\n\n\n\n let subtitle = html! {\n", "file_path": "wasm/src/pages/post.rs", "rank": 44, "score": 17.45124983076423 }, { "content": " (meta.published.format(\"%F\"))\n\n \" ·\"\n\n @for tag in &meta.tags {\n\n \" \"\n\n a.tag data-router href={ \"/tag/\"(tag) } {\n\n \"#\" (tag)\n\n }\n\n }\n\n \" · \"\n\n a.source target=\"_blank\" href={ (key)\".org\" } { \"source\" }\n\n }\n\n .into_string();\n\n\n\n let (prev, next) = ctx.find_prev_and_next(&meta.published);\n\n\n\n let body = html! {\n\n (Header)\n\n main.main {\n\n (Heading { title: &meta.title, subtitle: Some(&subtitle) })\n\n (TableOfContent { org: &org })\n", "file_path": "wasm/src/pages/post.rs", "rank": 45, "score": 17.21602783092527 }, { "content": " (meta.published.format(\"%F\"))\n\n \" · \"\n\n a.source target=\"_blank\" href={ (key)\".org\" } { \"source\" }\n\n }\n\n .into_string();\n\n\n\n let body = html! 
{\n\n (Header)\n\n main.main {\n\n (Heading { title: &meta.title, subtitle: Some(&subtitle) })\n\n (TableOfContent { org: &org })\n\n (Article {\n\n mode: if cfg!(feature = \"worker\") && is_amp { Mode::Amp } else { Mode::Html },\n\n org: &org,\n\n ctx: &ctx\n\n })\n\n }\n\n (Footer)\n\n };\n\n\n", "file_path": "wasm/src/pages/about.rs", "rank": 46, "score": 16.499721225046773 }, { "content": "use maud::{html, Markup, Render};\n\nuse orgize::{Element, Event, Org};\n\n\n\npub struct OgDescription<'a> {\n\n pub org: &'a Org<'a>,\n\n}\n\n\n\nimpl<'a> Render for OgDescription<'a> {\n\n fn render(&self) -> Markup {\n\n let mut description = String::with_capacity(200);\n\n\n\n for event in self.org.iter() {\n\n if description.len() >= 200 {\n\n break;\n\n }\n\n\n\n match event {\n\n Event::Start(Element::Text { value })\n\n | Event::Start(Element::Code { value })\n\n | Event::Start(Element::Verbatim { value }) => {\n", "file_path": "wasm/src/partials/og_description.rs", "rank": 47, "score": 16.01006772075264 }, { "content": " } @else {\n\n link rel=\"amphtml\" href={ (ctx.base_url)\"/amp\"(meta.slug) };\n\n }\n\n };\n\n\n\n let status = 200;\n\n\n\n ctx.content = if cfg!(feature = \"worker\") && is_amp {\n\n Content::Amp { status, head, body }\n\n } else {\n\n Content::Html { status, head, body }\n\n };\n\n\n\n Ok(ctx)\n\n } else {\n\n not_found(ctx).await\n\n }\n\n}\n", "file_path": "wasm/src/pages/post.rs", "rank": 48, "score": 15.007256310287572 }, { "content": "use wasm_bindgen::prelude::*;\n\n\n\nuse crate::context::{Content, Context};\n\n\n\npub async fn version(mut ctx: Context) -> Result<Context, JsValue> {\n\n ctx.content = Content::Txt {\n\n status: 200,\n\n body: ctx.get_version(),\n\n };\n\n\n\n Ok(ctx)\n\n}\n", "file_path": "wasm/src/pages/version.rs", "rank": 49, "score": 14.882464164747136 }, { "content": "use maud::html;\n\nuse wasm_bindgen::JsValue;\n\n\n\nuse crate::context::{Content, Context};\n\nuse crate::partials::{\n\n Article, Footer, Header, 
Heading, Mode, OgDescription, Schema, TableOfContent,\n\n};\n\n\n\nuse super::not_found::not_found;\n\n\n\npub async fn about(mut ctx: Context, is_amp: bool) -> Result<Context, JsValue> {\n\n ctx.load_org_meta().await?;\n\n\n\n let key = \"/about\";\n\n\n\n if let Some(meta) = ctx.org_meta.get(key) {\n\n let content = ctx.load_org(key).await?;\n\n let org = orgize::Org::parse(&content);\n\n\n\n let subtitle = html! {\n", "file_path": "wasm/src/pages/about.rs", "rank": 50, "score": 14.715065556774796 }, { "content": " let _ = writeln!(&mut body, \"{base_url}/amp{slug}\");\n\n }\n\n }\n\n\n\n // tags\n\n {\n\n let mut tags = ctx\n\n .org_meta\n\n .values()\n\n .map(|p| p.tags.iter())\n\n .flatten()\n\n .collect::<Vec<_>>();\n\n\n\n tags.sort();\n\n tags.dedup();\n\n\n\n for tag in tags {\n\n let _ = writeln!(&mut body, \"{base_url}/tag/{tag}\");\n\n }\n\n }\n\n\n\n ctx.content = Content::Txt { status: 200, body };\n\n\n\n Ok(ctx)\n\n}\n", "file_path": "wasm/src/pages/sitemap.rs", "rank": 51, "score": 14.677853528661025 }, { "content": " pub org: &'a Org<'a>,\n\n pub ctx: &'a Context,\n\n}\n\n\n\nimpl<'a> Render for Article<'a> {\n\n fn render_to(&self, buffer: &mut String) {\n\n let mut title_n = 0;\n\n let mut handler = DefaultHtmlHandler::default();\n\n\n\n let mut last_char = None;\n\n\n\n for event in self.org.iter() {\n\n match event {\n\n Event::Start(Element::Link(link)) if link.path.starts_with(\"file:\") => {\n\n let key = &link.path[5..];\n\n\n\n let (height, width, style) = self\n\n .ctx\n\n .img_meta\n\n .get(key)\n", "file_path": "wasm/src/partials/article.rs", "rank": 52, "score": 14.486361013626489 }, { "content": "use maud::{html, Markup, Render};\n\n\n\npub struct Header;\n\n\n\nimpl Render for Header {\n\n fn render(&self) -> Markup {\n\n html! 
{\n\n header.header.toolbar {\n\n .wrapper {\n\n a.homepage.link data-router href=\"/\" {\n\n span.logo aria-hidden=\"true\" {\n\n svg xmlns=\"http://www.w3.org/2000/svg\"\n\n width=\"100%\" height=\"100%\"\n\n {\n\n g fill=\"none\" stroke=\"currentColor\" stroke-width=\"1.6\" {\n\n path d=\"M11.6 17.2H4L14.4 1.6l-10 6 7.2 9.6z\" { }\n\n path d=\"M12.4 6.8H20L9.6 22.4l10-6-7.2-9.6z\" { }\n\n }\n\n }\n\n }\n", "file_path": "wasm/src/partials/header.rs", "rank": 53, "score": 13.925201802506415 }, { "content": "use maud::{html, PreEscaped, Render};\n\nuse orgize::{\n\n export::{DefaultHtmlHandler, HtmlEscape, HtmlHandler},\n\n Element, Event, Org,\n\n};\n\nuse std::cmp::min;\n\nuse std::fmt::Write;\n\nuse std::ops::Range;\n\n\n\nuse crate::utils::get_id;\n\nuse crate::Context;\n\n\n\npub enum Mode {\n\n Html,\n\n Rss,\n\n Amp,\n\n}\n\n\n\npub struct Article<'a> {\n\n pub mode: Mode,\n", "file_path": "wasm/src/partials/article.rs", "rank": 54, "score": 13.833814864705845 }, { "content": " let head = html! 
{\n\n title { \"About☆Solomon\" }\n\n meta property=\"og:title\" content=\"About☆Solomon\";\n\n meta property=\"og:type\" content=\"article\";\n\n meta property=\"og:image\" content={ (ctx.base_url)\"/amp-image.jpg\"};\n\n meta property=\"og:url\" content={ (ctx.base_url)\"/about\" };\n\n (OgDescription { org: &org })\n\n @if cfg!(feature = \"worker\") && is_amp {\n\n link rel=\"canonical\" href={ (ctx.base_url)\"/about\" };\n\n script type=\"application/ld+json\" { (Schema { ctx: &ctx, meta }) }\n\n } @else {\n\n link rel=\"amphtml\" href={ (ctx.base_url)\"/amp/about\" };\n\n }\n\n };\n\n\n\n let status = 200;\n\n\n\n ctx.content = if cfg!(feature = \"worker\") && is_amp {\n\n Content::Amp { status, body, head }\n\n } else {\n\n Content::Html { status, body, head }\n\n };\n\n\n\n Ok(ctx)\n\n } else {\n\n not_found(ctx).await\n\n }\n\n}\n", "file_path": "wasm/src/pages/about.rs", "rank": 55, "score": 13.667861072663344 }, { "content": " let level = min(title.level, 6);\n\n\n\n match self.mode {\n\n Mode::Amp | Mode::Html => {\n\n title_n += 1;\n\n let id = get_id(title_n, &title.raw);\n\n\n\n let _ = write!(\n\n buffer,\n\n r##\"<a class=\"anchor\" href=\"#{id}\"></a><h{level} id=\"{id}\">\"##\n\n );\n\n }\n\n Mode::Rss => {\n\n let _ = write!(buffer, r##\"<h{level}>\"##);\n\n }\n\n }\n\n }\n\n\n\n // code highlighting\n\n Event::Start(Element::InlineSrc(inline_src)) => html! {\n", "file_path": "wasm/src/partials/article.rs", "rank": 56, "score": 13.145172344388474 }, { "content": " code class={ \"lang-\"(inline_src.lang) } {(PreEscaped(\n\n self.ctx.highlighter.highlight(&inline_src.body, &inline_src.lang)\n\n ))}\n\n }\n\n .render_to(buffer),\n\n\n\n Event::Start(Element::SourceBlock(block)) => html! 
{\n\n pre {\n\n code class={ \"lang-\"(block.language) } {(PreEscaped(\n\n self.ctx.highlighter.highlight(&block.contents, &block.language)\n\n ))}\n\n }\n\n }\n\n .render_to(buffer),\n\n\n\n Event::Start(element) => {\n\n let _ = handler.start(unsafe { buffer.as_mut_vec() }, element);\n\n }\n\n Event::End(element) => {\n\n let _ = handler.end(unsafe { buffer.as_mut_vec() }, element);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "wasm/src/partials/article.rs", "rank": 57, "score": 13.126992187251112 }, { "content": " (Article {\n\n mode: if cfg!(feature = \"worker\") && is_amp { Mode::Amp } else { Mode::Html },\n\n org: &org,\n\n ctx: &ctx\n\n })\n\n (UpNext { prev, next })\n\n }\n\n (Footer)\n\n };\n\n\n\n let head = html! {\n\n title { (meta.title)\"☆Solomon\" }\n\n meta property=\"og:title\" content={ (meta.title)\"☆Solomon\" };\n\n meta property=\"og:type\" content=\"article\";\n\n meta property=\"og:image\" content={ (ctx.base_url)\"/amp-image.jpg\"};\n\n meta property=\"og:url\" content={ (ctx.base_url)(meta.slug)};\n\n (OgDescription { org: &org })\n\n @if cfg!(feature = \"worker\") && is_amp {\n\n link rel=\"canonical\" href={ (ctx.base_url)(meta.slug)};\n\n script type=\"application/ld+json\" { (Schema { ctx: &ctx, meta }) }\n", "file_path": "wasm/src/pages/post.rs", "rank": 58, "score": 12.934227498432783 }, { "content": "use maud::{html, Markup, Render};\n\n\n\npub struct Footer;\n\n\n\nimpl Render for Footer {\n\n fn render(&self) -> Markup {\n\n html! 
{\n\n footer.footer.toolbar {\n\n .wrapper {\n\n div.links {\n\n a.link href=\"/rss\" { \"RSS\" }\n\n span.separator aria-hidden=\"true\" { \"/\" }\n\n a.link href=\"https://github.com/PoiScript/solomon\" { \"GitHub\" }\n\n }\n\n span.spacer { }\n\n span.license { \"CC-BY-SA-4.0\" }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "wasm/src/partials/footer.rs", "rank": 59, "score": 12.695160524545049 }, { "content": " src: \"https://ekyu.moe\",\n\n name: \"Equim\",\n\n },\n\n Link {\n\n id: \"nanpuyue\",\n\n src: \"https://blog.nanpuyue.com\",\n\n name: \"南浦月\",\n\n },\n\n Link {\n\n id: \"szclsya\",\n\n src: \"https://szclsya.me\",\n\n name: \"Leo Shen\",\n\n },\n\n];\n\n\n\npub async fn link(mut ctx: Context) -> Result<Context, JsValue> {\n\n ctx.content = Content::Html {\n\n status: 200,\n\n head: html! {\n\n title { \"Link☆Solomon\" }\n", "file_path": "wasm/src/pages/link.rs", "rank": 60, "score": 12.325759579696026 }, { "content": "mod about;\n\nmod home;\n\nmod link;\n\nmod not_found;\n\nmod post;\n\nmod rss;\n\nmod sitemap;\n\nmod tag;\n\nmod version;\n\n\n\npub use about::*;\n\npub use home::*;\n\npub use link::*;\n\npub use not_found::*;\n\npub use post::*;\n\npub use rss::*;\n\npub use sitemap::*;\n\npub use tag::*;\n\npub use version::*;\n", "file_path": "wasm/src/pages/mod.rs", "rank": 61, "score": 11.705915695309466 }, { "content": " height=[height]\n\n alt=[alt];\n\n },\n\n Mode::Rss => {\n\n img loading=\"lazy\"\n\n src={ (self.ctx.base_url)(path) }\n\n width=[width]\n\n height=[height]\n\n alt=[alt];\n\n },\n\n Mode::Amp => {\n\n amp-img style=[style]\n\n src={ (self.ctx.base_url)(path) }\n\n width=[width]\n\n height=[height]\n\n alt=[alt] {}\n\n }\n\n }\n\n @if let Some(alt) = alt {\n\n figcaption {(alt)}\n", "file_path": "wasm/src/partials/article.rs", "rank": 62, "score": 11.196563483550339 }, { "content": "mod article;\n\nmod footer;\n\nmod header;\n\nmod heading;\n\nmod og_description;\n\nmod post_item;\n\nmod schema;\n\nmod 
table_of_content;\n\nmod up_next;\n\n\n\npub use article::*;\n\npub use footer::*;\n\npub use header::*;\n\npub use heading::*;\n\npub use og_description::*;\n\npub use post_item::*;\n\npub use schema::*;\n\npub use table_of_content::*;\n\npub use up_next::*;\n", "file_path": "wasm/src/partials/mod.rs", "rank": 63, "score": 11.037170249615567 }, { "content": " \"publisher\": {\n\n \"@type\": \"Organization\",\n\n \"name\": \"Solomon\",\n\n \"logo\": {\n\n \"@type\": \"ImageObject\",\n\n \"url\": format!(\"{}/amp-logo.jpg\", self.ctx.base_url),\n\n \"height\": 60usize,\n\n \"width\": 600usize\n\n }\n\n },\n\n \"datePublished\": self.meta.published.to_rfc2822(),\n\n \"dateModified\": self.meta.updated.map(|dt| dt.to_rfc2822()),\n\n \"author\": {\n\n \"@type\": \"Person\",\n\n \"name\": \"PoiScript\"\n\n }\n\n };\n\n\n\n let _ = value.write(unsafe { buffer.as_mut_vec() });\n\n }\n\n}\n", "file_path": "wasm/src/partials/schema.rs", "rank": 64, "score": 10.734135413240985 }, { "content": "use maud::{html, Markup, Render};\n\n\n\nuse crate::context::OrgMeta;\n\n\n", "file_path": "wasm/src/partials/up_next.rs", "rank": 65, "score": 10.629455898372175 }, { "content": " title { \"Home☆Solomon\" }\n\n meta property=\"og:title\" content=\"Home☆Solomon\";\n\n meta property=\"og:type\" content=\"website\";\n\n meta property=\"og:image\" content={ (ctx.base_url)\"/amp-image.jpg\"};\n\n meta property=\"og:url\" content={ (ctx.base_url) };\n\n },\n\n body: html! {\n\n (Header)\n\n main.main {\n\n @for post in posts {\n\n (PostItem { meta: post })\n\n }\n\n }\n\n (Footer)\n\n },\n\n };\n\n\n\n Ok(ctx)\n\n}\n", "file_path": "wasm/src/pages/home.rs", "rank": 66, "score": 10.265023528658483 }, { "content": " meta property=\"og:title\" content=\"Link☆Solomon\";\n\n meta property=\"og:type\" content=\"website\";\n\n meta property=\"og:image\" content={ (ctx.base_url)\"/amp-image.jpg\"};\n\n meta property=\"og:url\" content={ (ctx.base_url)\"/link\" };\n\n },\n\n body: html! 
{\n\n (Header)\n\n main.main {\n\n (Heading { title: \"Link\", subtitle: None })\n\n .\"link-list\" {\n\n @for link in LINKS.iter() {\n\n a.item target=\"_blank\" href=(link.src) {\n\n img.profile\n\n src={ (ctx.base_url)\"/avatars/\"(link.id)\".jpg\"}\n\n alt={ \"avatar for \"(link.name) };\n\n .text {\n\n .line { (link.name) }\n\n .line { (link.src) }\n\n }\n\n }\n", "file_path": "wasm/src/pages/link.rs", "rank": 67, "score": 10.26092317412051 }, { "content": "\n\n Event::Start(Element::Document { .. }) => match self.mode {\n\n Mode::Amp | Mode::Html => {\n\n let _ = write!(buffer, \"<article>\");\n\n }\n\n Mode::Rss => {\n\n let _ = write!(buffer, \"<![CDATA[\");\n\n }\n\n },\n\n\n\n Event::End(Element::Document { .. }) => match self.mode {\n\n Mode::Amp | Mode::Html => {\n\n let _ = write!(buffer, \"</article>\");\n\n }\n\n Mode::Rss => {\n\n let _ = write!(buffer, \"]]>\");\n\n }\n\n },\n\n\n\n Event::Start(Element::Title(title)) => {\n", "file_path": "wasm/src/partials/article.rs", "rank": 68, "score": 9.86993105497155 }, { "content": "\n\n [\"about\"] => pages::about(ctx, false).await,\n\n\n\n #[cfg(feature = \"worker\")]\n\n [\"amp\", \"about\"] => pages::about(ctx, true).await,\n\n\n\n [\"post\", slug] => pages::post(ctx, slug, false).await,\n\n\n\n #[cfg(feature = \"worker\")]\n\n [\"amp\", \"post\", slug] => pages::post(ctx, slug, true).await,\n\n\n\n [\"tag\", tag] => pages::tag(ctx, tag).await,\n\n\n\n [\"link\"] => pages::link(ctx).await,\n\n\n\n #[cfg(feature = \"worker\")]\n\n [\"rss\"] | [\"feed.xml\"] | [\"atom.xml\"] => pages::rss(ctx).await,\n\n\n\n #[cfg(feature = \"worker\")]\n\n [\"sitemap\"] => pages::sitemap(ctx).await,\n\n\n\n #[cfg(feature = \"worker\")]\n\n [\"version\"] => pages::version(ctx).await,\n\n\n\n _ => pages::not_found(ctx).await,\n\n }\n\n}\n", "file_path": "wasm/src/lib.rs", "rank": 69, "score": 9.862576490686 }, { "content": "use wasm_bindgen::prelude::*;\n\n\n\nmod context;\n\nmod pages;\n\nmod partials;\n\nmod 
utils;\n\n\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\nuse context::Context;\n\n\n\n#[wasm_bindgen]\n\npub async fn render(url: String, ctx: Context) -> Result<Context, JsValue> {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n\n\n let segs: Vec<_> = url.trim_matches('/').split('/').take(3).collect();\n\n\n\n match segs[..] {\n\n [\"\"] => pages::home(ctx).await,\n", "file_path": "wasm/src/lib.rs", "rank": 70, "score": 9.74734731897069 }, { "content": " .map(|meta| {\n\n (\n\n Some(meta.height),\n\n Some(meta.width),\n\n Some(html! { \"max-height:\"(meta.height)\"px;max-width:\"(meta.width)\"px;\" }),\n\n )\n\n })\n\n .unwrap_or_default();\n\n\n\n let path = key;\n\n let alt = link.desc.as_ref();\n\n\n\n html! {\n\n figure {\n\n @match self.mode {\n\n Mode::Html => {\n\n img style=[style]\n\n loading=\"lazy\"\n\n src={ (self.ctx.base_url)(path) }\n\n width=[width]\n", "file_path": "wasm/src/partials/article.rs", "rank": 71, "score": 8.863449034425079 }, { "content": "}\n\n\n\npub struct UpNext<'a> {\n\n pub prev: Option<&'a OrgMeta>,\n\n pub next: Option<&'a OrgMeta>,\n\n}\n\n\n\nimpl<'a> Render for UpNext<'a> {\n\n fn render(&self) -> Markup {\n\n html! {\n\n .up-next {\n\n .nav.start {\n\n @if let Some(prev) = self.prev {\n\n ( up_next_prev(prev) )\n\n }\n\n }\n\n .nav.end {\n\n @if let Some(next) = self.next {\n\n ( up_next_next(next) )\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "wasm/src/partials/up_next.rs", "rank": 72, "score": 8.80346775893746 }, { "content": " for line in value.lines() {\n\n let text = line.trim();\n\n description += text;\n\n }\n\n }\n\n\n\n Event::Start(Element::Link(link)) => {\n\n description += link.desc.as_ref().unwrap_or(&link.path)\n\n }\n\n\n\n Event::End(Element::Paragraph { .. }) | Event::End(Element::Title(_)) => {\n\n description += \" \";\n\n }\n\n\n\n _ => {}\n\n }\n\n }\n\n\n\n html! 
{\n\n @if !description.is_empty() {\n\n meta property=\"og:description\" content={(description)};\n\n }\n\n }\n\n }\n\n}\n", "file_path": "wasm/src/partials/og_description.rs", "rank": 73, "score": 5.399222174770215 }, { "content": " let text = line.trim();\n\n let first_char = text.chars().next();\n\n if should_insert_space(last_char, first_char) {\n\n let _ = write!(buffer, \" \");\n\n }\n\n last_char = text.chars().last();\n\n\n\n let _ = write!(buffer, \"{}\", HtmlEscape(text));\n\n }\n\n }\n\n\n\n Event::Start(Element::Verbatim { value })\n\n | Event::Start(Element::Code { value }) => {\n\n let text = value.trim();\n\n if should_insert_space(last_char, text.chars().next()) {\n\n let _ = write!(buffer, \" \");\n\n }\n\n last_char = text.chars().last();\n\n let _ = write!(buffer, \"<code>{}</code>\", HtmlEscape(text));\n\n }\n", "file_path": "wasm/src/partials/article.rs", "rank": 74, "score": 4.805009976031895 }, { "content": "use std::process::Command;\n\n\n", "file_path": "wasm/build.rs", "rank": 75, "score": 2.721719325131964 }, { "content": "use std::{collections::hash_map::DefaultHasher, hash::Hasher};\n\n\n", "file_path": "wasm/src/utils.rs", "rank": 76, "score": 2.5511848132937764 }, { "content": " r#\"<a target=\"_blank\" rel=\"noreferrer noopener\" href=\"{}\">\"#,\n\n HtmlEscape(&link.path)\n\n );\n\n\n\n for line in text.lines() {\n\n let text = line.trim();\n\n let first_char = text.chars().next();\n\n if should_insert_space(last_char, first_char) {\n\n let _ = write!(buffer, \" \");\n\n }\n\n last_char = text.chars().last();\n\n\n\n let _ = write!(buffer, \"{}\", HtmlEscape(text));\n\n }\n\n\n\n let _ = write!(buffer, \"</a>\");\n\n }\n\n\n\n Event::Start(Element::Text { value }) => {\n\n for line in value.lines() {\n", "file_path": "wasm/src/partials/article.rs", "rank": 77, "score": 2.1050227337004994 } ]
Rust
tests/poll.rs
Licenser/mio
0d8f48d24c577e5379c936e72610a997aa716e8c
use mio::net::{TcpListener, TcpStream}; use mio::*; use std::net; use std::sync::{Arc, Barrier}; use std::thread::{self, sleep}; use std::time::Duration; mod util; use util::{any_local_address, assert_send, assert_sync, init}; #[test] fn is_send_and_sync() { assert_sync::<Poll>(); assert_send::<Poll>(); assert_sync::<Registry>(); assert_send::<Registry>(); } #[test] fn run_once_with_nothing() { init(); let mut events = Events::with_capacity(16); let mut poll = Poll::new().unwrap(); poll.poll(&mut events, Some(Duration::from_millis(100))) .unwrap(); } #[test] fn add_then_drop() { init(); let mut events = Events::with_capacity(16); let l = TcpListener::bind(any_local_address()).unwrap(); let mut poll = Poll::new().unwrap(); poll.registry() .register(&l, Token(1), Interests::READABLE | Interests::WRITABLE) .unwrap(); drop(l); poll.poll(&mut events, Some(Duration::from_millis(100))) .unwrap(); } #[test] fn zero_duration_polls_events() { init(); let mut poll = Poll::new().unwrap(); let mut events = Events::with_capacity(16); let listener = net::TcpListener::bind(any_local_address()).unwrap(); let addr = listener.local_addr().unwrap(); let streams: Vec<TcpStream> = (0..3) .map(|n| { let stream = TcpStream::connect(addr).unwrap(); poll.registry() .register(&stream, Token(n), Interests::WRITABLE) .unwrap(); stream }) .collect(); sleep(Duration::from_millis(10)); poll.poll(&mut events, Some(Duration::from_nanos(0))) .unwrap(); assert!(!events.is_empty()); drop(streams); drop(listener); } #[test] fn test_poll_closes_fd() { init(); for _ in 0..2000 { let mut poll = Poll::new().unwrap(); let mut events = Events::with_capacity(4); poll.poll(&mut events, Some(Duration::from_millis(0))) .unwrap(); drop(poll); } } #[test] fn test_drop_cancels_interest_and_shuts_down() { init(); use mio::net::TcpStream; use std::io; use std::io::Read; use std::net::TcpListener; use std::thread; let l = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = l.local_addr().unwrap(); let t = 
thread::spawn(move || { let mut s = l.incoming().next().unwrap().unwrap(); s.set_read_timeout(Some(Duration::from_secs(5))) .expect("set_read_timeout"); let r = s.read(&mut [0; 16]); match r { Ok(_) => (), Err(e) => { if e.kind() != io::ErrorKind::UnexpectedEof { panic!(e); } } } }); let mut poll = Poll::new().unwrap(); let mut s = TcpStream::connect(addr).unwrap(); poll.registry() .register(&s, Token(1), Interests::READABLE | Interests::WRITABLE) .unwrap(); let mut events = Events::with_capacity(16); 'outer: loop { poll.poll(&mut events, None).unwrap(); for event in &events { if event.token() == Token(1) { break 'outer; } } } let mut b = [0; 1024]; match s.read(&mut b) { Ok(_) => panic!("unexpected ok"), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => (), Err(e) => panic!("unexpected error: {:?}", e), } drop(s); t.join().unwrap(); } #[test] fn test_registry_behind_arc() { init(); let mut poll = Poll::new().unwrap(); let registry = Arc::new(poll.registry().try_clone().unwrap()); let mut events = Events::with_capacity(128); let listener = TcpListener::bind(any_local_address()).unwrap(); let addr = listener.local_addr().unwrap(); let barrier = Arc::new(Barrier::new(3)); let registry2 = Arc::clone(&registry); let registry3 = Arc::clone(&registry); let barrier2 = Arc::clone(&barrier); let barrier3 = Arc::clone(&barrier); let handle1 = thread::spawn(move || { registry2 .register(&listener, Token(0), Interests::READABLE) .unwrap(); barrier2.wait(); }); let handle2 = thread::spawn(move || { let stream = TcpStream::connect(addr).unwrap(); registry3 .register(&stream, Token(1), Interests::READABLE | Interests::WRITABLE) .unwrap(); barrier3.wait(); }); poll.poll(&mut events, Some(Duration::from_millis(1000))) .unwrap(); assert!(events.iter().count() >= 1); barrier.wait(); handle1.join().unwrap(); handle2.join().unwrap(); } #[test] #[cfg(any(target_os = "linux", target_os = "windows"))] pub fn test_double_register() { init(); let poll = Poll::new().unwrap(); let l = 
TcpListener::bind("127.0.0.1:0".parse().unwrap()).unwrap(); poll.registry() .register(&l, Token(0), Interests::READABLE) .unwrap(); assert!(poll .registry() .register(&l, Token(1), Interests::READABLE) .is_err()); }
use mio::net::{TcpListener, TcpStream}; use mio::*; use std::net; use std::sync::{Arc, Barrier}; use std::thread::{self, sleep}; use std::time::Duration; mod util; use util::{any_local_address, assert_send, assert_sync, init}; #[test] fn is_send_and_sync() { assert_sync::<Poll>(); assert_send::<Poll>(); assert_sync::<Registry>(); assert_send::<Registry>(); } #[test]
#[test] fn add_then_drop() { init(); let mut events = Events::with_capacity(16); let l = TcpListener::bind(any_local_address()).unwrap(); let mut poll = Poll::new().unwrap(); poll.registry() .register(&l, Token(1), Interests::READABLE | Interests::WRITABLE) .unwrap(); drop(l); poll.poll(&mut events, Some(Duration::from_millis(100))) .unwrap(); } #[test] fn zero_duration_polls_events() { init(); let mut poll = Poll::new().unwrap(); let mut events = Events::with_capacity(16); let listener = net::TcpListener::bind(any_local_address()).unwrap(); let addr = listener.local_addr().unwrap(); let streams: Vec<TcpStream> = (0..3) .map(|n| { let stream = TcpStream::connect(addr).unwrap(); poll.registry() .register(&stream, Token(n), Interests::WRITABLE) .unwrap(); stream }) .collect(); sleep(Duration::from_millis(10)); poll.poll(&mut events, Some(Duration::from_nanos(0))) .unwrap(); assert!(!events.is_empty()); drop(streams); drop(listener); } #[test] fn test_poll_closes_fd() { init(); for _ in 0..2000 { let mut poll = Poll::new().unwrap(); let mut events = Events::with_capacity(4); poll.poll(&mut events, Some(Duration::from_millis(0))) .unwrap(); drop(poll); } } #[test] fn test_drop_cancels_interest_and_shuts_down() { init(); use mio::net::TcpStream; use std::io; use std::io::Read; use std::net::TcpListener; use std::thread; let l = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = l.local_addr().unwrap(); let t = thread::spawn(move || { let mut s = l.incoming().next().unwrap().unwrap(); s.set_read_timeout(Some(Duration::from_secs(5))) .expect("set_read_timeout"); let r = s.read(&mut [0; 16]); match r { Ok(_) => (), Err(e) => { if e.kind() != io::ErrorKind::UnexpectedEof { panic!(e); } } } }); let mut poll = Poll::new().unwrap(); let mut s = TcpStream::connect(addr).unwrap(); poll.registry() .register(&s, Token(1), Interests::READABLE | Interests::WRITABLE) .unwrap(); let mut events = Events::with_capacity(16); 'outer: loop { poll.poll(&mut events, None).unwrap(); for 
event in &events { if event.token() == Token(1) { break 'outer; } } } let mut b = [0; 1024]; match s.read(&mut b) { Ok(_) => panic!("unexpected ok"), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => (), Err(e) => panic!("unexpected error: {:?}", e), } drop(s); t.join().unwrap(); } #[test] fn test_registry_behind_arc() { init(); let mut poll = Poll::new().unwrap(); let registry = Arc::new(poll.registry().try_clone().unwrap()); let mut events = Events::with_capacity(128); let listener = TcpListener::bind(any_local_address()).unwrap(); let addr = listener.local_addr().unwrap(); let barrier = Arc::new(Barrier::new(3)); let registry2 = Arc::clone(&registry); let registry3 = Arc::clone(&registry); let barrier2 = Arc::clone(&barrier); let barrier3 = Arc::clone(&barrier); let handle1 = thread::spawn(move || { registry2 .register(&listener, Token(0), Interests::READABLE) .unwrap(); barrier2.wait(); }); let handle2 = thread::spawn(move || { let stream = TcpStream::connect(addr).unwrap(); registry3 .register(&stream, Token(1), Interests::READABLE | Interests::WRITABLE) .unwrap(); barrier3.wait(); }); poll.poll(&mut events, Some(Duration::from_millis(1000))) .unwrap(); assert!(events.iter().count() >= 1); barrier.wait(); handle1.join().unwrap(); handle2.join().unwrap(); } #[test] #[cfg(any(target_os = "linux", target_os = "windows"))] pub fn test_double_register() { init(); let poll = Poll::new().unwrap(); let l = TcpListener::bind("127.0.0.1:0".parse().unwrap()).unwrap(); poll.registry() .register(&l, Token(0), Interests::READABLE) .unwrap(); assert!(poll .registry() .register(&l, Token(1), Interests::READABLE) .is_err()); }
fn run_once_with_nothing() { init(); let mut events = Events::with_capacity(16); let mut poll = Poll::new().unwrap(); poll.poll(&mut events, Some(Duration::from_millis(100))) .unwrap(); }
function_block-full_function
[ { "content": "pub fn init() {\n\n static INIT: Once = Once::new();\n\n\n\n INIT.call_once(|| {\n\n env_logger::try_init().expect(\"unable to initialise logger\");\n\n })\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 0, "score": 197869.68898741424 }, { "content": "/// Initialise the network stack for Windows.\n\nfn init() {\n\n static INIT: Once = Once::new();\n\n INIT.call_once(|| {\n\n // Let standard library call `WSAStartup` for us, we can't do it\n\n // ourselves because otherwise using any type in `std::net` would panic\n\n // when it tries to call `WSAStartup` a second time.\n\n drop(std::net::UdpSocket::bind(\"127.0.0.1:0\"));\n\n });\n\n}\n\n\n", "file_path": "src/sys/windows/mod.rs", "rank": 1, "score": 142485.2769129411 }, { "content": "pub fn assert_send<T: Send>() {}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 2, "score": 142465.9890403588 }, { "content": "pub fn assert_sync<T: Sync>() {}\n", "file_path": "tests/util/mod.rs", "rank": 3, "score": 142465.9890403588 }, { "content": "/// Bind to any port on localhost.\n\npub fn any_local_address() -> SocketAddr {\n\n \"127.0.0.1:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 4, "score": 125025.72031421952 }, { "content": "/// Bind to any port on localhost, using a IPv6 address.\n\npub fn any_local_ipv6_address() -> SocketAddr {\n\n \"[::1]:0\".parse().unwrap()\n\n}\n", "file_path": "tests/util/mod.rs", "rank": 5, "score": 120712.7057114367 }, { "content": "#[test]\n\nfn is_send_and_sync() {\n\n assert_send::<Waker>();\n\n assert_sync::<Waker>();\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 6, "score": 112329.72213456288 }, { "content": "#[test]\n\nfn is_send_and_sync() {\n\n assert_send::<TcpListener>();\n\n assert_sync::<TcpListener>();\n\n\n\n assert_send::<TcpStream>();\n\n assert_sync::<TcpStream>();\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 8, "score": 112329.72213456288 }, { "content": "#[test]\n\nfn is_send_and_sync() {\n\n 
assert_send::<UdpSocket>();\n\n assert_sync::<UdpSocket>();\n\n}\n\n\n\npub struct UdpHandlerSendRecv {\n\n tx: UdpSocket,\n\n rx: UdpSocket,\n\n msg: &'static str,\n\n buf: Bytes,\n\n rx_buf: BytesMut,\n\n connected: bool,\n\n shutdown: bool,\n\n}\n\n\n\nimpl UdpHandlerSendRecv {\n\n fn new(tx: UdpSocket, rx: UdpSocket, connected: bool, msg: &'static str) -> UdpHandlerSendRecv {\n\n UdpHandlerSendRecv {\n\n tx,\n\n rx,\n\n msg,\n\n buf: Bytes::from_static(msg.as_bytes()),\n\n rx_buf: BytesMut::with_capacity(1024),\n\n connected,\n\n shutdown: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 9, "score": 109304.61685183244 }, { "content": "pub fn expect_no_events(poll: &mut Poll, events: &mut Events) {\n\n poll.poll(events, Some(Duration::from_millis(50)))\n\n .expect(\"unable to poll\");\n\n assert!(events.is_empty(), \"received events, but didn't expect any\");\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 10, "score": 103588.70060359847 }, { "content": "// Not all functions are used by all tests.\n\n#![allow(dead_code)]\n\n\n\nuse std::io::{self, Read, Write};\n\nuse std::net::SocketAddr;\n\nuse std::sync::Once;\n\nuse std::time::Duration;\n\n\n\nuse bytes::{Buf, BufMut};\n\nuse mio::{Events, Poll};\n\n\n", "file_path": "tests/util/mod.rs", "rank": 11, "score": 102459.68785231849 }, { "content": " }\n\n}\n\n\n\nimpl<T: Write> TryWrite for T {\n\n fn try_write(&mut self, src: &[u8]) -> io::Result<Option<usize>> {\n\n self.write(src).map_non_block()\n\n }\n\n}\n\n\n\n/*\n\n *\n\n * ===== Helpers =====\n\n *\n\n */\n\n\n", "file_path": "tests/util/mod.rs", "rank": 12, "score": 102448.97954427717 }, { "content": "#[test]\n\nfn is_tests() {\n\n assert!(Interests::READABLE.is_readable());\n\n assert!(!Interests::READABLE.is_writable());\n\n assert!(!Interests::WRITABLE.is_readable());\n\n assert!(Interests::WRITABLE.is_writable());\n\n assert!(!Interests::WRITABLE.is_aio());\n\n assert!(!Interests::WRITABLE.is_lio());\n\n}\n\n\n", 
"file_path": "tests/interests.rs", "rank": 13, "score": 97842.60983156334 }, { "content": "#[test]\n\nfn test_write_shutdown() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n let listener = std::net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n let interests = Interests::READABLE | Interests::WRITABLE;\n\n\n\n let client = TcpStream::connect(addr).unwrap();\n\n poll.registry()\n\n .register(&client, Token(0), interests)\n\n .unwrap();\n\n\n\n let (socket, _) = listener.accept().unwrap();\n\n\n\n wait!(poll, is_writable, false);\n\n\n\n let mut events = Events::with_capacity(16);\n", "file_path": "tests/tcp.rs", "rank": 14, "score": 91964.92285428096 }, { "content": "#[test]\n\nfn test_registering_after_deregistering() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(8);\n\n\n\n let server = TcpListener::bind(any_local_address()).unwrap();\n\n\n\n poll.registry()\n\n .register(&server, SERVER, Interests::READABLE)\n\n .unwrap();\n\n\n\n poll.registry().deregister(&server).unwrap();\n\n\n\n poll.registry()\n\n .register(&server, SERVER, Interests::READABLE)\n\n .unwrap();\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .unwrap();\n\n assert!(events.is_empty());\n\n}\n", "file_path": "tests/registering.rs", "rank": 15, "score": 91964.92285428096 }, { "content": "#[test]\n\nfn bit_or() {\n\n let interests = Interests::READABLE | Interests::WRITABLE;\n\n assert!(interests.is_readable());\n\n assert!(interests.is_writable());\n\n}\n\n\n", "file_path": "tests/interests.rs", "rank": 18, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn peek() {\n\n init();\n\n\n\n const N: usize = 16 * 1024 * 1024;\n\n struct H {\n\n amt: usize,\n\n socket: TcpStream,\n\n shutdown: bool,\n\n }\n\n\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || 
{\n\n let mut s = l.accept().unwrap().0;\n\n let b = [0; 1024];\n\n let mut amt = 0;\n\n while amt < N {\n\n amt += s.write(&b).unwrap();\n\n }\n", "file_path": "tests/tcp.rs", "rank": 19, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn waker() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n\n\n waker.wake().expect(\"unable to wake\");\n\n expect_waker_event(&mut poll, &mut events, token);\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 20, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn read() {\n\n init();\n\n\n\n const N: usize = 16 * 1024 * 1024;\n\n struct H {\n\n amt: usize,\n\n socket: TcpStream,\n\n shutdown: bool,\n\n }\n\n\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || {\n\n let mut s = l.accept().unwrap().0;\n\n let b = [0; 1024];\n\n let mut amt = 0;\n\n while amt < N {\n\n amt += s.write(&b).unwrap();\n\n }\n", "file_path": "tests/tcp.rs", "rank": 21, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn connect() {\n\n init();\n\n\n\n struct H {\n\n hit: u32,\n\n shutdown: bool,\n\n }\n\n\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let (tx, rx) = channel();\n\n let (tx2, rx2) = channel();\n\n let t = thread::spawn(move || {\n\n let s = l.accept().unwrap();\n\n rx.recv().unwrap();\n\n drop(s);\n\n tx2.send(()).unwrap();\n\n });\n\n\n", "file_path": "tests/tcp.rs", "rank": 22, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn accept() {\n\n init();\n\n\n\n struct H {\n\n hit: bool,\n\n listener: TcpListener,\n\n shutdown: bool,\n\n }\n\n\n\n let l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = 
l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || {\n\n net::TcpStream::connect(addr).unwrap();\n\n });\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n poll.registry()\n\n .register(&l, Token(1), Interests::READABLE)\n", "file_path": "tests/tcp.rs", "rank": 23, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn issue_776() {\n\n init();\n\n\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || {\n\n let mut s = l.accept().expect(\"accept\").0;\n\n s.set_read_timeout(Some(Duration::from_secs(5)))\n\n .expect(\"set_read_timeout\");\n\n let _ = s.read(&mut [0; 16]).expect(\"read\");\n\n });\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut s = TcpStream::connect(addr).unwrap();\n\n\n\n poll.registry()\n\n .register(&s, Token(1), Interests::READABLE | Interests::WRITABLE)\n\n .unwrap();\n\n let mut events = Events::with_capacity(16);\n", "file_path": "tests/regressions.rs", "rank": 24, "score": 88722.19343392942 }, { "content": "#[test]\n\nfn write() {\n\n init();\n\n\n\n const N: usize = 16 * 1024 * 1024;\n\n struct H {\n\n amt: usize,\n\n socket: TcpStream,\n\n shutdown: bool,\n\n }\n\n\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || {\n\n let mut s = l.accept().unwrap().0;\n\n let mut b = [0; 1024];\n\n let mut amt = 0;\n\n while amt < N {\n\n amt += s.read(&mut b).unwrap();\n\n }\n", "file_path": "tests/tcp.rs", "rank": 25, "score": 88722.19343392942 }, { "content": "pub trait TryWrite {\n\n fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> io::Result<Option<usize>>\n\n where\n\n Self: Sized,\n\n {\n\n let res = self.try_write(buf.bytes());\n\n\n\n if let Ok(Some(cnt)) = res {\n\n buf.advance(cnt);\n\n }\n\n\n\n res\n\n }\n\n\n\n fn try_write(&mut self, buf: &[u8]) -> io::Result<Option<usize>>;\n\n}\n\n\n\nimpl<T: Read> TryRead for T {\n\n fn try_read(&mut 
self, dst: &mut [u8]) -> io::Result<Option<usize>> {\n\n self.read(dst).map_non_block()\n", "file_path": "tests/util/mod.rs", "rank": 26, "score": 88480.89191704756 }, { "content": "pub trait TryRead {\n\n fn try_read_buf<B: BufMut>(&mut self, buf: &mut B) -> io::Result<Option<usize>>\n\n where\n\n Self: Sized,\n\n {\n\n // Reads the length of the slice supplied by buf.mut_bytes into the buffer\n\n // This is not guaranteed to consume an entire datagram or segment.\n\n // If your protocol is msg based (instead of continuous stream) you should\n\n // ensure that your buffer is large enough to hold an entire segment (1532 bytes if not jumbo\n\n // frames)\n\n let res = self.try_read(unsafe { buf.bytes_mut() });\n\n\n\n if let Ok(Some(cnt)) = res {\n\n unsafe {\n\n buf.advance_mut(cnt);\n\n }\n\n }\n\n\n\n res\n\n }\n\n\n\n fn try_read(&mut self, buf: &mut [u8]) -> io::Result<Option<usize>>;\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 27, "score": 88480.89191704756 }, { "content": "#[test]\n\npub fn test_multicast() {\n\n init();\n\n\n\n debug!(\"Starting TEST_UDP_CONNECTIONLESS\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n let tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let rx = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n info!(\"Joining group 227.1.1.100\");\n\n let any = \"0.0.0.0\".parse().unwrap();\n\n rx.join_multicast_v4(\"227.1.1.100\".parse().unwrap(), any)\n\n .unwrap();\n\n\n\n info!(\"Joining group 227.1.1.101\");\n\n rx.join_multicast_v4(\"227.1.1.101\".parse().unwrap(), any)\n\n .unwrap();\n\n\n\n info!(\"Registering SENDER\");\n\n poll.registry()\n", "file_path": "tests/udp_socket.rs", "rank": 29, "score": 86702.2537490665 }, { "content": "#[test]\n\npub fn test_echo_server() {\n\n init();\n\n\n\n debug!(\"Starting TEST_ECHO_SERVER\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n let srv = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = srv.local_addr().unwrap();\n\n\n\n info!(\"listen for 
connections\");\n\n poll.registry()\n\n .register(&srv, SERVER, Interests::READABLE)\n\n .unwrap();\n\n\n\n let sock = TcpStream::connect(addr).unwrap();\n\n\n\n // Connect to the server\n\n poll.registry()\n\n .register(&sock, CLIENT, Interests::WRITABLE)\n\n .unwrap();\n", "file_path": "tests/tcp.rs", "rank": 30, "score": 86702.2537490665 }, { "content": "#[test]\n\npub fn test_register_deregister() {\n\n init();\n\n\n\n debug!(\"Starting TEST_REGISTER_DEREGISTER\");\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(1024);\n\n\n\n let server = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = server.local_addr().unwrap();\n\n\n\n info!(\"register server socket\");\n\n poll.registry()\n\n .register(&server, SERVER, Interests::READABLE)\n\n .unwrap();\n\n\n\n let client = TcpStream::connect(addr).unwrap();\n\n\n\n // Register client socket only as writable\n\n poll.registry()\n\n .register(&client, CLIENT, Interests::READABLE)\n", "file_path": "tests/registering.rs", "rank": 31, "score": 86702.2537490665 }, { "content": "#[test]\n\nfn listen_then_close() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n poll.registry()\n\n .register(&l, Token(1), Interests::READABLE)\n\n .unwrap();\n\n drop(l);\n\n\n\n let mut events = Events::with_capacity(128);\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .unwrap();\n\n\n\n for event in &events {\n\n if event.token() == Token(1) {\n\n panic!(\"recieved ready() on a closed TcpListener\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 33, "score": 85378.7148108821 }, { "content": "#[test]\n\nfn fmt_debug() {\n\n assert_eq!(format!(\"{:?}\", Interests::READABLE), \"READABLE\");\n\n assert_eq!(format!(\"{:?}\", Interests::WRITABLE), \"WRITABLE\");\n\n assert_eq!(\n\n format!(\"{:?}\", Interests::READABLE | Interests::WRITABLE),\n\n \"READABLE | WRITABLE\"\n\n 
);\n\n #[cfg(any(\n\n target_os = \"dragonfly\",\n\n target_os = \"freebsd\",\n\n target_os = \"ios\",\n\n target_os = \"macos\"\n\n ))]\n\n {\n\n assert_eq!(format!(\"{:?}\", Interests::AIO), \"AIO\");\n\n }\n\n #[cfg(any(target_os = \"freebsd\"))]\n\n {\n\n assert_eq!(format!(\"{:?}\", Interests::LIO), \"LIO\");\n\n }\n\n}\n", "file_path": "tests/interests.rs", "rank": 35, "score": 85378.7148108821 }, { "content": "#[test]\n\nfn write_error() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(16);\n\n let (tx, rx) = channel();\n\n\n\n let listener = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n let t = thread::spawn(move || {\n\n let (conn, _addr) = listener.accept().unwrap();\n\n rx.recv().unwrap();\n\n drop(conn);\n\n });\n\n\n\n let mut s = TcpStream::connect(addr).unwrap();\n\n poll.registry()\n\n .register(&s, Token(0), Interests::READABLE | Interests::WRITABLE)\n\n .unwrap();\n\n\n", "file_path": "tests/tcp.rs", "rank": 36, "score": 85378.7148108821 }, { "content": "#[test]\n\nfn connect_then_close() {\n\n init();\n\n\n\n struct H {\n\n listener: TcpListener,\n\n shutdown: bool,\n\n }\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let s = TcpStream::connect(l.local_addr().unwrap()).unwrap();\n\n\n\n poll.registry()\n\n .register(&l, Token(1), Interests::READABLE)\n\n .unwrap();\n\n poll.registry()\n\n .register(&s, Token(2), Interests::READABLE)\n\n .unwrap();\n\n\n\n let mut events = Events::with_capacity(128);\n", "file_path": "tests/tcp.rs", "rank": 37, "score": 85378.7148108821 }, { "content": "#[test]\n\nfn write_then_drop() {\n\n init();\n\n\n\n let a = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = a.local_addr().unwrap();\n\n let mut s = TcpStream::connect(addr).unwrap();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n 
poll.registry()\n\n .register(&a, Token(1), Interests::READABLE)\n\n .unwrap();\n\n\n\n poll.registry()\n\n .register(&s, Token(3), Interests::READABLE)\n\n .unwrap();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n while events.is_empty() {\n\n poll.poll(&mut events, None).unwrap();\n", "file_path": "tests/tcp.rs", "rank": 38, "score": 85378.7148108821 }, { "content": "#[test]\n\nfn write_then_deregister() {\n\n init();\n\n\n\n let a = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = a.local_addr().unwrap();\n\n let mut s = TcpStream::connect(addr).unwrap();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n poll.registry()\n\n .register(&a, Token(1), Interests::READABLE)\n\n .unwrap();\n\n poll.registry()\n\n .register(&s, Token(3), Interests::READABLE)\n\n .unwrap();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n while events.is_empty() {\n\n poll.poll(&mut events, None).unwrap();\n\n }\n", "file_path": "tests/tcp.rs", "rank": 39, "score": 85378.7148108821 }, { "content": "#[test]\n\nfn connect_error() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(16);\n\n\n\n // Pick a \"random\" port that shouldn't be in use.\n\n let l = match TcpStream::connect(\"127.0.0.1:38381\".parse().unwrap()) {\n\n Ok(l) => l,\n\n Err(ref e) if e.kind() == io::ErrorKind::ConnectionRefused => {\n\n // Connection failed synchronously. 
This is not a bug, but it\n\n // unfortunately doesn't get us the code coverage we want.\n\n return;\n\n }\n\n Err(e) => panic!(\"TcpStream::connect unexpected error {:?}\", e),\n\n };\n\n\n\n poll.registry()\n\n .register(&l, Token(0), Interests::WRITABLE)\n\n .unwrap();\n", "file_path": "tests/tcp.rs", "rank": 40, "score": 85378.7148108821 }, { "content": "#[test]\n\n#[cfg(unix)]\n\n#[cfg(not(debug_assertions))]\n\nfn assert_size() {\n\n use mio::net::*;\n\n use std::mem::size_of;\n\n\n\n // Without debug assertions enabled `TcpListener`, `TcpStream` and `UdpSocket` should have the\n\n // same size as the system specific socket, i.e. just a file descriptor on Unix platforms.\n\n assert_eq!(size_of::<TcpListener>(), size_of::<std::net::TcpListener>());\n\n assert_eq!(size_of::<TcpStream>(), size_of::<std::net::TcpStream>());\n\n assert_eq!(size_of::<UdpSocket>(), size_of::<std::net::UdpSocket>());\n\n}\n", "file_path": "tests/size.rs", "rank": 42, "score": 85378.34288650668 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn test_tcp_register_multiple_event_loops() {\n\n init();\n\n\n\n let listener = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n let poll1 = Poll::new().unwrap();\n\n poll1\n\n .registry()\n\n .register(\n\n &listener,\n\n Token(0),\n\n Interests::READABLE | Interests::WRITABLE,\n\n )\n\n .unwrap();\n\n\n\n let poll2 = Poll::new().unwrap();\n\n\n\n // Try registering the same socket with the initial one\n\n let res = poll2.registry().register(\n", "file_path": "tests/registering.rs", "rank": 43, "score": 84727.80248918587 }, { "content": "#[test]\n\n#[cfg(debug_assertions)] // Check is only present when debug assertions are enabled.\n\nfn test_udp_register_multiple_event_loops() {\n\n init();\n\n\n\n let socket = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n let poll1 = Poll::new().unwrap();\n\n poll1\n\n 
.registry()\n\n .register(&socket, Token(0), Interests::READABLE | Interests::WRITABLE)\n\n .unwrap();\n\n\n\n let poll2 = Poll::new().unwrap();\n\n\n\n // Try registering the same socket with the initial one\n\n let res =\n\n poll2\n\n .registry()\n\n .register(&socket, Token(0), Interests::READABLE | Interests::WRITABLE);\n\n assert!(res.is_err());\n\n assert_eq!(res.unwrap_err().kind(), io::ErrorKind::Other);\n", "file_path": "tests/registering.rs", "rank": 44, "score": 84727.80248918587 }, { "content": "/// A helper trait to provide the map_non_block function on Results.\n\ntrait MapNonBlock<T> {\n\n /// Maps a `Result<T>` to a `Result<Option<T>>` by converting\n\n /// operation-would-block errors into `Ok(None)`.\n\n fn map_non_block(self) -> io::Result<Option<T>>;\n\n}\n\n\n\nimpl<T> MapNonBlock<T> for io::Result<T> {\n\n fn map_non_block(self) -> io::Result<Option<T>> {\n\n use std::io::ErrorKind::WouldBlock;\n\n\n\n match self {\n\n Ok(value) => Ok(Some(value)),\n\n Err(err) => {\n\n if let WouldBlock = err.kind() {\n\n Ok(None)\n\n } else {\n\n Err(err)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 45, "score": 84634.48340631562 }, { "content": "#[test]\n\npub fn test_close_on_drop() {\n\n init();\n\n debug!(\"Starting TEST_CLOSE_ON_DROP\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // == Create & setup server socket\n\n let srv = TcpListener::bind(any_local_address()).unwrap();\n\n let addr = srv.local_addr().unwrap();\n\n\n\n poll.registry()\n\n .register(&srv, SERVER, Interests::READABLE)\n\n .unwrap();\n\n\n\n // == Create & setup client socket\n\n let sock = TcpStream::connect(addr).unwrap();\n\n\n\n poll.registry()\n\n .register(&sock, CLIENT, Interests::WRITABLE)\n\n .unwrap();\n\n\n", "file_path": "tests/close_on_drop.rs", "rank": 46, "score": 84297.16698750693 }, { "content": "#[test]\n\npub fn test_udp_socket() {\n\n init();\n\n\n\n let tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let rx = 
UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n test_send_recv_udp(tx, rx, false);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 47, "score": 84297.16698750693 }, { "content": "#[test]\n\nfn local_addr_ready() {\n\n init();\n\n\n\n let addr = \"127.0.0.1:0\".parse().unwrap();\n\n let server = TcpListener::bind(addr).unwrap();\n\n let addr = server.local_addr().unwrap();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n poll.registry()\n\n .register(&server, LISTEN, Interests::READABLE)\n\n .unwrap();\n\n\n\n let sock = TcpStream::connect(addr).unwrap();\n\n poll.registry()\n\n .register(&sock, CLIENT, Interests::READABLE)\n\n .unwrap();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n\n\n let mut handler = MyHandler {\n", "file_path": "tests/tcp.rs", "rank": 48, "score": 82353.60952815165 }, { "content": "#[test]\n\nfn bind_twice_bad() {\n\n init();\n\n\n\n let l1 = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = l1.local_addr().unwrap();\n\n assert!(TcpListener::bind(addr).is_err());\n\n}\n\n\n", "file_path": "tests/tcp.rs", "rank": 49, "score": 82353.60952815165 }, { "content": "#[test]\n\n#[cfg(unix)]\n\nfn connection_reset_by_peer() {\n\n init();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n let mut events = Events::with_capacity(16);\n\n let mut buf = [0u8; 16];\n\n\n\n // Create listener\n\n let l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n // Connect client\n\n let client = net2::TcpBuilder::new_v4().unwrap().to_tcp_stream().unwrap();\n\n\n\n client.set_linger(Some(Duration::from_millis(0))).unwrap();\n\n client.connect(&addr).unwrap();\n\n\n\n // Convert to Mio stream\n\n // FIXME: how to convert the stream on Windows?\n\n let client = unsafe { TcpStream::from_raw_fd(client.into_raw_fd()) };\n", "file_path": "tests/tcp.rs", "rank": 50, "score": 82353.45294564715 }, { "content": "#[test]\n\npub fn 
test_reregister_different_without_poll() {\n\n init();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // Create the listener\n\n let l = TcpListener::bind(\"127.0.0.1:0\".parse().unwrap()).unwrap();\n\n\n\n // Register the listener with `Poll`\n\n poll.registry()\n\n .register(&l, Token(0), Interests::READABLE)\n\n .unwrap();\n\n\n\n let s1 = TcpStream::connect(l.local_addr().unwrap()).unwrap();\n\n poll.registry()\n\n .register(&s1, Token(2), Interests::READABLE)\n\n .unwrap();\n\n\n\n const TIMEOUT: Duration = Duration::from_millis(200);\n\n sleep(TIMEOUT);\n\n\n\n poll.registry()\n\n .reregister(&l, Token(0), Interests::WRITABLE)\n\n .unwrap();\n\n\n\n poll.poll(&mut events, Some(TIMEOUT)).unwrap();\n\n assert!(events.iter().next().is_none());\n\n}\n\n\n", "file_path": "tests/registering.rs", "rank": 51, "score": 82063.24137117897 }, { "content": "#[test]\n\npub fn test_udp_socket_discard() {\n\n init();\n\n\n\n let tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let rx = UdpSocket::bind(any_local_address()).unwrap();\n\n let udp_outside = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n let tx_addr = tx.local_addr().unwrap();\n\n let rx_addr = rx.local_addr().unwrap();\n\n\n\n assert!(tx.connect(rx_addr).is_ok());\n\n assert!(udp_outside.connect(rx_addr).is_ok());\n\n assert!(rx.connect(tx_addr).is_ok());\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n let r = udp_outside.send(b\"hello world\");\n\n assert!(r.is_ok() || r.unwrap_err().kind() == ErrorKind::WouldBlock);\n\n\n\n poll.registry()\n", "file_path": "tests/udp_socket.rs", "rank": 52, "score": 82063.24137117897 }, { "content": "#[test]\n\npub fn test_udp_socket_send_recv() {\n\n init();\n\n\n\n let (tx, rx) = connected_sockets();\n\n\n\n test_send_recv_udp(tx, rx, true);\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 53, "score": 79982.29381485778 }, { "content": "#[test]\n\nfn multiple_writes_immediate_success() {\n\n 
init();\n\n\n\n const N: usize = 16;\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let addr = l.local_addr().unwrap();\n\n\n\n let t = thread::spawn(move || {\n\n let mut s = l.accept().unwrap().0;\n\n let mut b = [0; 1024];\n\n let mut amt = 0;\n\n while amt < 1024 * N {\n\n for byte in b.iter_mut() {\n\n *byte = 0;\n\n }\n\n let n = s.read(&mut b).unwrap();\n\n amt += n;\n\n for byte in b[..n].iter() {\n\n assert_eq!(*byte, 1);\n\n }\n", "file_path": "tests/tcp.rs", "rank": 54, "score": 79603.46996239718 }, { "content": "#[test]\n\nfn waker_wakeup_different_thread() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n\n\n let waker = Arc::new(waker);\n\n let waker1 = Arc::clone(&waker);\n\n let handle = thread::spawn(move || {\n\n waker1.wake().expect(\"unable to wake\");\n\n });\n\n\n\n expect_waker_event(&mut poll, &mut events, token);\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n handle.join().unwrap();\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 55, "score": 79603.46996239718 }, { "content": "#[test]\n\nfn waker_multiple_wakeups_same_thread() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n\n\n for _ in 0..3 {\n\n waker.wake().expect(\"unable to wake\");\n\n }\n\n expect_waker_event(&mut poll, &mut events, token);\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 56, "score": 79603.46996239718 }, { "content": "#[test]\n\nfn waker_multiple_wakeups_different_thread() {\n\n init();\n\n\n\n let mut poll = Poll::new().expect(\"unable to create new Poll instance\");\n\n let mut events = 
Events::with_capacity(10);\n\n\n\n let token = Token(10);\n\n let waker = Waker::new(poll.registry(), token).expect(\"unable to create waker\");\n\n let waker = Arc::new(waker);\n\n let waker1 = Arc::clone(&waker);\n\n let waker2 = Arc::clone(&waker1);\n\n\n\n let handle1 = thread::spawn(move || {\n\n waker1.wake().expect(\"unable to wake\");\n\n });\n\n\n\n let barrier = Arc::new(Barrier::new(2));\n\n let barrier2 = barrier.clone();\n\n let handle2 = thread::spawn(move || {\n\n barrier2.wait();\n", "file_path": "tests/waker.rs", "rank": 58, "score": 77092.43649067568 }, { "content": "#[test]\n\nfn assert_event_source_implemented_for() {\n\n fn assert_event_source<E: event::Source>() {}\n\n\n\n assert_event_source::<Box<dyn event::Source>>();\n\n assert_event_source::<Box<TcpStream>>();\n\n assert_event_source::<Arc<dyn event::Source>>();\n\n assert_event_source::<Arc<TcpStream>>();\n\n assert_event_source::<Rc<dyn event::Source>>();\n\n assert_event_source::<Rc<TcpStream>>();\n\n}\n", "file_path": "tests/event_source.rs", "rank": 59, "score": 77092.43649067568 }, { "content": "/// Returns the sender and the receiver\n\nfn connected_sockets() -> (UdpSocket, UdpSocket) {\n\n let tx = UdpSocket::bind(any_local_address()).unwrap();\n\n let rx = UdpSocket::bind(any_local_address()).unwrap();\n\n\n\n let tx_addr = tx.local_addr().unwrap();\n\n let rx_addr = rx.local_addr().unwrap();\n\n\n\n assert!(tx.connect(rx_addr).is_ok());\n\n assert!(rx.connect(tx_addr).is_ok());\n\n\n\n (tx, rx)\n\n}\n\n\n", "file_path": "tests/udp_socket.rs", "rank": 60, "score": 67917.99729929729 }, { "content": "fn inaddr_any(other: SocketAddr) -> SocketAddr {\n\n match other {\n\n SocketAddr::V4(..) => {\n\n let any = Ipv4Addr::new(0, 0, 0, 0);\n\n let addr = SocketAddrV4::new(any, 0);\n\n SocketAddr::V4(addr)\n\n }\n\n SocketAddr::V6(..) 
=> {\n\n let any = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0);\n\n let addr = SocketAddrV6::new(any, 0, 0, 0);\n\n SocketAddr::V6(addr)\n\n }\n\n }\n\n}\n", "file_path": "src/sys/windows/mod.rs", "rank": 61, "score": 67497.29086875521 }, { "content": "#[cfg(test)]\n\nfn test_send_recv_udp(tx: UdpSocket, rx: UdpSocket, connected: bool) {\n\n init();\n\n\n\n debug!(\"Starting TEST_UDP_SOCKETS\");\n\n let mut poll = Poll::new().unwrap();\n\n\n\n // ensure that the sockets are non-blocking\n\n let mut buf = [0; 128];\n\n assert_eq!(\n\n ErrorKind::WouldBlock,\n\n rx.recv_from(&mut buf).unwrap_err().kind()\n\n );\n\n\n\n info!(\"Registering SENDER\");\n\n poll.registry()\n\n .register(&tx, SENDER, Interests::WRITABLE)\n\n .unwrap();\n\n\n\n info!(\"Registering LISTENER\");\n\n poll.registry()\n", "file_path": "tests/udp_socket.rs", "rank": 62, "score": 62698.01263694941 }, { "content": "fn socket_addr(addr: &SocketAddr) -> (*const SOCKADDR, c_int) {\n\n match addr {\n\n SocketAddr::V4(ref addr) => (\n\n addr as *const _ as *const SOCKADDR,\n\n size_of_val(addr) as c_int,\n\n ),\n\n SocketAddr::V6(ref addr) => (\n\n addr as *const _ as *const SOCKADDR,\n\n size_of_val(addr) as c_int,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/sys/windows/mod.rs", "rank": 63, "score": 59389.62170648521 }, { "content": "fn expect_waker_event(poll: &mut Poll, events: &mut Events, token: Token) {\n\n poll.poll(events, Some(Duration::from_millis(100))).unwrap();\n\n assert!(!events.is_empty());\n\n for event in events.iter() {\n\n assert_eq!(event.token(), token);\n\n assert!(event.is_readable());\n\n }\n\n}\n", "file_path": "tests/waker.rs", "rank": 64, "score": 54160.13693759541 }, { "content": "/// Create a new non-blocking socket.\n\nfn new_socket(addr: SocketAddr, socket_type: c_int) -> io::Result<SOCKET> {\n\n let domain = match addr {\n\n SocketAddr::V4(..) => PF_INET,\n\n SocketAddr::V6(..) 
=> PF_INET6,\n\n };\n\n\n\n syscall!(\n\n socket(domain, socket_type, 0),\n\n PartialEq::eq,\n\n INVALID_SOCKET\n\n )\n\n .and_then(|socket| {\n\n syscall!(ioctlsocket(socket, FIONBIO, &mut 1), PartialEq::ne, 0).map(|_| socket as SOCKET)\n\n })\n\n}\n\n\n", "file_path": "src/sys/windows/mod.rs", "rank": 65, "score": 53074.172305195025 }, { "content": "#[test]\n\nfn does_not_register_rw() {\n\n use crate::unix::SourceFd;\n\n use crate::{Poll, Token};\n\n\n\n let kq = unsafe { libc::kqueue() };\n\n let kqf = SourceFd(&kq);\n\n let poll = Poll::new().unwrap();\n\n\n\n // Registering kqueue fd will fail if write is requested (On anything but\n\n // some versions of macOS).\n\n poll.registry()\n\n .register(&kqf, Token(1234), Interests::READABLE)\n\n .unwrap();\n\n}\n", "file_path": "src/sys/unix/kqueue.rs", "rank": 66, "score": 51110.90538323379 }, { "content": "/// Register `changes` with `kq`ueue.\n\nfn kevent_register(\n\n kq: RawFd,\n\n changes: &mut [libc::kevent],\n\n ignored_errors: &[Data],\n\n) -> io::Result<()> {\n\n syscall!(kevent(\n\n kq,\n\n changes.as_ptr(),\n\n changes.len() as Count,\n\n changes.as_mut_ptr(),\n\n changes.len() as Count,\n\n ptr::null(),\n\n ))\n\n .map(|_| ())\n\n .or_else(|err| {\n\n // According to the manual page of FreeBSD: \"When kevent() call fails\n\n // with EINTR error, all changes in the changelist have been applied\",\n\n // so we can safely ignore it.\n\n if err.raw_os_error() == Some(libc::EINTR) {\n\n Ok(())\n\n } else {\n\n Err(err)\n\n }\n\n })\n\n .and_then(|()| check_errors(&changes, ignored_errors))\n\n}\n\n\n", "file_path": "src/sys/unix/kqueue.rs", "rank": 67, "score": 51106.43293873769 }, { "content": "#[test]\n\n#[cfg(unix)]\n\npub fn as_raw_fd() {\n\n let poll = Poll::new().unwrap();\n\n assert!(poll.as_raw_fd() > 0);\n\n}\n", "file_path": "src/poll.rs", "rank": 68, "score": 49839.19832098896 }, { "content": "#[test]\n\nfn assert_close_on_exec_flag() {\n\n // This assertion need to be true for Selector::new.\n\n 
assert_eq!(libc::O_CLOEXEC, libc::EPOLL_CLOEXEC);\n\n}\n", "file_path": "src/sys/unix/epoll.rs", "rank": 69, "score": 48672.093941764804 }, { "content": "fn interests_to_epoll(interests: Interests) -> u32 {\n\n let mut kind = EPOLLET;\n\n\n\n if interests.is_readable() {\n\n kind |= EPOLLIN;\n\n }\n\n\n\n if interests.is_writable() {\n\n kind |= EPOLLOUT;\n\n }\n\n\n\n kind as u32\n\n}\n\n\n\nimpl AsRawFd for Selector {\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.ep\n\n }\n\n}\n\n\n", "file_path": "src/sys/unix/epoll.rs", "rank": 70, "score": 41167.87997648805 }, { "content": "pub fn is_lio(_: &Event) -> bool {\n\n // Not supported.\n\n false\n\n}\n\n\n\npub struct Events {\n\n /// Raw I/O event completions are filled in here by the call to `get_many`\n\n /// on the completion port above. These are then processed to run callbacks\n\n /// which figure out what to do after the event is done.\n\n pub statuses: Box<[CompletionStatus]>,\n\n\n\n /// Literal events returned by `get` to the upwards `EventLoop`. 
This file\n\n /// doesn't really modify this (except for the waker), instead almost all\n\n /// events are filled in by the `ReadinessQueue` from the `poll` module.\n\n pub events: Vec<Event>,\n\n}\n\n\n\nimpl Events {\n\n pub fn with_capacity(cap: usize) -> Events {\n\n // Note that it's possible for the output `events` to grow beyond the\n", "file_path": "src/sys/windows/event.rs", "rank": 71, "score": 40864.29174901088 }, { "content": "pub fn is_aio(_: &Event) -> bool {\n\n // Not supported.\n\n false\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 72, "score": 40864.29174901088 }, { "content": "struct TestHandler {\n\n server: TcpListener,\n\n client: TcpStream,\n\n state: usize,\n\n}\n\n\n\nimpl TestHandler {\n\n fn new(srv: TcpListener, cli: TcpStream) -> TestHandler {\n\n TestHandler {\n\n server: srv,\n\n client: cli,\n\n state: 0,\n\n }\n\n }\n\n\n\n fn handle_read(&mut self, registry: &Registry, token: Token) {\n\n match token {\n\n SERVER => {\n\n trace!(\"handle_read; token=SERVER\");\n\n let mut sock = self.server.accept().unwrap().0;\n", "file_path": "tests/registering.rs", "rank": 73, "score": 40742.64564495125 }, { "content": "fn interests_to_afd_flags(interests: Interests) -> u32 {\n\n let mut flags = 0;\n\n\n\n if interests.is_readable() {\n\n // AFD_POLL_DISCONNECT for is_read_hup()\n\n flags |= AFD_POLL_RECEIVE | AFD_POLL_ACCEPT | AFD_POLL_DISCONNECT;\n\n }\n\n\n\n if interests.is_writable() {\n\n flags |= AFD_POLL_SEND;\n\n }\n\n\n\n flags\n\n}\n\n\n", "file_path": "src/sys/windows/selector.rs", "rank": 74, "score": 40177.3172362857 }, { "content": "pub fn is_readable(event: &Event) -> bool {\n\n if is_error(event) || is_read_hup(event) {\n\n return true;\n\n }\n\n event.flags & (afd::AFD_POLL_RECEIVE | afd::AFD_POLL_ACCEPT) != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 75, "score": 39791.19418028705 }, { "content": "pub fn is_priority(event: &Event) -> bool {\n\n event.flags & afd::AFD_POLL_RECEIVE_EXPEDITED 
!= 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 76, "score": 39791.19418028705 }, { "content": "pub fn is_writable(event: &Event) -> bool {\n\n if is_error(event) {\n\n return true;\n\n }\n\n event.flags & afd::AFD_POLL_SEND != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 77, "score": 39791.19418028705 }, { "content": "pub fn token(event: &Event) -> Token {\n\n Token(event.data as usize)\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 78, "score": 39791.19418028705 }, { "content": "pub fn is_hup(event: &Event) -> bool {\n\n event.flags & afd::AFD_POLL_ABORT != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 79, "score": 39791.19418028705 }, { "content": "pub fn is_error(event: &Event) -> bool {\n\n event.flags & afd::AFD_POLL_CONNECT_FAIL != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 80, "score": 39791.19418028705 }, { "content": "pub fn selector(registry: &Registry) -> &sys::Selector {\n\n &registry.selector\n\n}\n\n\n\n#[cfg(debug_assertions)]\n\nimpl SelectorId {\n\n pub fn new() -> SelectorId {\n\n SelectorId {\n\n id: AtomicUsize::new(0),\n\n }\n\n }\n\n\n\n pub fn associate_selector(&self, registry: &Registry) -> io::Result<()> {\n\n let selector_id = self.id.load(Ordering::SeqCst);\n\n\n\n if selector_id != 0 && selector_id != registry.selector.id() {\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"socket already registered\",\n\n ))\n", "file_path": "src/poll.rs", "rank": 81, "score": 39713.414599682575 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum TestState {\n\n Initial,\n\n AfterRead,\n\n}\n\n\n", "file_path": "tests/close_on_drop.rs", "rank": 82, "score": 39460.44003443765 }, { "content": "struct TestHandler {\n\n srv: TcpListener,\n\n cli: TcpStream,\n\n state: TestState,\n\n shutdown: bool,\n\n}\n\n\n\nimpl TestHandler {\n\n fn new(srv: TcpListener, cli: TcpStream) -> TestHandler {\n\n TestHandler {\n\n srv,\n\n cli,\n\n state: Initial,\n\n 
shutdown: false,\n\n }\n\n }\n\n\n\n fn handle_read(&mut self, poll: &mut Poll, tok: Token) {\n\n debug!(\"readable; tok={:?}\", tok);\n\n\n", "file_path": "tests/close_on_drop.rs", "rank": 83, "score": 39460.44003443765 }, { "content": "pub fn is_read_hup(event: &Event) -> bool {\n\n event.flags & afd::AFD_POLL_DISCONNECT != 0\n\n}\n\n\n", "file_path": "src/sys/windows/event.rs", "rank": 84, "score": 38800.6314400847 }, { "content": "use std::sync::{Arc, Barrier};\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse mio::{Events, Poll, Token, Waker};\n\n\n\nmod util;\n\n\n\nuse util::{assert_send, assert_sync, expect_no_events, init};\n\n\n\n#[test]\n", "file_path": "tests/waker.rs", "rank": 86, "score": 34846.66153445324 }, { "content": "use std::io::{self, Read};\n\nuse std::time::Duration;\n\nuse std::{net, thread};\n\n\n\nuse mio::net::TcpStream;\n\nuse mio::{Events, Interests, Poll, Token};\n\n\n\nmod util;\n\n\n\nuse util::init;\n\n\n\n#[test]\n", "file_path": "tests/regressions.rs", "rank": 87, "score": 34844.97081036366 }, { "content": "use std::io::{self, Write};\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\n\n\nuse log::{debug, info, trace};\n\n\n\n#[cfg(debug_assertions)]\n\nuse mio::net::UdpSocket;\n\nuse mio::net::{TcpListener, TcpStream};\n\nuse mio::{Events, Interests, Poll, Registry, Token};\n\n\n\nmod util;\n\n\n\nuse util::{any_local_address, init};\n\n\n\nconst SERVER: Token = Token(0);\n\nconst CLIENT: Token = Token(1);\n\n\n", "file_path": "tests/registering.rs", "rank": 88, "score": 34842.28457763183 }, { "content": "use std::io::{self, Read, Write};\n\nuse std::net::Shutdown;\n\n#[cfg(unix)]\n\nuse std::os::unix::io::{FromRawFd, IntoRawFd};\n\nuse std::sync::mpsc::channel;\n\nuse std::time::Duration;\n\nuse std::{net, thread};\n\n\n\nuse bytes::{Buf, Bytes, BytesMut};\n\nuse log::{debug, info};\n\n#[cfg(unix)]\n\nuse net2::TcpStreamExt;\n\nuse slab::Slab;\n\n\n\nuse mio::net::{TcpListener, TcpStream};\n\nuse mio::{Events, 
Interests, Poll, Registry, Token};\n\n\n\nmod util;\n\n\n\nuse util::{any_local_address, assert_send, assert_sync, init, TryRead, TryWrite};\n\n\n\nconst LISTEN: Token = Token(0);\n\nconst CLIENT: Token = Token(1);\n\nconst SERVER: Token = Token(2);\n\n\n\n#[test]\n", "file_path": "tests/tcp.rs", "rank": 89, "score": 34838.278333744034 }, { "content": "use mio::Interests;\n\n\n\n#[test]\n", "file_path": "tests/interests.rs", "rank": 90, "score": 34835.61490681323 }, { "content": "#[test]\n\n#[cfg(unix)]\n\n#[cfg(not(debug_assertions))]\n", "file_path": "tests/size.rs", "rank": 91, "score": 34827.11211161038 }, { "content": " server = sock;\n\n break 'outer;\n\n }\n\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {}\n\n Err(e) => panic!(\"unexpected error {:?}\", e),\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Close the connection\n\n drop(client);\n\n\n\n // Wait a moment\n\n thread::sleep(Duration::from_millis(100));\n\n\n\n // Register interest in the server socket\n\n poll.registry()\n\n .register(&server, Token(3), Interests::READABLE)\n\n .unwrap();\n", "file_path": "tests/tcp.rs", "rank": 93, "score": 34825.78853543721 }, { "content": " Err(e) => {\n\n println!(\"good error: {}\", e);\n\n break;\n\n }\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! 
wait {\n\n ($poll:ident, $ready:ident, $expect_hup: expr) => {{\n\n use std::time::Instant;\n\n\n\n let now = Instant::now();\n\n let mut events = Events::with_capacity(16);\n\n let mut found = false;\n\n\n\n while !found {\n\n if now.elapsed() > Duration::from_secs(5) {\n\n panic!(\"not ready\");\n\n }\n", "file_path": "tests/tcp.rs", "rank": 94, "score": 34825.660215507974 }, { "content": " waker2.wake().expect(\"unable to wake\");\n\n });\n\n\n\n // Receive the event from thread 1.\n\n expect_waker_event(&mut poll, &mut events, token);\n\n\n\n // Unblock thread 2.\n\n barrier.wait();\n\n\n\n // Now we need to receive another event from thread 2.\n\n expect_waker_event(&mut poll, &mut events, token);\n\n\n\n expect_no_events(&mut poll, &mut events);\n\n\n\n handle1.join().unwrap();\n\n handle2.join().unwrap();\n\n}\n\n\n", "file_path": "tests/waker.rs", "rank": 95, "score": 34825.540859530054 }, { "content": " .unwrap();\n\n\n\n let mut handler = TestHandler::new(server, client);\n\n\n\n loop {\n\n poll.poll(&mut events, None).unwrap();\n\n\n\n if let Some(event) = events.iter().next() {\n\n if event.is_readable() {\n\n handler.handle_read(poll.registry(), event.token());\n\n }\n\n\n\n if event.is_writable() {\n\n handler.handle_write(poll.registry(), event.token());\n\n break;\n\n }\n\n }\n\n }\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .unwrap();\n\n assert!(events.iter().next().is_none());\n\n}\n\n\n", "file_path": "tests/registering.rs", "rank": 97, "score": 34825.3793708644 }, { "content": " 'outer: loop {\n\n poll.poll(&mut events, None).unwrap();\n\n for event in &events {\n\n if event.token() == Token(1) {\n\n // connected\n\n break 'outer;\n\n }\n\n }\n\n }\n\n\n\n let mut b = [0; 1024];\n\n match s.read(&mut b) {\n\n Ok(_) => panic!(\"unexpected ok\"),\n\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => (),\n\n Err(e) => panic!(\"unexpected error: {:?}\", e),\n\n }\n\n\n\n drop(s);\n\n t.join().unwrap();\n\n}\n", 
"file_path": "tests/regressions.rs", "rank": 98, "score": 34823.0115914897 } ]
Rust
src/direct.rs
passcod/streampager
b562cb044029a9f67512d4a5ca8bf6fd3a490f30
use crate::config::{InterfaceMode, WrappingMode}; use crate::event::{Event, EventStream}; use crate::file::File; use crate::line::Line; use crate::progress::Progress; use anyhow::Result; use bit_set::BitSet; use std::time::{Duration, Instant}; use termwiz::input::InputEvent; use termwiz::surface::change::Change; use termwiz::surface::Position; use termwiz::terminal::Terminal; use vec_map::VecMap; #[derive(Debug)] pub(crate) enum Outcome { RenderIncomplete, RenderNothing, RenderComplete, Interrupted, } pub(crate) fn direct<T: Terminal>( term: &mut T, output_files: &[File], error_files: &[File], progress: Option<&Progress>, events: &mut EventStream, mode: InterfaceMode, ) -> Result<Outcome> { if mode == InterfaceMode::FullScreen { return Ok(Outcome::RenderNothing); } let delayed_deadline = match mode { InterfaceMode::Delayed(duration) => Some(Instant::now() + duration), _ => None, }; let mut loading = BitSet::with_capacity(output_files.len() + error_files.len()); for file in output_files.iter().chain(error_files.iter()) { loading.insert(file.index()); } let mut last_read = VecMap::new(); let mut collect_unread = |files: &[File], max_lines: usize| -> Vec<Vec<u8>> { let mut result = Vec::new(); for file in files.iter() { let index = file.index(); let mut lines = file.lines(); let last = last_read.get(index).cloned().unwrap_or(0); file.set_needed_lines(last + max_lines); if lines > 0 && !file.loaded() && file .with_line(lines - 1, |l| !l.ends_with(b"\n")) .unwrap_or(true) { lines -= 1; } if lines >= last { let lines = (last + max_lines).min(lines); result.reserve(lines - last); for i in last..lines { file.with_line(i, |l| result.push(l.to_vec())); } last_read.insert(index, lines); } } result }; let read_progress_lines = || -> Vec<Vec<u8>> { let line_count = progress.map(|p| p.lines()).unwrap_or(0); (0..line_count) .filter_map(|i| progress.and_then(|p| p.with_line(i, |l| l.to_vec()))) .collect::<Vec<_>>() }; let mut state = StreamingLines::default(); let delayed = 
delayed_deadline.is_some(); let has_one_screen_limit = match mode { InterfaceMode::Direct => false, _ => true, }; let mut render = |term: &mut T, h: usize, w: usize| -> Result<Option<Outcome>> { let append_output_lines = collect_unread(output_files, h + 2); let append_error_lines = collect_unread(error_files, h + 2); let progress_lines = read_progress_lines(); state.add_lines(append_output_lines, append_error_lines, progress_lines); if delayed { if has_one_screen_limit && state.height(w) >= h { return Ok(Some(Outcome::RenderNothing)); } } else { if has_one_screen_limit && state.height(w) >= h { return Ok(Some(Outcome::RenderIncomplete)); } let changes = state.render_pending_lines(w)?; term.render(&changes)?; } Ok(None) }; let mut size = term.get_screen_size()?; let mut loaded = BitSet::with_capacity(loading.capacity()); let mut remaining = output_files.len() + error_files.len(); while remaining > 0 { match events.get(term, Some(Duration::from_millis(10)))? { Some(Event::Loaded(i)) => { if loading.contains(i) && loaded.insert(i) { remaining -= 1; } } Some(Event::Input(InputEvent::Resized { .. })) => { size = term.get_screen_size()?; } Some(Event::Input(InputEvent::Key(key))) => { use termwiz::input::{KeyCode::Char, Modifiers}; match (key.modifiers, key.key) { (Modifiers::NONE, Char('q')) | (Modifiers::CTRL, Char('C')) => { return Ok(Outcome::Interrupted); } (Modifiers::NONE, Char('f')) | (Modifiers::NONE, Char(' ')) => { let outcome = if delayed { Outcome::RenderNothing } else { Outcome::RenderIncomplete }; return Ok(outcome); } _ => (), } } _ => (), } if let Some(deadline) = delayed_deadline { if deadline <= Instant::now() { return Ok(Outcome::RenderNothing); } } if let Some(outcome) = render(term, size.rows, size.cols)? 
{ return Ok(outcome); } } if delayed { term.render(&state.render_pending_lines(size.cols)?)?; } Ok(Outcome::RenderComplete) } #[derive(Default)] struct StreamingLines { past_output_row_count: usize, new_output_lines: Vec<Vec<u8>>, error_lines: Vec<Vec<u8>>, progress_lines: Vec<Vec<u8>>, erase_row_count: usize, pending_changes: bool, } impl StreamingLines { fn add_lines( &mut self, mut append_output_lines: Vec<Vec<u8>>, mut append_error_lines: Vec<Vec<u8>>, replace_progress_lines: Vec<Vec<u8>>, ) { if append_output_lines.is_empty() && append_error_lines.is_empty() && replace_progress_lines == self.progress_lines { return; } self.new_output_lines.append(&mut append_output_lines); self.error_lines.append(&mut append_error_lines); self.progress_lines = replace_progress_lines; self.pending_changes = true; } fn render_pending_lines(&mut self, terminal_width: usize) -> Result<Vec<Change>> { if !self.pending_changes { return Ok(Vec::new()); } let line_count = self.new_output_lines.len() + self.error_lines.len() + self.progress_lines.len(); let mut changes = Vec::with_capacity(line_count * 2 + 2); if self.erase_row_count > 0 { let dy = -(self.erase_row_count as isize); changes.push(Change::CursorPosition { x: Position::Relative(0), y: Position::Relative(dy), }); changes.push(Change::ClearToEndOfScreen(Default::default())); } let mut render = |lines| -> Result<_> { let mut row_count = 0; for line in lines { let line = Line::new(0, line); let height = line.height(terminal_width, WrappingMode::GraphemeBoundary); for row in 0..height { line.render_wrapped( &mut changes, row, terminal_width, WrappingMode::GraphemeBoundary, None, )?; changes.push(Change::CursorPosition { x: Position::Absolute(0), y: Position::Relative(1), }); } row_count += height; } Ok(row_count) }; let new_output_row_count = render(self.new_output_lines.iter())?; let error_row_count = render(self.error_lines.iter())?; let progress_row_count = render(self.progress_lines.iter())?; self.past_output_row_count += 
new_output_row_count; self.new_output_lines.clear(); self.erase_row_count = error_row_count + progress_row_count; self.pending_changes = false; Ok(changes) } fn height(&self, terminal_width: usize) -> usize { let mut row_count = self.past_output_row_count; for line in self .new_output_lines .iter() .chain(self.error_lines.iter()) .chain(self.progress_lines.iter()) { let line = Line::new(0, line); row_count += line.height(terminal_width, WrappingMode::GraphemeBoundary); } row_count } }
use crate::config::{InterfaceMode, WrappingMode}; use crate::event::{Event, EventStream}; use crate::file::File; use crate::line::Line; use crate::progress::Progress; use anyhow::Result; use bit_set::BitSet; use std::time::{Duration, Instant}; use termwiz::input::InputEvent; use termwiz::surface::change::Change; use termwiz::surface::Position; use termwiz::terminal::Terminal; use vec_map::VecMap; #[derive(Debug)] pub(crate) enum Outcome { RenderIncomplete, RenderNothing, RenderComplete, Interrupted, } pub(crate) fn direct<T: Terminal>( term: &mut T, output_files: &[File], error_files: &[File], progress: Option<&Progress>, events: &mut EventStream, mode: InterfaceMode, ) -> Result<Outcome> { if mode == InterfaceMode::FullScreen { return Ok(Outcome::RenderNothing); } let delayed_deadline = match mode { InterfaceMode::Delayed(duration) => Some(Instant::now() + duration), _ => None, }; let mut loading = BitSet::with_capacity(output_files.len() + error_files.len()); for file in output_files.iter().chain(error_files.iter()) { loading.insert(file.index()); } let mut last_read = VecMap::new(); let mut collect_unread = |files: &[File], max_lines: usize| -> Vec<Vec<u8>> { let mut result = Vec::new(); for file in files.iter() { let index = file.index(); let mut lines = file.lines(); let last = last_read.get(index).cloned().unwrap_or(0); file.set_needed_lines(last + max_lines); if lines > 0 && !file.loaded() && file .with_line(lines - 1, |l| !l.ends_with(b"\n")) .unwrap_or(true) { lines -= 1; } if lines >= last { let lines = (last + max_lines).min(lines); result.reserve(lines - last); for i in last..lines { file.with_line(i, |l| result.push(l.to_vec())); } last_read.insert(index, lines); } } result }; let read_progress_lines = || -> Vec<Vec<u8>> { let line_count = progress.map(|p| p.lines()).unwrap_or(0); (0..line_count) .filter_map(|i| progress.and_then(|p| p.with_line(i, |l| l.to_vec()))) .collect::<Vec<_>>() }; let mut state = StreamingLines::default(); let delayed = 
delayed_deadline.is_some(); let has_one_screen_limit = match mode { InterfaceMode::Direct => false, _ => true, }; let mut render = |term: &mut T, h: usize, w: usize| -> Result<Option<Outcome>> { let append_output_lines = collect_unread(output_files, h + 2); let append_error_lines = collect_unread(error_files, h + 2); let progress_lines = read_progress_lines(); state.add_lines(append_output_lines, append_error_lines, progress_lines); if delayed { if has_one_screen_limit && state.height(w) >= h { return Ok(Some(Outcome::RenderNothing)); } } else { if has_one_screen_limit && state.height(w) >= h { return Ok(Some(Outcome::RenderIncomplete)); } let changes = state.render_pending_lines(w)?; term.render(&changes)?; } Ok(None) }; let mut size = term.get_screen_size()?; let mut loaded = BitSet::with_capacity(loading.capacity()); let mut remaining = output_files.len() + error_files.len(); while remaining > 0 { match events.get(term, Some(Duration::from_millis(10)))? { Some(Event::Loaded(i)) => { if loading.contains(i) && loaded.insert(i) { remaining -= 1; } } Some(Event::Input(InputEvent::Resized { .. })) => { size = term.get_screen_size()?; } Some(Event::Input(InputEvent::Key(key))) => { use termwiz::input::{KeyCode::Char, Modifiers}; match (key.modifiers, key.key) { (Modifiers::NONE, Char('q')) | (Modifiers::CTRL, Char('C')) => { return Ok(Outcome::Interrupted); } (Modifiers::NONE, Char('f')) | (Modifiers::NONE, Char(' ')) => { let outcome = if delayed { Outcome::RenderNothing } else { Outcome::RenderIncomplete }; return Ok(outcom
#[derive(Default)] struct StreamingLines { past_output_row_count: usize, new_output_lines: Vec<Vec<u8>>, error_lines: Vec<Vec<u8>>, progress_lines: Vec<Vec<u8>>, erase_row_count: usize, pending_changes: bool, } impl StreamingLines { fn add_lines( &mut self, mut append_output_lines: Vec<Vec<u8>>, mut append_error_lines: Vec<Vec<u8>>, replace_progress_lines: Vec<Vec<u8>>, ) { if append_output_lines.is_empty() && append_error_lines.is_empty() && replace_progress_lines == self.progress_lines { return; } self.new_output_lines.append(&mut append_output_lines); self.error_lines.append(&mut append_error_lines); self.progress_lines = replace_progress_lines; self.pending_changes = true; } fn render_pending_lines(&mut self, terminal_width: usize) -> Result<Vec<Change>> { if !self.pending_changes { return Ok(Vec::new()); } let line_count = self.new_output_lines.len() + self.error_lines.len() + self.progress_lines.len(); let mut changes = Vec::with_capacity(line_count * 2 + 2); if self.erase_row_count > 0 { let dy = -(self.erase_row_count as isize); changes.push(Change::CursorPosition { x: Position::Relative(0), y: Position::Relative(dy), }); changes.push(Change::ClearToEndOfScreen(Default::default())); } let mut render = |lines| -> Result<_> { let mut row_count = 0; for line in lines { let line = Line::new(0, line); let height = line.height(terminal_width, WrappingMode::GraphemeBoundary); for row in 0..height { line.render_wrapped( &mut changes, row, terminal_width, WrappingMode::GraphemeBoundary, None, )?; changes.push(Change::CursorPosition { x: Position::Absolute(0), y: Position::Relative(1), }); } row_count += height; } Ok(row_count) }; let new_output_row_count = render(self.new_output_lines.iter())?; let error_row_count = render(self.error_lines.iter())?; let progress_row_count = render(self.progress_lines.iter())?; self.past_output_row_count += new_output_row_count; self.new_output_lines.clear(); self.erase_row_count = error_row_count + progress_row_count; 
self.pending_changes = false; Ok(changes) } fn height(&self, terminal_width: usize) -> usize { let mut row_count = self.past_output_row_count; for line in self .new_output_lines .iter() .chain(self.error_lines.iter()) .chain(self.progress_lines.iter()) { let line = Line::new(0, line); row_count += line.height(terminal_width, WrappingMode::GraphemeBoundary); } row_count } }
e); } _ => (), } } _ => (), } if let Some(deadline) = delayed_deadline { if deadline <= Instant::now() { return Ok(Outcome::RenderNothing); } } if let Some(outcome) = render(term, size.rows, size.cols)? { return Ok(outcome); } } if delayed { term.render(&state.render_pending_lines(size.cols)?)?; } Ok(Outcome::RenderComplete) }
function_block-function_prefixed
[ { "content": "/// Determine the rendering width for a character.\n\nfn render_width(c: char) -> usize {\n\n if c < ' ' || c == '\\x7F' {\n\n // Render as <XX>\n\n 4\n\n } else if let Some(w) = c.width() {\n\n // Render as the character itself\n\n w\n\n } else {\n\n // Render as <U+XXXX>\n\n 8\n\n }\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 0, "score": 138685.04143771302 }, { "content": "fn line_count(newlines: &[usize], length: usize) -> usize {\n\n let mut lines = newlines.len();\n\n let after_last_newline_offset = if lines == 0 {\n\n 0\n\n } else {\n\n newlines[lines - 1] + 1\n\n };\n\n if length > after_last_newline_offset {\n\n lines += 1;\n\n }\n\n lines\n\n}\n", "file_path": "src/file.rs", "rank": 1, "score": 135123.1443770855 }, { "content": "fn move_word_backwards(value: &[char], mut position: usize) -> usize {\n\n while position > 0 {\n\n position -= 1;\n\n if !value[position].is_whitespace() {\n\n break;\n\n }\n\n }\n\n while position > 0 {\n\n if value[position].is_whitespace() {\n\n position += 1;\n\n break;\n\n }\n\n position -= 1;\n\n }\n\n position\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 2, "score": 133322.1564725174 }, { "content": "fn move_word_forwards(value: &[char], mut position: usize) -> usize {\n\n let len = value.len();\n\n while position < len && value[position].is_whitespace() {\n\n position += 1;\n\n }\n\n while position < len && !value[position].is_whitespace() {\n\n position += 1;\n\n }\n\n position\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 3, "score": 133322.1564725174 }, { "content": "fn write_key_names(text: &mut String, keys: &[(Modifiers, KeyCode)]) -> Result<usize> {\n\n let mut w = 0;\n\n for (index, (modifiers, keycode)) in keys.iter().enumerate() {\n\n if index > 0 {\n\n if index == keys.len() - 1 {\n\n text.push_str(\"\\x1B[0;2m or \");\n\n w += 4;\n\n } else {\n\n text.push_str(\"\\x1B[0;2m, \");\n\n w += 2;\n\n }\n\n }\n\n text.push_str(\"\\x1B[1m\");\n\n for (modifier, desc) in [\n\n 
(Modifiers::CTRL, \"Ctrl-\"),\n\n (Modifiers::ALT, \"Alt-\"),\n\n (Modifiers::SUPER, \"Super-\"),\n\n (Modifiers::SHIFT, \"Shift-\"),\n\n ]\n\n .iter()\n", "file_path": "src/help.rs", "rank": 4, "score": 128423.82933669674 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum FileEvent {\n\n /// File has been appended to.\n\n Append,\n\n\n\n /// File has changed and needs reloading.\n\n Reload,\n\n}\n\n\n", "file_path": "src/file.rs", "rank": 5, "score": 121234.12825060196 }, { "content": "/// Parse data into an array of Spans.\n\nfn parse_spans(data: &[u8], match_index: Option<usize>) -> Vec<Span> {\n\n let mut spans = Vec::new();\n\n let mut input = &data[..];\n\n\n\n fn parse_unicode_span(data: &str, spans: &mut Vec<Span>, match_index: Option<usize>) {\n\n let mut text_start = None;\n\n let mut skip_to = None;\n\n for (index, grapheme) in data.grapheme_indices(true) {\n\n let mut span = None;\n\n\n\n // Skip past any escape sequence we've already extracted\n\n if let Some(end) = skip_to {\n\n if index < end {\n\n continue;\n\n } else {\n\n skip_to = None;\n\n }\n\n }\n\n\n\n if grapheme == \"\\x1B\" {\n", "file_path": "src/line.rs", "rank": 6, "score": 113402.94708803501 }, { "content": "/// Determine the special rendering for a character, if any.\n\nfn special_render(c: char) -> Option<String> {\n\n if c < ' ' || c == '\\x7F' {\n\n Some(format!(\"<{:02X}>\", c as u8))\n\n } else if c.width().is_none() {\n\n Some(format!(\"<U+{:04X}>\", c as u32))\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/prompt.rs", "rank": 7, "score": 105342.07035585778 }, { "content": "/// Line wrap in the cache are uniquely identified by index and wrapping mode.\n\ntype WrapCacheIndex = (usize, WrappingMode);\n\n\n", "file_path": "src/line.rs", "rank": 8, "score": 97540.65822664532 }, { "content": "type PromptRunFn = dyn FnMut(&mut Screen, &str) -> Result<Option<Action>, Error>;\n\n\n\n/// A prompt for input from the user.\n\npub(crate) struct Prompt {\n\n /// The text of the 
prompt to display to the user.\n\n prompt: String,\n\n\n\n /// The current prompt history,\n\n history: PromptHistory,\n\n\n\n /// The closure to run when the user presses Return. Will only be called once.\n\n run: Option<Box<PromptRunFn>>,\n\n}\n\n\n\npub(crate) struct PromptState {\n\n /// The value the user is typing in.\n\n value: Vec<char>,\n\n\n\n /// The offset within the value that we are displaying from.\n\n offset: usize,\n", "file_path": "src/prompt.rs", "rank": 9, "score": 96260.1621866154 }, { "content": "fn main() -> Result<()> {\n\n let (out_read, mut out_write) = pipe();\n\n let (err_read, mut err_write) = pipe();\n\n let (prog_read, mut prog_write) = pipe();\n\n let infinite_output = std::env::args().nth(1) == Some(\"inf\".to_string());\n\n\n\n let out_thread = spawn(move || -> io::Result<()> {\n\n if infinite_output {\n\n let mut i = 0;\n\n loop {\n\n i += 1;\n\n out_write.write_all(format!(\"this is line {}\\n\", i).as_bytes())?;\n\n }\n\n } else {\n\n for i in 1..=100 {\n\n out_write.write_all(b\"this is line\")?;\n\n sleep(Duration::from_millis(225));\n\n out_write.write_all(format!(\" {}\\n\", i).as_bytes())?;\n\n sleep(Duration::from_millis(225));\n\n }\n", "file_path": "examples/streams_example.rs", "rank": 10, "score": 93458.50477863097 }, { "content": "#[derive(Clone)]\n\nenum FileData {\n\n /// Data content is being streamed from an input stream, and stored in a\n\n /// vector of buffers.\n\n Streamed { buffers: Arc<RwLock<Vec<Buffer>>> },\n\n\n\n /// Data content should be read from a file on disk.\n\n File {\n\n path: PathBuf,\n\n buffer_cache: Arc<Mutex<BufferCache>>,\n\n events: mpsc::Sender<FileEvent>,\n\n },\n\n\n\n /// Data content has been memory mapped.\n\n Mapped { mmap: Arc<Mmap> },\n\n\n\n /// File is empty.\n\n Empty,\n\n\n\n /// Static content.\n\n Static { data: Arc<Cow<'static, [u8]>> },\n\n}\n\n\n", "file_path": "src/file.rs", "rank": 11, "score": 91077.70281083655 }, { "content": "/// Determine terminal 
capabilities.\n\nfn termcaps() -> Result<Capabilities> {\n\n // Get terminal capabilities from the environment, but disable mouse\n\n // reporting, as we don't want to change the terminal's mouse handling.\n\n // Enable TrueColor support, which is backwards compatible with 16\n\n // or 256 colors. Applications can still limit themselves to 16 or\n\n // 256 colors if they want.\n\n let hints = ProbeHints::new_from_env()\n\n .color_level(Some(ColorLevel::TrueColor))\n\n .mouse_reporting(Some(false));\n\n let caps = Capabilities::new_with_hints(hints)?;\n\n if cfg!(unix) && caps.terminfo_db().is_none() {\n\n bail!(\"terminfo database not found (is $TERM correct?)\");\n\n }\n\n Ok(caps)\n\n}\n\n\n\nimpl Pager {\n\n /// Build a `Pager` using the system terminal.\n\n pub fn new_using_system_terminal() -> Result<Self> {\n\n Self::new_with_terminal_func(SystemTerminal::new)\n", "file_path": "src/lib.rs", "rank": 12, "score": 90163.49493225152 }, { "content": "fn fill_range(b: &mut BitSet, start: usize, end: usize, fill: bool) {\n\n if fill {\n\n b.extend(start..end);\n\n } else {\n\n for row in start..end {\n\n b.remove(row);\n\n }\n\n }\n\n}\n\n\n\nimpl Refresh {\n\n /// Add a range of rows to the rows that must be rendered.\n\n pub(crate) fn add_range(&mut self, start: usize, end: usize) {\n\n match *self {\n\n Refresh::None => {\n\n let mut b = BitSet::new();\n\n b.extend(start..end);\n\n *self = Refresh::Rows(b);\n\n }\n\n Refresh::Rows(ref mut b) => {\n", "file_path": "src/refresh.rs", "rank": 13, "score": 89674.60057814284 }, { "content": "/// Erase the last grapheme from the string. 
If that's not possible, or if the\n\n/// previous grapheme was a control character, add a backspace character to the\n\n/// string.\n\nfn backspace(out: &mut String) {\n\n let mut cursor = GraphemeCursor::new(out.len(), out.len(), true);\n\n if let Ok(Some(offset)) = cursor.prev_boundary(&out, 0) {\n\n if out[offset..]\n\n .chars()\n\n .next()\n\n .map(char::is_control)\n\n .unwrap_or(true)\n\n {\n\n out.push('\\x08');\n\n } else {\n\n out.truncate(offset);\n\n }\n\n } else {\n\n out.push('\\x08');\n\n }\n\n}\n\n\n", "file_path": "src/overstrike.rs", "rank": 14, "score": 87269.59808414191 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nenum Span {\n\n /// Ordinary text.\n\n Text(String),\n\n /// Text that matches the current search, and the search match index.\n\n Match(String, usize),\n\n /// A control character.\n\n Control(u8),\n\n /// An invalid UTF-8 byte.\n\n Invalid(u8),\n\n /// An unprintable unicode grapheme cluster.\n\n Unprintable(String),\n\n /// A sequence of SGR escape codes.\n\n SgrSequence(SmallVec<[Sgr; 5]>),\n\n /// A hyperlink escape code.\n\n Hyperlink(Option<Arc<Hyperlink>>),\n\n /// A DEC line drawing mode escape code.\n\n LineDrawing(bool),\n\n /// Data that should be ignored.\n\n Ignore(SmallVec<[u8; 20]>),\n\n /// A tab control character.\n\n TAB,\n\n /// A terminating CRLF sequence.\n\n CRLF,\n\n /// A terminating LF sequence.\n\n LF,\n\n}\n\n\n", "file_path": "src/line.rs", "rank": 15, "score": 84841.38122625403 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum OutputStyle {\n\n /// The source file's output style.\n\n File,\n\n /// Control characters style (inverse video).\n\n Control,\n\n /// A search match.\n\n Match,\n\n /// The currently selected search match.\n\n CurrentMatch,\n\n}\n\n\n", "file_path": "src/line.rs", "rank": 16, "score": 81180.86274856115 }, { "content": "/// Produce `Change`s to output some text in the given style at the given\n\n/// position, truncated to the start and end 
columns.\n\n///\n\n/// Returns the new position after the text has been rendered.\n\nfn write_truncated(\n\n changes: &mut Vec<Change>,\n\n attr_state: &mut AttributeState,\n\n style: OutputStyle,\n\n text: &str,\n\n start: usize,\n\n end: usize,\n\n position: usize,\n\n) -> Result<usize, std::io::Error> {\n\n let text_width = text.width();\n\n if position + text_width > start && position < end {\n\n if let Some(change) = attr_state.style(style)? {\n\n changes.push(change);\n\n }\n\n let start = start.saturating_sub(position);\n\n let end = end.saturating_sub(position);\n\n changes.push(Change::Text(util::truncate_string(\n\n text,\n\n start,\n\n end - start,\n\n )));\n\n }\n\n Ok(position + text_width)\n\n}\n\n\n", "file_path": "src/line.rs", "rank": 17, "score": 78587.05439561969 }, { "content": "fn generate_man_page(out_dir: impl AsRef<Path>) -> Result<(), io::Error> {\n\n // If asciidoc isn't installed, then don't do anything.\n\n if let Err(err) = process::Command::new(\"a2x\").output() {\n\n eprintln!(\"Could not run 'a2x' binary, skipping man page generation.\");\n\n eprintln!(\"Error from running 'a2x': {}\", err);\n\n return Ok(());\n\n }\n\n // 1. Read asciidoc template.\n\n // 2. Interpolate template with auto-generated docs.\n\n // 3. Save interpolation to disk.\n\n // 4. 
Use a2x (part of asciidoc) to convert to man page.\n\n let out_dir = out_dir.as_ref();\n\n let cwd = env::current_dir()?;\n\n let tpl_path = cwd.join(\"doc\").join(\"sp.1.txt.tpl\");\n\n let txt_path = out_dir.join(\"sp.1.txt\");\n\n\n\n let mut tpl = String::new();\n\n File::open(&tpl_path)?.read_to_string(&mut tpl)?;\n\n tpl = tpl.replace(\"{VERSION}\", env!(\"CARGO_PKG_VERSION\"));\n\n\n", "file_path": "build.rs", "rank": 18, "score": 65768.30730509727 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct RenderState {\n\n /// The number of columns on screen.\n\n width: usize,\n\n\n\n /// The number of rows on screen.\n\n height: usize,\n\n\n\n /// The file line at the top of the screen.\n\n top_line: usize,\n\n\n\n /// The porition of the file line at the top of the screen.\n\n top_line_portion: usize,\n\n\n\n /// The file line at the bottom of the screen.\n\n bottom_line: usize,\n\n\n\n /// The column at the left of the screen.\n\n left: usize,\n\n\n\n /// The height of the overlay.\n", "file_path": "src/screen.rs", "rank": 19, "score": 60426.71933072395 }, { "content": "/// Tracker of current attributes state.\n\nstruct AttributeState {\n\n /// Current attributes for the file\n\n attrs: CellAttributes,\n\n /// Whether DEC line drawing mode is currently enabled\n\n line_drawing: bool,\n\n /// Whether the file's attributes have changed\n\n changed: bool,\n\n /// What the currently applied style is.\n\n style: OutputStyle,\n\n}\n\n\n\nimpl AttributeState {\n\n /// Create a new color state tracker.\n\n fn new() -> AttributeState {\n\n AttributeState {\n\n attrs: CellAttributes::default(),\n\n line_drawing: false,\n\n changed: false,\n\n style: OutputStyle::File,\n\n }\n", "file_path": "src/line.rs", "rank": 20, "score": 59715.95447209325 }, { "content": "/// Line wraps in the cache are represented by a list of start and end offsets.\n\ntype WrapCacheItem = Vec<(usize, usize)>;\n\n\n\n/// Represents a single line in a displayed file.\n\n#[derive(Debug, 
Clone)]\n\npub(crate) struct Line {\n\n spans: Box<[Span]>,\n\n wraps: Arc<Mutex<LruCache<WrapCacheIndex, WrapCacheItem>>>,\n\n}\n\n\n\n/// Style that is being applied.\n", "file_path": "src/line.rs", "rank": 21, "score": 58717.95182780042 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq)]\n\nenum Overstrike {\n\n Normal,\n\n Bold,\n\n Underline,\n\n BoldUnderline,\n\n}\n\n\n\nimpl Overstrike {\n\n /// Make the overstrike style bold.\n\n fn bold(&mut self) {\n\n *self = match *self {\n\n Overstrike::Normal | Overstrike::Bold => Overstrike::Bold,\n\n Overstrike::Underline | Overstrike::BoldUnderline => Overstrike::BoldUnderline,\n\n }\n\n }\n\n\n\n /// Make the overstrike style underlined.\n\n fn underline(&mut self) {\n\n *self = match *self {\n\n Overstrike::Normal | Overstrike::Underline => Overstrike::Underline,\n", "file_path": "src/overstrike.rs", "rank": 22, "score": 53540.414877145646 }, { "content": "fn main() {\n\n let out_dir = env::var_os(\"OUT_DIR\").expect(\"OUT_DIR should be set\");\n\n fs::create_dir_all(&out_dir).expect(&format!(\n\n \"couldn't create output directory {}\",\n\n out_dir.to_string_lossy()\n\n ));\n\n\n\n generate_man_page(&out_dir).expect(\"couldn't generate manpage\");\n\n\n\n let mut app = app::app();\n\n app.gen_completions(\"sp\", Shell::Bash, &out_dir);\n\n app.gen_completions(\"sp\", Shell::Fish, &out_dir);\n\n app.gen_completions(\"sp\", Shell::Zsh, &out_dir);\n\n}\n\n\n", "file_path": "build.rs", "rank": 23, "score": 53008.24603126744 }, { "content": "fn fill_buffer(\n\n file: &mut StdFile,\n\n buffer: &mut Buffer,\n\n file_offset: usize,\n\n buffer_offset: usize,\n\n len: usize,\n\n) -> Result<(), Error> {\n\n if buffer_offset + len <= buffer.available() {\n\n return Ok(());\n\n }\n\n let mut write = buffer.write();\n\n if file.seek(SeekFrom::Start(file_offset as u64)).is_err() {\n\n // Ignore seek errors, treat them as though the data isn't there.\n\n return Ok(());\n\n }\n\n loop {\n\n match file.read(&mut write) {\n\n 
Ok(0) => {\n\n // We're at the end of the file. Nothing to do.\n\n break;\n", "file_path": "src/buffer_cache.rs", "rank": 24, "score": 47444.79534668753 }, { "content": "/// Metadata about a file that is being loaded.\n\nstruct FileMeta {\n\n /// The index of the file.\n\n index: usize,\n\n\n\n /// The loaded file's title. Usually its name.\n\n title: String,\n\n\n\n /// Information about the file.\n\n info: RwLock<Vec<String>>,\n\n\n\n /// The length of the file that has been parsed.\n\n length: AtomicUsize,\n\n\n\n /// The offset of each newline in the file.\n\n newlines: RwLock<Vec<usize>>,\n\n\n\n /// During reload, the number of lines the file had before reloading.\n\n reload_old_line_count: RwLock<Option<usize>>,\n\n\n\n /// Set to true when the file has been loaded and parsed.\n", "file_path": "src/file.rs", "rank": 25, "score": 39397.0842513325 }, { "content": "/// Guard to stop reading from a file when it is dropped\n\nstruct FileGuard {\n\n meta: Arc<FileMeta>,\n\n}\n\n\n\n/// Default value for `needed_lines`.\n\npub(crate) const DEFAULT_NEEDED_LINES: usize = 5000;\n\n\n\nimpl FileMeta {\n\n /// Create new file metadata.\n\n fn new(index: usize, title: String) -> FileMeta {\n\n FileMeta {\n\n index,\n\n title,\n\n info: RwLock::new(Vec::new()),\n\n length: AtomicUsize::new(0usize),\n\n newlines: RwLock::new(Vec::new()),\n\n reload_old_line_count: RwLock::new(None),\n\n finished: AtomicBool::new(false),\n\n dropped: AtomicBool::new(false),\n\n error: RwLock::new(None),\n", "file_path": "src/file.rs", "rank": 26, "score": 39392.70152656679 }, { "content": "/// Convert a span of unicode characters with overstrikes into a span with\n\n/// escape sequences\n\nfn convert_unicode_span(input: &str) -> String {\n\n let mut result = String::with_capacity(input.len());\n\n let mut prev_grapheme = None;\n\n let mut prev_overstrike = Overstrike::Normal;\n\n let mut overstrike = Overstrike::Normal;\n\n let mut graphemes = input.graphemes(true);\n\n while let 
Some(grapheme) = graphemes.next() {\n\n if grapheme == \"\\x08\" {\n\n if prev_grapheme.is_some() {\n\n if let Some(next_grapheme) = graphemes.next() {\n\n if next_grapheme == \"\\x08\" {\n\n backspace(&mut result);\n\n prev_grapheme = None;\n\n overstrike = Overstrike::Normal;\n\n } else if prev_grapheme == Some(next_grapheme) {\n\n if next_grapheme == \"_\" {\n\n // Overstriking underscore with itself is\n\n // ambiguous. Prefer to continue the existing\n\n // overstrike if there is any.\n\n if overstrike == Overstrike::Normal {\n", "file_path": "src/overstrike.rs", "rank": 27, "score": 38712.15232521178 }, { "content": "fn parse_bool(value: &str) -> Option<bool> {\n\n match value.to_ascii_lowercase().as_ref() {\n\n \"1\" | \"yes\" | \"true\" | \"on\" | \"always\" => Some(true),\n\n \"0\" | \"no\" | \"false\" | \"off\" | \"never\" => Some(false),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 28, "score": 37616.28901210052 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"keymaps/keymap.pest\"]\n\nstruct KeymapFileParser;\n\n\n\n// File data to generate a keymap\n\npub(crate) struct KeymapFile(Vec<((Modifiers, KeyCode), BindingConfig)>);\n\n\n\nimpl KeymapFile {\n\n fn parse_keycode(ident: &str) -> Option<KeyCode> {\n\n use KeyCode::*;\n\n match ident {\n\n \"Space\" => Some(Char(' ')),\n\n \"Cancel\" => Some(Cancel),\n\n \"Backspace\" => Some(Backspace),\n\n \"Tab\" => Some(Tab),\n\n \"Clear\" => Some(Clear),\n\n \"Enter\" => Some(Enter),\n\n \"Shift\" => Some(Shift),\n\n \"Escape\" => Some(Escape),\n\n \"Menu\" => Some(Menu),\n\n \"LeftMenu\" => Some(LeftMenu),\n\n \"RightMenu\" => Some(RightMenu),\n", "file_path": "src/keymap_file.rs", "rank": 29, "score": 36574.78601708948 }, { "content": "//! Progress indicator.\n\n//!\n\n//! sp can accept another file descriptor from its parent process via the\n\n//! `--pager-fd` option or the PAGER_PROGRESS_FD environment variable. This\n\n//! 
should be a pipe on which the parent process sends progress indicator pages.\n\n//!\n\n//! Progress indicator pages are blocks of text terminated by an ASCII form-feed\n\n//! character. The progress indicator will display the most recently received\n\n//! page.\n\nuse anyhow::Result;\n\nuse std::io::{BufRead, BufReader, Read};\n\nuse std::sync::{Arc, RwLock};\n\nuse std::thread;\n\n\n\nuse crate::event::{Event, EventSender, UniqueInstance};\n\n\n\n/// Initial buffer size for progress indicator pages.\n\nconst PROGRESS_BUFFER_SIZE: usize = 4096;\n\n\n\n/// Inner struct for the progress indicator.\n", "file_path": "src/progress.rs", "rank": 30, "score": 34132.19405260252 }, { "content": " });\n\n Progress { inner }\n\n }\n\n\n\n /// Returns the number of lines in the current page.\n\n pub(crate) fn lines(&self) -> usize {\n\n let inner = self.inner.read().unwrap();\n\n if inner.finished {\n\n return 0;\n\n }\n\n let mut lines = inner.newlines.len();\n\n let after_last_newline_offset = if lines == 0 {\n\n 0\n\n } else {\n\n inner.newlines[lines - 1] + 1\n\n };\n\n if inner.buffer.len() > after_last_newline_offset {\n\n lines += 1;\n\n }\n\n lines\n", "file_path": "src/progress.rs", "rank": 31, "score": 34131.135474939496 }, { "content": " }\n\n\n\n /// Calls the callback `call` with the given line of the current page.\n\n pub(crate) fn with_line<T, F>(&self, index: usize, mut call: F) -> Option<T>\n\n where\n\n F: FnMut(&[u8]) -> T,\n\n {\n\n let inner = self.inner.read().unwrap();\n\n if index > inner.newlines.len() {\n\n return None;\n\n }\n\n let start = if index == 0 {\n\n 0\n\n } else {\n\n inner.newlines[index - 1] + 1\n\n };\n\n let end = if index < inner.newlines.len() {\n\n inner.newlines[index] + 1\n\n } else {\n\n inner.buffer.len()\n\n };\n\n if start == end {\n\n return None;\n\n }\n\n Some(call(&inner.buffer[start..end]))\n\n }\n\n}\n", "file_path": "src/progress.rs", "rank": 32, "score": 34130.56878293768 }, { "content": " /// Create a new progress 
indicator that receives progress pages on the\n\n /// given file descriptor. Progress events are sent on the event_sender\n\n /// whenever a new page is received.\n\n pub(crate) fn new(reader: impl Read + Send + 'static, event_sender: EventSender) -> Progress {\n\n let inner = Arc::new(RwLock::new(ProgressInner {\n\n buffer: Vec::new(),\n\n newlines: Vec::new(),\n\n finished: false,\n\n }));\n\n let mut input = BufReader::new(reader);\n\n thread::spawn({\n\n let inner = inner.clone();\n\n let progress_unique = UniqueInstance::new();\n\n move || -> Result<()> {\n\n loop {\n\n let mut buffer = Vec::with_capacity(PROGRESS_BUFFER_SIZE);\n\n match input.read_until(b'\\x0C', &mut buffer) {\n\n Ok(0) | Err(_) => {\n\n let mut inner = inner.write().unwrap();\n\n inner.buffer = Vec::new();\n", "file_path": "src/progress.rs", "rank": 33, "score": 34128.45661207299 }, { "content": " inner.newlines = Vec::new();\n\n inner.finished = true;\n\n return Ok(());\n\n }\n\n Ok(len) => {\n\n buffer.truncate(len - 1);\n\n let mut newlines = Vec::new();\n\n for (i, byte) in buffer.iter().enumerate().take(len - 1) {\n\n if *byte == b'\\n' {\n\n newlines.push(i);\n\n }\n\n }\n\n let mut inner = inner.write().unwrap();\n\n inner.buffer = buffer;\n\n inner.newlines = newlines;\n\n event_sender.send_unique(Event::Progress, &progress_unique)?;\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/progress.rs", "rank": 34, "score": 34124.81723769216 }, { "content": " }\n\n\n\n /// Get an event, either from the event stream or from the terminal.\n\n pub(crate) fn get(\n\n &self,\n\n term: &mut dyn Terminal,\n\n wait: Option<Duration>,\n\n ) -> Result<Option<Event>, Error> {\n\n loop {\n\n if let Some(event) = self.try_recv()? {\n\n return Ok(Some(event));\n\n }\n\n\n\n // The queue is empty. Try to get an input event from the terminal.\n\n match term.poll_input(wait)? 
{\n\n Some(InputEvent::Wake) => {}\n\n Some(input_event) => return Ok(Some(Event::Input(input_event))),\n\n None => return Ok(None),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/event.rs", "rank": 35, "score": 34122.93750918051 }, { "content": "//! Events.\n\nuse anyhow::Error;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::mpsc;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\nuse termwiz::input::InputEvent;\n\nuse termwiz::terminal::{Terminal, TerminalWaker};\n\n\n\n/// An event.\n\n///\n\n/// Events drive most of the main processing of `sp`. This includes user\n\n/// input, state changes, and display refresh requests.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub(crate) enum Event {\n\n /// An input event.\n\n Input(InputEvent),\n\n /// A file has finished loading.\n\n Loaded(usize),\n\n /// A file has started loading more data.\n", "file_path": "src/event.rs", "rank": 36, "score": 34122.007413196916 }, { "content": "pub(crate) struct ProgressInner {\n\n /// Buffer containing the currently displayed page.\n\n buffer: Vec<u8>,\n\n\n\n /// Offsets of all the newlines in the current page.\n\n newlines: Vec<usize>,\n\n\n\n /// Whether the progress indicator is finished because the other\n\n /// end of the pipe closed.\n\n finished: bool,\n\n}\n\n\n\n/// A progress indicator.\n\n#[derive(Clone)]\n\npub(crate) struct Progress {\n\n /// The inner progress indicator data.\n\n inner: Arc<RwLock<ProgressInner>>,\n\n}\n\n\n\nimpl Progress {\n", "file_path": "src/progress.rs", "rank": 37, "score": 34119.41618809406 }, { "content": " Appending(usize),\n\n /// A file has started reloading.\n\n Reloading(usize),\n\n /// Render an update to the screen.\n\n Render,\n\n /// Refresh the whole screen.\n\n Refresh,\n\n /// Refresh the overlay.\n\n RefreshOverlay,\n\n /// A new progress display is available.\n\n Progress,\n\n /// Search has found the first match.\n\n SearchFirstMatch(usize),\n\n /// Search has finished.\n\n 
SearchFinished(usize),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct UniqueInstance(Arc<AtomicBool>);\n\n\n", "file_path": "src/event.rs", "rank": 38, "score": 34112.31250562342 }, { "content": " pub(crate) fn new(waker: TerminalWaker) -> EventStream {\n\n let (send, recv) = mpsc::channel();\n\n EventStream { send, recv, waker }\n\n }\n\n\n\n /// Create a sender for the event stream.\n\n pub(crate) fn sender(&self) -> EventSender {\n\n EventSender(self.send.clone(), self.waker.clone())\n\n }\n\n\n\n fn try_recv(&self) -> Result<Option<Event>, Error> {\n\n match self.recv.try_recv() {\n\n Ok(Envelope::Normal(event)) => Ok(Some(event)),\n\n Ok(Envelope::Unique(event, unique)) => {\n\n unique.0.store(false, Ordering::SeqCst);\n\n Ok(Some(event))\n\n }\n\n Err(mpsc::TryRecvError::Empty) => Ok(None),\n\n Err(e) => Err(e.into()),\n\n }\n", "file_path": "src/event.rs", "rank": 39, "score": 34110.76577913353 }, { "content": " }\n\n pub(crate) fn send_unique(&self, event: Event, unique: &UniqueInstance) -> Result<(), Error> {\n\n if !unique.0.compare_and_swap(false, true, Ordering::SeqCst) {\n\n self.0.send(Envelope::Unique(event, unique.clone()))?;\n\n self.1.wake()?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n/// An event stream. 
This is a wrapper multi-producer, single-consumer\n\n/// stream of `Event`s.\n\npub(crate) struct EventStream {\n\n send: mpsc::Sender<Envelope>,\n\n recv: mpsc::Receiver<Envelope>,\n\n waker: TerminalWaker,\n\n}\n\n\n\nimpl EventStream {\n\n /// Create a new event stream.\n", "file_path": "src/event.rs", "rank": 40, "score": 34109.3735213391 }, { "content": "impl UniqueInstance {\n\n pub(crate) fn new() -> UniqueInstance {\n\n UniqueInstance(Arc::new(AtomicBool::new(false)))\n\n }\n\n}\n\n\n\npub(crate) enum Envelope {\n\n Normal(Event),\n\n Unique(Event, UniqueInstance),\n\n}\n\n\n\n/// An event sender endpoint.\n\n#[derive(Clone)]\n\npub(crate) struct EventSender(mpsc::Sender<Envelope>, TerminalWaker);\n\n\n\nimpl EventSender {\n\n pub(crate) fn send(&self, event: Event) -> Result<(), Error> {\n\n self.0.send(Envelope::Normal(event))?;\n\n self.1.wake()?;\n\n Ok(())\n", "file_path": "src/event.rs", "rank": 41, "score": 34107.30755352905 }, { "content": " let (send_event, mut reload) = if appending.load(Ordering::SeqCst) {\n\n std::thread::sleep(Duration::from_millis(100));\n\n (false, end_data.is_empty())\n\n } else {\n\n meta.finished.store(true, Ordering::SeqCst);\n\n event_sender.send_unique(Event::Loaded(meta.index), &loaded_instance)?;\n\n {\n\n let mut reload_old_line_count =\n\n meta.reload_old_line_count.write().unwrap();\n\n *reload_old_line_count = None;\n\n }\n\n match event_rx.recv() {\n\n Ok(FileEvent::Append) => (true, end_data.is_empty()),\n\n Ok(FileEvent::Reload) => (true, true),\n\n Err(e) => {\n\n let mut error = meta.error.write().unwrap();\n\n *error = Some(e.into());\n\n return Ok(());\n\n }\n\n }\n", "file_path": "src/file.rs", "rank": 42, "score": 33486.02683381501 }, { "content": " needed_lines: AtomicUsize::new(DEFAULT_NEEDED_LINES),\n\n waker: Condvar::new(),\n\n waker_mutex: Mutex::new(()),\n\n }\n\n }\n\n}\n\n\n\nimpl FileData {\n\n /// Create a new streamed file.\n\n ///\n\n /// A background thread is started to read from `input` 
and store the\n\n /// content in buffers. Metadata about loading is written to `meta`.\n\n ///\n\n /// Returns `FileData` containing the buffers that the background thread\n\n /// is loading into.\n\n fn new_streamed(\n\n mut input: impl Read + Send + 'static,\n\n meta: Arc<FileMeta>,\n\n event_sender: EventSender,\n\n ) -> Result<FileData, Error> {\n", "file_path": "src/file.rs", "rank": 43, "score": 33479.15487508559 }, { "content": " /// Returns `FileData` containing the memory map.\n\n fn new_mapped(\n\n file: StdFile,\n\n meta: Arc<FileMeta>,\n\n event_sender: EventSender,\n\n ) -> Result<FileData, Error> {\n\n // We can't mmap empty files, so just return an empty filedata if the\n\n // file's length is 0.\n\n if file.metadata()?.len() == 0 {\n\n meta.finished.store(true, Ordering::SeqCst);\n\n event_sender.send(Event::Loaded(meta.index))?;\n\n return Ok(FileData::Empty);\n\n }\n\n let mmap = Arc::new(unsafe { Mmap::map(&file)? });\n\n thread::spawn({\n\n let mmap = mmap.clone();\n\n move || -> Result<()> {\n\n let len = mmap.len();\n\n let blocks = (len + BUFFER_SIZE - 1) / BUFFER_SIZE;\n\n for block in 0..blocks {\n", "file_path": "src/file.rs", "rank": 44, "score": 33478.73507442779 }, { "content": " let path = path.to_path_buf();\n\n move || -> Result<()> {\n\n loop {\n\n let (tx, rx) = mpsc::channel();\n\n let mut watcher: RecommendedWatcher =\n\n Watcher::new(tx, Duration::from_millis(500)).expect(\"create watcher\");\n\n watcher\n\n .watch(path.clone(), RecursiveMode::NonRecursive)\n\n .expect(\"watch file\");\n\n loop {\n\n if meta.dropped.load(Ordering::SeqCst) {\n\n return Ok(());\n\n }\n\n let event = rx.recv();\n\n match event {\n\n Ok(DebouncedEvent::NoticeWrite(_)) => {\n\n appending.store(true, Ordering::SeqCst);\n\n events.send(FileEvent::Append)?;\n\n }\n\n Ok(DebouncedEvent::Write(_)) => {\n", "file_path": "src/file.rs", "rank": 45, "score": 33478.30563166288 }, { "content": " Ok((out_file, err_file))\n\n }\n\n\n\n /// Load a file from static 
data.\n\n pub(crate) fn new_static(\n\n index: usize,\n\n title: &str,\n\n data: impl Into<Cow<'static, [u8]>>,\n\n event_sender: EventSender,\n\n ) -> Result<File, Error> {\n\n let meta = Arc::new(FileMeta::new(index, title.to_string()));\n\n let data = FileData::new_static(data, meta.clone(), event_sender)?;\n\n Ok(File::new(data, meta))\n\n }\n\n\n\n /// The file's index.\n\n pub(crate) fn index(&self) -> usize {\n\n self.meta.index\n\n }\n\n\n", "file_path": "src/file.rs", "rank": 46, "score": 33477.55295330763 }, { "content": " ///\n\n /// Returns `FileData` containing the static data.\n\n fn new_static(\n\n data: impl Into<Cow<'static, [u8]>>,\n\n meta: Arc<FileMeta>,\n\n event_sender: EventSender,\n\n ) -> Result<FileData, Error> {\n\n let data = Arc::new(data.into());\n\n thread::spawn({\n\n let data = data.clone();\n\n move || -> Result<()> {\n\n let len = data.len();\n\n let blocks = (len + BUFFER_SIZE - 1) / BUFFER_SIZE;\n\n for block in 0..blocks {\n\n if meta.dropped.load(Ordering::SeqCst) {\n\n return Ok(());\n\n }\n\n let mut newlines = meta.newlines.write().unwrap();\n\n for (i, byte) in data\n\n .iter()\n", "file_path": "src/file.rs", "rank": 47, "score": 33476.81142021455 }, { "content": " }\n\n\n\n pub(crate) fn new_file(\n\n index: usize,\n\n filename: &OsStr,\n\n event_sender: EventSender,\n\n ) -> Result<File, Error> {\n\n let title = filename.to_string_lossy().into_owned();\n\n let meta = Arc::new(FileMeta::new(index, title.to_string()));\n\n let mut file = StdFile::open(filename).context(title)?;\n\n // Determine whether this file is a real file, or some kind of pipe, by\n\n // attempting to do a no-op seek. 
If it fails, we won't be able to seek\n\n // around and load parts of the file at will, so treat it as a stream.\n\n let data = match file.seek(SeekFrom::Current(0)) {\n\n Ok(_) => FileData::new_file(filename, meta.clone(), event_sender)?,\n\n Err(_) => FileData::new_streamed(file, meta.clone(), event_sender)?,\n\n };\n\n Ok(File::new(data, meta))\n\n }\n\n\n", "file_path": "src/file.rs", "rank": 48, "score": 33476.73395160637 }, { "content": " let out = process.stdout.take().unwrap();\n\n let err = process.stderr.take().unwrap();\n\n let out_file = File::new_streamed(index, out, &title, event_sender.clone())?;\n\n let err_file = File::new_streamed(index + 1, err, &title_err, event_sender.clone())?;\n\n thread::spawn({\n\n let out_file = out_file.clone();\n\n move || -> Result<()> {\n\n if let Ok(rc) = process.wait() {\n\n if !rc.success() {\n\n let mut info = out_file.meta.info.write().unwrap();\n\n match rc.code() {\n\n Some(code) => info.push(format!(\"rc: {}\", code)),\n\n None => info.push(\"killed!\".to_string()),\n\n }\n\n event_sender.send(Event::RefreshOverlay)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n });\n", "file_path": "src/file.rs", "rank": 49, "score": 33476.67294469453 }, { "content": " /// Load a file by memory mapping it if possible.\n\n #[allow(unused)]\n\n pub(crate) fn new_mapped(\n\n index: usize,\n\n filename: &OsStr,\n\n event_sender: EventSender,\n\n ) -> Result<File, Error> {\n\n let title = filename.to_string_lossy().into_owned();\n\n let meta = Arc::new(FileMeta::new(index, title.clone()));\n\n let mut file = StdFile::open(filename).context(title)?;\n\n // Determine whether this file is a real file, or some kind of pipe, by\n\n // attempting to do a no-op seek. 
If it fails, assume we can't mmap\n\n // it.\n\n let data = match file.seek(SeekFrom::Current(0)) {\n\n Ok(_) => FileData::new_mapped(file, meta.clone(), event_sender)?,\n\n Err(_) => FileData::new_streamed(file, meta.clone(), event_sender)?,\n\n };\n\n Ok(File::new(data, meta))\n\n }\n\n\n", "file_path": "src/file.rs", "rank": 50, "score": 33476.42313366685 }, { "content": " if meta.dropped.load(Ordering::SeqCst) {\n\n return Ok(());\n\n }\n\n let mut newlines = meta.newlines.write().unwrap();\n\n for i in block * BUFFER_SIZE..min((block + 1) * BUFFER_SIZE, len) {\n\n if mmap[i] == b'\\n' {\n\n newlines.push(i);\n\n }\n\n }\n\n }\n\n meta.length.store(len, Ordering::SeqCst);\n\n meta.finished.store(true, Ordering::SeqCst);\n\n event_sender.send(Event::Loaded(meta.index))?;\n\n Ok(())\n\n }\n\n });\n\n Ok(FileData::Mapped { mmap })\n\n }\n\n\n\n /// Create a new file from static data.\n", "file_path": "src/file.rs", "rank": 51, "score": 33476.217185059184 }, { "content": " finished: AtomicBool,\n\n\n\n /// Set to true when the file has been dropped. Checked by background\n\n /// threads to exit early.\n\n dropped: AtomicBool,\n\n\n\n /// The most recent error encountered when loading the file.\n\n error: RwLock<Option<Error>>,\n\n\n\n /// If needed_lines > newlines.len(), pause loading.\n\n needed_lines: AtomicUsize,\n\n\n\n /// CondVar to wake up file loading.\n\n waker: Condvar,\n\n\n\n /// Mutex used by waker.\n\n waker_mutex: Mutex<()>,\n\n}\n\n\n\n/// Event triggered by changes to a file on disk.\n", "file_path": "src/file.rs", "rank": 52, "score": 33476.16687948565 }, { "content": "//! 
Files.\n\nuse anyhow::{Context, Error, Result};\n\nuse memmap::Mmap;\n\nuse notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher};\n\nuse std::borrow::Cow;\n\nuse std::cmp::{max, min};\n\nuse std::ffi::OsStr;\n\nuse std::fs::File as StdFile;\n\nuse std::io::{Read, Seek, SeekFrom};\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::{Command, Stdio};\n\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse std::sync::mpsc;\n\nuse std::sync::{Arc, Condvar, Mutex, RwLock};\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse crate::buffer::Buffer;\n\nuse crate::buffer_cache::BufferCache;\n\nuse crate::event::{Event, EventSender, UniqueInstance};\n", "file_path": "src/file.rs", "rank": 53, "score": 33475.71146615881 }, { "content": " reload_old_line_count.unwrap_or(0)\n\n } else {\n\n 0\n\n };\n\n let newlines = self.meta.newlines.read().unwrap();\n\n max(\n\n lines,\n\n line_count(newlines.as_slice(), self.meta.length.load(Ordering::SeqCst)),\n\n )\n\n }\n\n\n\n /// Runs the `call` function, passing it the contents of line `index`.\n\n /// Tries to avoid copying the data if possible, however the borrowed\n\n /// line only lasts as long as the function call.\n\n pub(crate) fn with_line<T, F>(&self, index: usize, call: F) -> Option<T>\n\n where\n\n F: FnMut(Cow<'_, [u8]>) -> T,\n\n {\n\n let newlines = self.meta.newlines.read().unwrap();\n\n if index > newlines.len() {\n", "file_path": "src/file.rs", "rank": 54, "score": 33475.6851416453 }, { "content": " let buffers = Arc::new(RwLock::new(Vec::new()));\n\n thread::spawn({\n\n let buffers = buffers.clone();\n\n move || -> Result<()> {\n\n let mut offset = 0usize;\n\n let mut total_buffer_size = 0usize;\n\n let mut waker_mutex = meta.waker_mutex.lock().unwrap();\n\n loop {\n\n // Check if a new buffer must be allocated.\n\n if offset == total_buffer_size {\n\n let mut buffers = buffers.write().unwrap();\n\n buffers.push(Buffer::new(BUFFER_SIZE));\n\n total_buffer_size += 
BUFFER_SIZE;\n\n }\n\n let buffers = buffers.read().unwrap();\n\n let mut write = buffers.last().unwrap().write();\n\n match input.read(&mut write) {\n\n Ok(0) => {\n\n // The end of the file has been reached. Complete.\n\n meta.finished.store(true, Ordering::SeqCst);\n", "file_path": "src/file.rs", "rank": 55, "score": 33475.63309622446 }, { "content": " /// Guard to stop loading the file when all references to it are dropped.\n\n guard: Arc<FileGuard>,\n\n}\n\n\n\nimpl File {\n\n fn new(data: FileData, meta: Arc<FileMeta>) -> Self {\n\n let guard = Arc::new(FileGuard { meta: meta.clone() });\n\n File { data, meta, guard }\n\n }\n\n\n\n /// Load stream.\n\n pub(crate) fn new_streamed(\n\n index: usize,\n\n stream: impl Read + Send + 'static,\n\n title: &str,\n\n event_sender: EventSender,\n\n ) -> Result<File, Error> {\n\n let meta = Arc::new(FileMeta::new(index, title.to_string()));\n\n let data = FileData::new_streamed(stream, meta.clone(), event_sender)?;\n\n Ok(File::new(data, meta))\n", "file_path": "src/file.rs", "rank": 56, "score": 33475.07159455629 }, { "content": " /// Load the output and error of a command\n\n pub(crate) fn new_command<I, S>(\n\n index: usize,\n\n command: &OsStr,\n\n args: I,\n\n title: &str,\n\n event_sender: EventSender,\n\n ) -> Result<(File, File), Error>\n\n where\n\n I: IntoIterator<Item = S>,\n\n S: AsRef<OsStr>,\n\n {\n\n let title_err = format!(\"STDERR for {}\", title);\n\n let mut process = Command::new(command)\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn()\n\n .context(command.to_string_lossy().into_owned())?;\n", "file_path": "src/file.rs", "rank": 57, "score": 33474.637492721646 }, { "content": " /// If `self.lines()` exceeds that number, pause loading until\n\n /// `set_needed_lines` is called with a larger number.\n\n /// This is only effective for \"streamed\" input.\n\n pub(crate) fn set_needed_lines(&self, lines: usize) {\n\n // This can be simplified 
by `fetch_max` when it's stable.\n\n if self.meta.needed_lines.load(Ordering::SeqCst) >= lines {\n\n return;\n\n }\n\n self.meta.needed_lines.store(lines, Ordering::SeqCst);\n\n self.meta.waker.notify_all();\n\n }\n\n\n\n /// Check if the loading thread has been paused.\n\n pub(crate) fn paused(&self) -> bool {\n\n !self.loaded() && self.meta.waker_mutex.try_lock().is_ok()\n\n }\n\n}\n\n\n\nimpl Drop for FileGuard {\n\n fn drop(&mut self) {\n\n self.meta.dropped.store(true, Ordering::SeqCst);\n\n // The thread might be blocked. Wake it up so it can notice the change\n\n // in `dropped`.\n\n self.meta.waker.notify_all();\n\n }\n\n}\n\n\n", "file_path": "src/file.rs", "rank": 58, "score": 33474.58818816314 }, { "content": " Ok(FileData::Streamed { buffers })\n\n }\n\n\n\n /// Create a new file from disk.\n\n fn new_file<P: AsRef<Path>>(\n\n path: P,\n\n meta: Arc<FileMeta>,\n\n event_sender: EventSender,\n\n ) -> Result<FileData, Error> {\n\n let path = path.as_ref();\n\n let mut file = Some(StdFile::open(path)?);\n\n let (events, event_rx) = mpsc::channel();\n\n let appending = Arc::new(AtomicBool::new(false));\n\n let buffer_cache = Arc::new(Mutex::new(BufferCache::new(path, BUFFER_SIZE, CACHE_SIZE)));\n\n\n\n // Create a thread to watch for file change notifications.\n\n thread::spawn({\n\n let events = events.clone();\n\n let appending = appending.clone();\n\n let meta = meta.clone();\n", "file_path": "src/file.rs", "rank": 59, "score": 33474.19164147107 }, { "content": " .enumerate()\n\n .skip(block * BUFFER_SIZE)\n\n .take(BUFFER_SIZE)\n\n {\n\n if *byte == b'\\n' {\n\n newlines.push(i);\n\n }\n\n }\n\n }\n\n meta.length.store(len, Ordering::SeqCst);\n\n meta.finished.store(true, Ordering::SeqCst);\n\n event_sender.send(Event::Loaded(meta.index))?;\n\n Ok(())\n\n }\n\n });\n\n Ok(FileData::Static { data })\n\n }\n\n\n\n /// Runs the `call` function, passing it a slice of the data from `start` to `end`.\n\n /// Tries to avoid copying the data if possible.\n", 
"file_path": "src/file.rs", "rank": 60, "score": 33473.92631131817 }, { "content": " fn with_slice<T, F>(&self, start: usize, end: usize, mut call: F) -> T\n\n where\n\n F: FnMut(Cow<'_, [u8]>) -> T,\n\n {\n\n match self {\n\n FileData::Streamed { buffers } => {\n\n let start_buffer = start / BUFFER_SIZE;\n\n let end_buffer = (end - 1) / BUFFER_SIZE;\n\n let buffers = buffers.read().unwrap();\n\n if start_buffer == end_buffer {\n\n let data = buffers[start_buffer].read();\n\n call(Cow::Borrowed(\n\n &data[start % BUFFER_SIZE..=(end - 1) % BUFFER_SIZE],\n\n ))\n\n } else {\n\n // The data spans multiple buffers, so we must make a copy to make it contiguous.\n\n let mut v = Vec::with_capacity(end - start);\n\n v.extend_from_slice(&buffers[start_buffer].read()[start % BUFFER_SIZE..]);\n\n for b in start_buffer + 1..end_buffer {\n\n v.extend_from_slice(&buffers[b].read()[..]);\n", "file_path": "src/file.rs", "rank": 61, "score": 33473.46678031336 }, { "content": " return None;\n\n }\n\n let start = if index == 0 {\n\n 0\n\n } else {\n\n newlines[index - 1] + 1\n\n };\n\n let end = if index < newlines.len() {\n\n newlines[index] + 1\n\n } else {\n\n self.meta.length.load(Ordering::SeqCst)\n\n };\n\n if start == end {\n\n return None;\n\n }\n\n Some(self.data.with_slice(start, end, call))\n\n }\n\n\n\n /// Set how many lines are needed.\n\n ///\n", "file_path": "src/file.rs", "rank": 62, "score": 33472.359076480774 }, { "content": " /// The file's title.\n\n pub(crate) fn title(&self) -> &str {\n\n &self.meta.title\n\n }\n\n\n\n /// The file's info.\n\n pub(crate) fn info(&self) -> String {\n\n let info = self.meta.info.read().unwrap();\n\n info.join(\" \")\n\n }\n\n\n\n /// True once the file is loaded and all newlines have been parsed.\n\n pub(crate) fn loaded(&self) -> bool {\n\n self.meta.finished.load(Ordering::SeqCst)\n\n }\n\n\n\n /// Returns the number of lines in the file.\n\n pub(crate) fn lines(&self) -> usize {\n\n let lines = if 
!self.meta.finished.load(Ordering::SeqCst) {\n\n let reload_old_line_count = self.meta.reload_old_line_count.read().unwrap();\n", "file_path": "src/file.rs", "rank": 63, "score": 33472.275390669245 }, { "content": "\n\n/// Buffer size to use when loading and parsing files. This is also the block\n\n/// size when parsing memory mapped files or caching files read from disk.\n\nconst BUFFER_SIZE: usize = 1024 * 1024;\n\n\n\n/// Size of the file cache in buffers.\n\nconst CACHE_SIZE: usize = 16;\n\n\n\n/// The data content of the file.\n\n#[derive(Clone)]\n", "file_path": "src/file.rs", "rank": 64, "score": 33472.05089610188 }, { "content": " }\n\n }\n\n if reload {\n\n buffer_cache.lock().unwrap().clear();\n\n let mut reload_old_line_count = meta.reload_old_line_count.write().unwrap();\n\n let mut newlines = meta.newlines.write().unwrap();\n\n let count = max(\n\n reload_old_line_count.unwrap_or(0),\n\n line_count(newlines.as_slice(), total_length),\n\n );\n\n *reload_old_line_count = Some(count);\n\n newlines.clear();\n\n total_length = 0;\n\n if send_event {\n\n event_sender\n\n .send_unique(Event::Reloading(meta.index), &reloading_instance)?;\n\n }\n\n } else if send_event {\n\n event_sender\n\n .send_unique(Event::Appending(meta.index), &appending_instance)?;\n", "file_path": "src/file.rs", "rank": 65, "score": 33471.796231209984 }, { "content": " event_sender.send(Event::Loaded(meta.index))?;\n\n return Ok(());\n\n }\n\n Ok(len) => {\n\n if meta.dropped.load(Ordering::SeqCst) {\n\n return Ok(());\n\n }\n\n // Some data has been read. Parse its newlines.\n\n let line_count = {\n\n let mut newlines = meta.newlines.write().unwrap();\n\n for i in 0..len {\n\n if write[i] == b'\\n' {\n\n newlines.push(offset + i);\n\n }\n\n }\n\n // Mark that the data has been written. 
This\n\n // needs to be done here before we drop the\n\n // lock for `newlines`.\n\n offset += len;\n\n write.written(len);\n", "file_path": "src/file.rs", "rank": 66, "score": 33471.575974786254 }, { "content": " }\n\n v.extend_from_slice(&buffers[end_buffer].read()[..=(end - 1) % BUFFER_SIZE]);\n\n call(Cow::Owned(v))\n\n }\n\n }\n\n FileData::File {\n\n events,\n\n buffer_cache,\n\n ..\n\n } => {\n\n let mut buffer_cache = buffer_cache.lock().unwrap();\n\n buffer_cache\n\n .with_slice(start, end, |data| {\n\n if data\n\n .iter()\n\n .take(data.len().saturating_sub(1))\n\n .any(|c| *c == b'\\n')\n\n {\n\n events.send(FileEvent::Reload).unwrap();\n\n }\n", "file_path": "src/file.rs", "rank": 67, "score": 33470.87134298715 }, { "content": " };\n\n match StdFile::open(&path) {\n\n Ok(mut f) => {\n\n if !reload {\n\n let mut new_data = Vec::new();\n\n new_data.resize(end_data.len(), 0);\n\n let offset = total_length - end_data.len();\n\n if f.seek(SeekFrom::Start(offset as u64)).is_ok()\n\n && f.read(new_data.as_mut_slice()).ok() == Some(end_data.len())\n\n && new_data == end_data\n\n {\n\n // We can continue where we left off\n\n } else {\n\n reload = true;\n\n }\n\n }\n\n file = Some(f);\n\n }\n\n Err(_) => {\n\n reload = true;\n", "file_path": "src/file.rs", "rank": 68, "score": 33469.494933798225 }, { "content": " buffer.resize(BUFFER_SIZE, 0);\n\n loop {\n\n match file.read(buffer.as_mut_slice()) {\n\n Ok(0) => break,\n\n Ok(len) => {\n\n if meta.dropped.load(Ordering::SeqCst) {\n\n return Ok(());\n\n }\n\n let mut newlines = meta.newlines.write().unwrap();\n\n for (i, byte) in buffer.iter().enumerate().take(len) {\n\n if *byte == b'\\n' {\n\n newlines.push(total_length + i);\n\n }\n\n }\n\n total_length += len;\n\n meta.length.store(total_length, Ordering::SeqCst);\n\n }\n\n Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => {}\n\n Err(e) => {\n\n let mut error = meta.error.write().unwrap();\n", "file_path": "src/file.rs", "rank": 69, "score": 
33469.44942503248 }, { "content": " appending.store(false, Ordering::SeqCst);\n\n events.send(FileEvent::Append)?;\n\n }\n\n Ok(DebouncedEvent::Create(_)) => {\n\n events.send(FileEvent::Append)?;\n\n }\n\n Ok(DebouncedEvent::Rename(_, _)) => {\n\n events.send(FileEvent::Reload)?;\n\n }\n\n Ok(DebouncedEvent::NoticeRemove(_)) | Ok(DebouncedEvent::Chmod(_)) => {\n\n events.send(FileEvent::Reload)?;\n\n break;\n\n }\n\n Err(_) => {\n\n // The watcher failed for some reason.\n\n // Wait before retrying.\n\n thread::sleep(Duration::from_secs(1));\n\n break;\n\n }\n\n _ => {}\n", "file_path": "src/file.rs", "rank": 70, "score": 33469.28471698691 }, { "content": " meta.length.fetch_add(len, Ordering::SeqCst);\n\n newlines.len()\n\n };\n\n while line_count >= meta.needed_lines.load(Ordering::SeqCst) {\n\n // Enough data is loaded. Pause.\n\n waker_mutex = meta.waker.wait(waker_mutex).unwrap();\n\n if meta.dropped.load(Ordering::SeqCst) {\n\n return Ok(());\n\n }\n\n }\n\n }\n\n Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => {}\n\n Err(e) => {\n\n let mut error = meta.error.write().unwrap();\n\n *error = Some(e.into());\n\n }\n\n }\n\n }\n\n }\n\n });\n", "file_path": "src/file.rs", "rank": 71, "score": 33469.14257838501 }, { "content": " }\n\n }\n\n }\n\n }\n\n });\n\n\n\n // Create a thread to load the file.\n\n thread::spawn({\n\n let buffer_cache = buffer_cache.clone();\n\n let path = path.to_path_buf();\n\n move || -> Result<()> {\n\n let loaded_instance = UniqueInstance::new();\n\n let appending_instance = UniqueInstance::new();\n\n let reloading_instance = UniqueInstance::new();\n\n let mut total_length = 0;\n\n let mut end_data = Vec::new();\n\n loop {\n\n meta.length.store(total_length, Ordering::SeqCst);\n\n if let Some(mut file) = file.take() {\n\n let mut buffer = Vec::new();\n", "file_path": "src/file.rs", "rank": 72, "score": 33467.0623981659 }, { "content": " }\n\n meta.finished.store(false, Ordering::SeqCst);\n\n }\n\n }\n\n });\n\n\n\n let 
path = path.to_path_buf();\n\n Ok(FileData::File {\n\n path,\n\n buffer_cache,\n\n events,\n\n })\n\n }\n\n\n\n /// Create a new memory mapped file.\n\n ///\n\n /// The `file` is memory mapped and then a background thread is started to\n\n /// parse the newlines in the file. The parsing progress is stored in\n\n /// `meta`.\n\n ///\n", "file_path": "src/file.rs", "rank": 73, "score": 33466.775604591145 }, { "content": " call(data)\n\n })\n\n .unwrap()\n\n }\n\n FileData::Mapped { mmap } => call(Cow::Borrowed(&mmap[start..end])),\n\n FileData::Empty => call(Cow::Borrowed(&[])),\n\n FileData::Static { data } => call(Cow::Borrowed(&data[start..end])),\n\n }\n\n }\n\n}\n\n\n\n/// A loaded file.\n\n#[derive(Clone)]\n\npub(crate) struct File {\n\n /// The data for the file.\n\n data: FileData,\n\n\n\n /// Metadata about the loading of the file.\n\n meta: Arc<FileMeta>,\n\n\n", "file_path": "src/file.rs", "rank": 74, "score": 33465.860378727 }, { "content": " *error = Some(e.into());\n\n }\n\n }\n\n }\n\n\n\n // Attempt to read the last 4k of the file. 
If the file changes, we will\n\n // check this portion of the file to see if we need to reload the file.\n\n let end_len = total_length.min(4096);\n\n end_data.clear();\n\n if file.seek(SeekFrom::End(-(end_len as i64))).is_ok() {\n\n end_data.resize(end_len, 0);\n\n if let Ok(len) = file.read(end_data.as_mut_slice()) {\n\n if len != end_len {\n\n end_data.clear();\n\n }\n\n } else {\n\n end_data.clear();\n\n }\n\n }\n\n }\n", "file_path": "src/file.rs", "rank": 75, "score": 33465.14176914704 }, { "content": "}\n\n\n\nimpl Span {\n\n /// Render the span at the given position in the terminal.\n\n fn render(\n\n &self,\n\n changes: &mut Vec<Change>,\n\n attr_state: &mut AttributeState,\n\n start: usize,\n\n end: usize,\n\n mut position: usize,\n\n search_index: Option<usize>,\n\n ) -> Result<usize, std::io::Error> {\n\n match *self {\n\n Span::Text(ref t) => {\n\n let text = if attr_state.line_drawing {\n\n Cow::Owned(line_drawing::convert_line_drawing(t.as_str()))\n\n } else {\n\n Cow::Borrowed(t.as_str())\n\n };\n", "file_path": "src/line.rs", "rank": 76, "score": 33377.75041507287 }, { "content": " &data[match_start..match_end],\n\n Some(match_index),\n\n ));\n\n start = match_end;\n\n }\n\n if start < data.len() {\n\n spans.append(&mut parse_spans(&data[start..], None));\n\n }\n\n let spans = spans.into_boxed_slice();\n\n let wraps = Arc::new(Mutex::new(LruCache::new(WRAPS_CACHE_SIZE)));\n\n Line { spans, wraps }\n\n }\n\n\n\n /// Produce the `Change`s needed to render a slice of the line on a terminal.\n\n pub(crate) fn render(\n\n &self,\n\n changes: &mut Vec<Change>,\n\n start: usize,\n\n end: usize,\n\n search_index: Option<usize>,\n", "file_path": "src/line.rs", "rank": 77, "score": 33376.39340074538 }, { "content": " }\n\n Ordering::Equal => changes.push(Change::AllAttributes(CellAttributes::default())),\n\n Ordering::Less => changes.push(Change::ClearToEndOfLine(ColorAttribute::default())),\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Produce the `Change`s needed to 
render a row of the wrapped line on a terminal.\n\n pub(crate) fn render_wrapped(\n\n &self,\n\n changes: &mut Vec<Change>,\n\n row: usize,\n\n width: usize,\n\n wrapping: WrappingMode,\n\n search_index: Option<usize>,\n\n ) -> Result<(), std::io::Error> {\n\n let (start, end) = {\n\n let mut wraps = self.wraps.lock().unwrap();\n\n if let Some(rows) = wraps.get_mut(&(width, wrapping)) {\n\n let (start, end) = rows.get(row).unwrap_or(&(0, 0));\n", "file_path": "src/line.rs", "rank": 78, "score": 33371.53788701121 }, { "content": " position = write_truncated(\n\n changes,\n\n attr_state,\n\n OutputStyle::File,\n\n text.as_ref(),\n\n start,\n\n end,\n\n position,\n\n )?;\n\n }\n\n Span::Match(ref t, ref match_index) => {\n\n let style = if search_index == Some(*match_index) {\n\n OutputStyle::CurrentMatch\n\n } else {\n\n OutputStyle::Match\n\n };\n\n let text = if attr_state.line_drawing {\n\n Cow::Owned(line_drawing::convert_line_drawing(t.as_str()))\n\n } else {\n\n Cow::Borrowed(t.as_str())\n", "file_path": "src/line.rs", "rank": 79, "score": 33366.91784757424 }, { "content": " ) -> Result<(), std::io::Error> {\n\n let mut start = start;\n\n let mut attr_state = AttributeState::new();\n\n let mut position = 0;\n\n if start > 0 {\n\n changes.push(Change::AllAttributes(\n\n CellAttributes::default()\n\n .set_foreground(AnsiColor::Navy)\n\n .set_intensity(Intensity::Bold)\n\n .clone(),\n\n ));\n\n changes.push(LEFT_ARROW.into());\n\n changes.push(Change::AllAttributes(CellAttributes::default()));\n\n start += 1;\n\n }\n\n for span in self.spans.iter() {\n\n position = span.render(changes, &mut attr_state, start, end, position, search_index)?;\n\n }\n\n match position.cmp(&end) {\n\n Ordering::Greater => {\n", "file_path": "src/line.rs", "rank": 80, "score": 33366.606763243944 }, { "content": " }\n\n }\n\n self.changed = true;\n\n }\n\n\n\n /// Apply a hyperlink escape code onto the attribute state.\n\n fn apply_hyperlink(&mut self, hyperlink: &Option<Arc<Hyperlink>>) 
{\n\n self.attrs.set_hyperlink(hyperlink.clone());\n\n self.changed = true;\n\n }\n\n\n\n /// Switch to the given style. The correct escape color sequences will be emitted.\n\n fn style(&mut self, style: OutputStyle) -> Result<Option<Change>, std::io::Error> {\n\n if self.style != style || self.changed {\n\n let attrs = match style {\n\n OutputStyle::File => self.attrs.clone(),\n\n OutputStyle::Control => CellAttributes::default().set_reverse(true).clone(),\n\n OutputStyle::Match => self\n\n .attrs\n\n .clone()\n", "file_path": "src/line.rs", "rank": 81, "score": 33365.67377923643 }, { "content": "use crate::config::WrappingMode;\n\nuse crate::line_drawing;\n\nuse crate::overstrike;\n\nuse crate::search::{trim_trailing_newline, ESCAPE_SEQUENCE};\n\nuse crate::util;\n\n\n\nconst LEFT_ARROW: &str = \"<\";\n\nconst RIGHT_ARROW: &str = \">\";\n\nconst TAB_SPACES: &str = \" \";\n\n\n\nconst WRAPS_CACHE_SIZE: usize = 4;\n\n\n\n/// Line wrap in the cache are uniquely identified by index and wrapping mode.\n", "file_path": "src/line.rs", "rank": 82, "score": 33365.37340437708 }, { "content": " (*start, *end)\n\n } else {\n\n let rows = self.make_wrap(width, wrapping);\n\n let (start, end) = rows.get(row).unwrap_or(&(0, 0));\n\n let (start, end) = (*start, *end);\n\n wraps.insert((width, wrapping), rows);\n\n (start, end)\n\n }\n\n };\n\n let mut attr_state = AttributeState::new();\n\n let mut position = 0;\n\n for span in self.spans.iter() {\n\n position = span.render(changes, &mut attr_state, start, end, position, search_index)?;\n\n }\n\n if end - start < width {\n\n changes.push(Change::ClearToEndOfLine(ColorAttribute::default()));\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "src/line.rs", "rank": 83, "score": 33364.62477871015 }, { "content": " /// Returns the start and end pairs for each row of the line if wrapped.\n\n fn make_wrap(&self, width: usize, wrapping: WrappingMode) -> Vec<(usize, usize)> {\n\n let mut rows = Vec::new();\n\n match wrapping {\n\n 
WrappingMode::Unwrapped => {\n\n rows.push((0, std::usize::MAX));\n\n }\n\n WrappingMode::GraphemeBoundary | WrappingMode::WordBoundary => {\n\n let mut start = 0;\n\n let mut position = 0;\n\n for span in self.spans.iter() {\n\n let (new_start, new_position) = span.split(\n\n &mut rows,\n\n start,\n\n position,\n\n width,\n\n wrapping == WrappingMode::WordBoundary,\n\n );\n\n start = new_start;\n\n position = new_position;\n", "file_path": "src/line.rs", "rank": 84, "score": 33364.38132413819 }, { "content": " remaining_offset -= escape_start - original_offset;\n\n original_offset = *escape_end;\n\n }\n\n }\n\n original_offset + remaining_offset\n\n }),\n\n )\n\n } else {\n\n (Cow::Borrowed(&data[..len]), None)\n\n };\n\n for (match_index, match_range) in regex.find_iter(&data_without_escapes[..]).enumerate() {\n\n let (match_start, match_end) = if let Some(ref convert) = convert_offset {\n\n (convert(match_range.start()), convert(match_range.end()))\n\n } else {\n\n (match_range.start(), match_range.end())\n\n };\n\n if start < match_start {\n\n spans.append(&mut parse_spans(&data[start..match_start], None));\n\n }\n\n spans.append(&mut parse_spans(\n", "file_path": "src/line.rs", "rank": 85, "score": 33362.10972175742 }, { "content": " pub(crate) fn new_search(_index: usize, data: impl AsRef<[u8]>, regex: &Regex) -> Line {\n\n let data = data.as_ref();\n\n let data = overstrike::convert_overstrike(&data[..]);\n\n let len = trim_trailing_newline(&data[..]);\n\n let mut spans = Vec::new();\n\n let mut start = 0;\n\n let (data_without_escapes, convert_offset) = if ESCAPE_SEQUENCE.is_match(&data[..len]) {\n\n let mut escape_ranges = Vec::new();\n\n for match_range in ESCAPE_SEQUENCE.find_iter(&data[..len]) {\n\n escape_ranges.push((match_range.start(), match_range.end()));\n\n }\n\n (\n\n ESCAPE_SEQUENCE.replace_all(&data[..len], NoExpand(b\"\")),\n\n Some(move |offset| {\n\n let mut original_offset = 0;\n\n let mut remaining_offset = offset;\n\n for (escape_start, 
escape_end) in escape_ranges.iter() {\n\n if original_offset + remaining_offset < *escape_start {\n\n break;\n\n } else {\n", "file_path": "src/line.rs", "rank": 86, "score": 33361.2640292965 }, { "content": " }\n\n if position > start || rows.is_empty() {\n\n rows.push((start, position))\n\n }\n\n }\n\n }\n\n rows\n\n }\n\n\n\n /// Returns the number of rows for this line if wrapped at the given width\n\n pub(crate) fn height(&self, width: usize, wrapping: WrappingMode) -> usize {\n\n if wrapping == WrappingMode::Unwrapped {\n\n return 1;\n\n }\n\n let mut wraps = self.wraps.lock().unwrap();\n\n if let Some(rows) = wraps.get_mut(&(width, wrapping)) {\n\n return rows.len();\n\n }\n\n let rows = self.make_wrap(width, wrapping);\n\n let height = rows.len();\n", "file_path": "src/line.rs", "rank": 87, "score": 33361.00184913839 }, { "content": " &format!(\"<U+{:04X}>\", c as u32),\n\n start,\n\n end,\n\n position,\n\n )?;\n\n }\n\n }\n\n Span::SgrSequence(ref s) => attr_state.apply_sgr_sequence(s),\n\n Span::Hyperlink(ref l) => attr_state.apply_hyperlink(l),\n\n Span::LineDrawing(e) => attr_state.line_drawing = e,\n\n _ => {}\n\n }\n\n Ok(position)\n\n }\n\n\n\n fn split(\n\n &self,\n\n rows: &mut Vec<(usize, usize)>,\n\n start: usize,\n\n position: usize,\n", "file_path": "src/line.rs", "rank": 88, "score": 33360.72973950409 }, { "content": " for byte in &after_valid[..] 
{\n\n spans.push(Span::Invalid(*byte));\n\n }\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n spans\n\n}\n\n\n\nimpl Line {\n\n pub(crate) fn new(_index: usize, data: impl AsRef<[u8]>) -> Line {\n\n let data = data.as_ref();\n\n let data = overstrike::convert_overstrike(&data[..]);\n\n let spans = parse_spans(&data[..], None).into_boxed_slice();\n\n let wraps = Arc::new(Mutex::new(LruCache::new(WRAPS_CACHE_SIZE)));\n\n Line { spans, wraps }\n\n }\n\n\n", "file_path": "src/line.rs", "rank": 89, "score": 33359.550910756756 }, { "content": " }\n\n text_start = None;\n\n }\n\n spans.push(span);\n\n } else if text_start.is_none() {\n\n text_start = Some(index);\n\n }\n\n }\n\n if let Some(start) = text_start {\n\n if let Some(match_index) = match_index {\n\n spans.push(Span::Match(data[start..].to_string(), match_index));\n\n } else {\n\n spans.push(Span::Text(data[start..].to_string()));\n\n }\n\n }\n\n }\n\n\n\n loop {\n\n match str::from_utf8(input) {\n\n Ok(valid) => {\n", "file_path": "src/line.rs", "rank": 90, "score": 33358.92079538942 }, { "content": " // There is more text after the end of the line, so we need to\n\n // render the right arrow.\n\n //\n\n // The cursor should be in the final column of the line. 
However,\n\n // we need to work around strange terminal behaviour when setting\n\n // styles at the end of the line by backspacing and then moving\n\n // forwards.\n\n changes.push(Change::Text(\"\\x08\".into()));\n\n changes.push(Change::CursorPosition {\n\n x: Position::Relative(1),\n\n y: Position::Relative(0),\n\n });\n\n changes.push(Change::AllAttributes(\n\n CellAttributes::default()\n\n .set_foreground(AnsiColor::Navy)\n\n .set_intensity(Intensity::Bold)\n\n .clone(),\n\n ));\n\n changes.push(RIGHT_ARROW.into());\n\n changes.push(Change::AllAttributes(CellAttributes::default()));\n", "file_path": "src/line.rs", "rank": 91, "score": 33358.888200600384 }, { "content": " }\n\n\n\n if span.is_none() && grapheme.len() == 1 {\n\n if let Some(ch) = grapheme.bytes().next() {\n\n if ch < b' ' || ch == b'\\x7F' {\n\n span = Some(Span::Control(ch));\n\n }\n\n }\n\n }\n\n\n\n if span.is_none() && grapheme.width() == 0 {\n\n span = Some(Span::Unprintable(grapheme.to_string()));\n\n }\n\n\n\n if let Some(span) = span {\n\n if let Some(start) = text_start {\n\n if let Some(match_index) = match_index {\n\n spans.push(Span::Match(data[start..index].to_string(), match_index));\n\n } else {\n\n spans.push(Span::Text(data[start..index].to_string()));\n", "file_path": "src/line.rs", "rank": 92, "score": 33358.3452768325 }, { "content": " };\n\n position = write_truncated(\n\n changes,\n\n attr_state,\n\n style,\n\n text.as_ref(),\n\n start,\n\n end,\n\n position,\n\n )?;\n\n }\n\n Span::TAB => {\n\n let tabchars = 8 - position % 8;\n\n position = write_truncated(\n\n changes,\n\n attr_state,\n\n OutputStyle::File,\n\n &TAB_SPACES[..tabchars],\n\n start,\n\n end,\n", "file_path": "src/line.rs", "rank": 93, "score": 33357.353609938335 }, { "content": " position,\n\n )?;\n\n }\n\n Span::Control(c) | Span::Invalid(c) => {\n\n position = write_truncated(\n\n changes,\n\n attr_state,\n\n OutputStyle::Control,\n\n &format!(\"<{:02X}>\", c),\n\n start,\n\n end,\n\n position,\n\n 
)?;\n\n }\n\n Span::Unprintable(ref grapheme) => {\n\n for c in grapheme.chars() {\n\n position = write_truncated(\n\n changes,\n\n attr_state,\n\n OutputStyle::Control,\n", "file_path": "src/line.rs", "rank": 94, "score": 33357.01880205799 }, { "content": " .set_foreground(AnsiColor::Black)\n\n .set_background(AnsiColor::Olive)\n\n .set_intensity(Intensity::Normal)\n\n .clone(),\n\n OutputStyle::CurrentMatch => self\n\n .attrs\n\n .clone()\n\n .set_foreground(AnsiColor::Black)\n\n .set_background(AnsiColor::Teal)\n\n .set_intensity(Intensity::Normal)\n\n .clone(),\n\n };\n\n self.style = style;\n\n self.changed = false;\n\n Ok(Some(Change::AllAttributes(attrs)))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n\n\n\n/// A span of text within a line.\n", "file_path": "src/line.rs", "rank": 95, "score": 33356.73806291519 }, { "content": " \"A simple \",\n\n \"line with \",\n\n \"several \",\n\n \"words, \",\n\n \"including \",\n\n \"some \",\n\n \"superobnox\",\n\n \"iously \",\n\n \"big ones \",\n\n \"and some \",\n\n \"extra-\",\n\n \"confusingl\",\n\n \"y-awkward \",\n\n \"hyphenated\",\n\n \" ones.\",\n\n ];\n\n let line = Line::new(0, data.as_bytes());\n\n assert_eq!(\n\n line.make_wrap(100, WrappingMode::Unwrapped),\n\n vec![(0, std::usize::MAX)],\n", "file_path": "src/line.rs", "rank": 96, "score": 33356.66311939422 }, { "content": " parse_unicode_span(valid, &mut spans, match_index);\n\n break;\n\n }\n\n Err(error) => {\n\n let (valid, after_valid) = input.split_at(error.valid_up_to());\n\n if !valid.is_empty() {\n\n unsafe {\n\n parse_unicode_span(\n\n str::from_utf8_unchecked(valid),\n\n &mut spans,\n\n match_index,\n\n );\n\n }\n\n }\n\n if let Some(len) = error.error_len() {\n\n for byte in &after_valid[..len] {\n\n spans.push(Span::Invalid(*byte));\n\n }\n\n input = &after_valid[len..];\n\n } else {\n", "file_path": "src/line.rs", "rank": 97, "score": 33356.503364852295 }, { "content": " width: usize,\n\n words: bool,\n\n ) -> (usize, usize) {\n\n match self 
{\n\n Span::Text(text) | Span::Match(text, _) => {\n\n let mut start = start;\n\n let mut position = position;\n\n if words {\n\n for (word, sep) in SplitWords::new(text) {\n\n let end = position + word.width() + sep.width();\n\n if end - start <= width {\n\n // This word fits within this row\n\n position = end;\n\n } else {\n\n // This word wraps to the next row.\n\n if start != position {\n\n // Add the existing words to the row.\n\n rows.push((start, position));\n\n start = position;\n\n }\n", "file_path": "src/line.rs", "rank": 98, "score": 33356.46005427839 }, { "content": " );\n\n assert_eq!(\n\n line.make_wrap(40, WrappingMode::GraphemeBoundary),\n\n vec![(0, 40), (40, 80), (80, 120), (120, 126)],\n\n );\n\n\n\n // The start and end values are positions, not string indices, but since data is pure ASCII\n\n // they will match.\n\n let line_wrapped_10: Vec<_> = line\n\n .make_wrap(10, WrappingMode::WordBoundary)\n\n .iter()\n\n .map(|(start, end)| &data[*start..*end])\n\n .collect();\n\n assert_eq!(line_wrapped_10, data_wrapped_10);\n\n\n\n // In this example, the control character doesn't fit into the 40 character width.\n\n let line = Line::new(\n\n 0,\n\n \"Some line with Únícódé and \\x1B[31mcolors\\x1B[m and \\x01Control characters\\r\\n\"\n\n .as_bytes(),\n\n );\n\n assert_eq!(\n\n line.make_wrap(40, WrappingMode::GraphemeBoundary),\n\n vec![(0, 38), (38, 60)],\n\n );\n\n }\n\n}\n", "file_path": "src/line.rs", "rank": 99, "score": 33356.450832313174 } ]
Rust
automerge-backend/src/backend.rs
gterzian/automerge-rs
a81a37dfb4eef71dbdf5016cd65289b652f19c38
use crate::actor_map::ActorMap; use crate::change::encode_document; use crate::error::AutomergeError; use crate::internal::ObjectId; use crate::op_handle::OpHandle; use crate::op_set::OpSet; use crate::pending_diff::PendingDiff; use crate::Change; use automerge_protocol as amp; use core::cmp::max; use std::collections::{HashMap, HashSet}; #[derive(Debug, PartialEq, Clone)] pub struct Backend { queue: Vec<Change>, op_set: OpSet, states: HashMap<amp::ActorId, Vec<Change>>, actors: ActorMap, hashes: HashMap<amp::ChangeHash, Change>, history: Vec<amp::ChangeHash>, } impl Backend { pub fn init() -> Backend { let op_set = OpSet::init(); Backend { op_set, queue: Vec::new(), actors: ActorMap::new(), states: HashMap::new(), history: Vec::new(), hashes: HashMap::new(), } } fn make_patch( &self, diffs: Option<amp::Diff>, actor_seq: Option<(amp::ActorId, u64)>, ) -> Result<amp::Patch, AutomergeError> { let mut deps: Vec<_> = if let Some((ref actor, ref seq)) = actor_seq { let last_hash = self.get_hash(actor, *seq)?; self.op_set .deps .iter() .cloned() .filter(|dep| dep != &last_hash) .collect() } else { self.op_set.deps.iter().cloned().collect() }; deps.sort_unstable(); Ok(amp::Patch { diffs, deps, max_op: self.op_set.max_op, clock: self .states .iter() .map(|(k, v)| (k.clone(), v.len() as u64)) .collect(), actor: actor_seq.clone().map(|(actor, _)| actor), seq: actor_seq.map(|(_, seq)| seq), }) } pub fn load_changes(&mut self, changes: Vec<Change>) -> Result<(), AutomergeError> { self.apply(changes, None)?; Ok(()) } pub fn apply_changes( &mut self, changes: Vec<Change>, ) -> Result<amp::Patch, AutomergeError> { self.apply(changes, None) } pub fn get_heads(&self) -> Vec<amp::ChangeHash> { self.op_set.heads() } fn apply( &mut self, changes: Vec<Change>, actor: Option<(amp::ActorId, u64)>, ) -> Result<amp::Patch, AutomergeError> { let mut pending_diffs = HashMap::new(); for change in changes.into_iter() { self.add_change(change, actor.is_some(), &mut pending_diffs)?; } let op_set 
= &mut self.op_set; let diffs = op_set.finalize_diffs(pending_diffs, &self.actors)?; self.make_patch(diffs, actor) } fn get_hash(&self, actor: &amp::ActorId, seq: u64) -> Result<amp::ChangeHash, AutomergeError> { self.states .get(actor) .and_then(|v| v.get(seq as usize - 1)) .map(|c| c.hash) .ok_or(AutomergeError::InvalidSeq(seq)) } pub fn apply_local_change( &mut self, mut change: amp::UncompressedChange, ) -> Result<(amp::Patch, Change), AutomergeError> { self.check_for_duplicate(&change)?; let actor_seq = (change.actor_id.clone(), change.seq); if change.seq > 1 { let last_hash = self.get_hash(&change.actor_id, change.seq - 1)?; if !change.deps.contains(&last_hash) { change.deps.push(last_hash) } } let bin_change: Change = change.into(); let patch: amp::Patch = self.apply(vec![bin_change.clone()], Some(actor_seq))?; Ok((patch, bin_change)) } fn check_for_duplicate(&self, change: &amp::UncompressedChange) -> Result<(), AutomergeError> { if self .states .get(&change.actor_id) .map(|v| v.len() as u64) .unwrap_or(0) >= change.seq { return Err(AutomergeError::DuplicateChange(format!( "Change request has already been applied {}:{}", change.actor_id.to_hex_string(), change.seq ))); } Ok(()) } fn add_change( &mut self, change: Change, local: bool, diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>, ) -> Result<(), AutomergeError> { if local { self.apply_change(change, diffs) } else { self.queue.push(change); self.apply_queued_ops(diffs) } } fn apply_queued_ops( &mut self, diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>, ) -> Result<(), AutomergeError> { while let Some(next_change) = self.pop_next_causally_ready_change() { self.apply_change(next_change, diffs)?; } Ok(()) } fn apply_change( &mut self, change: Change, diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>, ) -> Result<(), AutomergeError> { if self.hashes.contains_key(&change.hash) { return Ok(()); } self.update_history(&change); let op_set = &mut self.op_set; let start_op = change.start_op; 
op_set.update_deps(&change); let ops = OpHandle::extract(change, &mut self.actors); op_set.max_op = max(op_set.max_op, start_op + (ops.len() as u64) - 1); op_set.apply_ops(ops, diffs, &mut self.actors)?; Ok(()) } fn update_history(&mut self, change: &Change) { self.states .entry(change.actor_id().clone()) .or_default() .push(change.clone()); self.history.push(change.hash); self.hashes.insert(change.hash, change.clone()); } fn pop_next_causally_ready_change(&mut self) -> Option<Change> { let mut index = 0; while index < self.queue.len() { let change = self.queue.get(index).unwrap(); if change.deps.iter().all(|d| self.hashes.contains_key(d)) { return Some(self.queue.remove(index)); } index += 1 } None } pub fn get_patch(&self) -> Result<amp::Patch, AutomergeError> { let diffs = self .op_set .construct_object(&ObjectId::Root, &self.actors)?; self.make_patch(Some(diffs), None) } pub fn get_changes_for_actor_id( &self, actor_id: &amp::ActorId, ) -> Result<Vec<&Change>, AutomergeError> { Ok(self .states .get(actor_id) .map(|vec| vec.iter().collect()) .unwrap_or_default()) } pub fn get_changes(&self, have_deps: &[amp::ChangeHash]) -> Vec<&Change> { let mut stack = have_deps.to_owned(); let mut has_seen = HashSet::new(); while let Some(hash) = stack.pop() { if let Some(change) = self.hashes.get(&hash) { stack.extend(change.deps.clone()); } has_seen.insert(hash); } self.history .iter() .filter(|hash| !has_seen.contains(hash)) .filter_map(|hash| self.hashes.get(hash)) .collect() } pub fn save(&self) -> Result<Vec<u8>, AutomergeError> { let changes: Vec<amp::UncompressedChange> = self .history .iter() .filter_map(|hash| self.hashes.get(&hash)) .map(|r| r.into()) .collect(); encode_document(changes) } pub fn load(data: Vec<u8>) -> Result<Self, AutomergeError> { let changes = Change::load_document(&data)?; let mut backend = Self::init(); backend.load_changes(changes)?; Ok(backend) } pub fn get_missing_deps(&self) -> Vec<amp::ChangeHash> { let in_queue: Vec<_> = 
self.queue.iter().map(|change| &change.hash).collect(); self.queue .iter() .flat_map(|change| change.deps.clone()) .filter(|h| !in_queue.contains(&h)) .collect() } }
use crate::actor_map::ActorMap; use crate::change::encode_document; use crate::error::AutomergeError; use crate::internal::ObjectId; use crate::op_handle::OpHandle; use crate::op_set::OpSet; use crate::pending_diff::PendingDiff; use crate::Change; use automerge_protocol as amp; use core::cmp::max; use std::collections::{HashMap, HashSet}; #[derive(Debug, PartialEq, Clone)] pub struct Backend { queue: Vec<Change>, op_set: OpSet, states: HashMap<amp::ActorId, Vec<Change>>, actors: ActorMap, hashes: HashMap<amp::ChangeHash, Change>, history: Vec<amp::ChangeHash>, } impl Backend { pub fn init() -> Backend { let op_set = OpSet::init(); Backend { op_set, queue: Vec::new(), actors: ActorMap::new(), states: HashMap::new(), history: Vec::new(), hashes: HashMap::new(), } } fn make_patch( &self, diffs: Option<amp::Diff>, actor_seq: Option<(amp::ActorId, u64)>, ) -> Result<amp::Patch, AutomergeError> { let mut deps: Vec<_> = if let Some((ref actor, ref seq)) = actor_seq { let last_hash = self.get_hash(actor, *seq)?; self.op_set .deps .iter() .cloned() .filter(|
pub fn load_changes(&mut self, changes: Vec<Change>) -> Result<(), AutomergeError> { self.apply(changes, None)?; Ok(()) } pub fn apply_changes( &mut self, changes: Vec<Change>, ) -> Result<amp::Patch, AutomergeError> { self.apply(changes, None) } pub fn get_heads(&self) -> Vec<amp::ChangeHash> { self.op_set.heads() } fn apply( &mut self, changes: Vec<Change>, actor: Option<(amp::ActorId, u64)>, ) -> Result<amp::Patch, AutomergeError> { let mut pending_diffs = HashMap::new(); for change in changes.into_iter() { self.add_change(change, actor.is_some(), &mut pending_diffs)?; } let op_set = &mut self.op_set; let diffs = op_set.finalize_diffs(pending_diffs, &self.actors)?; self.make_patch(diffs, actor) } fn get_hash(&self, actor: &amp::ActorId, seq: u64) -> Result<amp::ChangeHash, AutomergeError> { self.states .get(actor) .and_then(|v| v.get(seq as usize - 1)) .map(|c| c.hash) .ok_or(AutomergeError::InvalidSeq(seq)) } pub fn apply_local_change( &mut self, mut change: amp::UncompressedChange, ) -> Result<(amp::Patch, Change), AutomergeError> { self.check_for_duplicate(&change)?; let actor_seq = (change.actor_id.clone(), change.seq); if change.seq > 1 { let last_hash = self.get_hash(&change.actor_id, change.seq - 1)?; if !change.deps.contains(&last_hash) { change.deps.push(last_hash) } } let bin_change: Change = change.into(); let patch: amp::Patch = self.apply(vec![bin_change.clone()], Some(actor_seq))?; Ok((patch, bin_change)) } fn check_for_duplicate(&self, change: &amp::UncompressedChange) -> Result<(), AutomergeError> { if self .states .get(&change.actor_id) .map(|v| v.len() as u64) .unwrap_or(0) >= change.seq { return Err(AutomergeError::DuplicateChange(format!( "Change request has already been applied {}:{}", change.actor_id.to_hex_string(), change.seq ))); } Ok(()) } fn add_change( &mut self, change: Change, local: bool, diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>, ) -> Result<(), AutomergeError> { if local { self.apply_change(change, diffs) } else { 
self.queue.push(change); self.apply_queued_ops(diffs) } } fn apply_queued_ops( &mut self, diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>, ) -> Result<(), AutomergeError> { while let Some(next_change) = self.pop_next_causally_ready_change() { self.apply_change(next_change, diffs)?; } Ok(()) } fn apply_change( &mut self, change: Change, diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>, ) -> Result<(), AutomergeError> { if self.hashes.contains_key(&change.hash) { return Ok(()); } self.update_history(&change); let op_set = &mut self.op_set; let start_op = change.start_op; op_set.update_deps(&change); let ops = OpHandle::extract(change, &mut self.actors); op_set.max_op = max(op_set.max_op, start_op + (ops.len() as u64) - 1); op_set.apply_ops(ops, diffs, &mut self.actors)?; Ok(()) } fn update_history(&mut self, change: &Change) { self.states .entry(change.actor_id().clone()) .or_default() .push(change.clone()); self.history.push(change.hash); self.hashes.insert(change.hash, change.clone()); } fn pop_next_causally_ready_change(&mut self) -> Option<Change> { let mut index = 0; while index < self.queue.len() { let change = self.queue.get(index).unwrap(); if change.deps.iter().all(|d| self.hashes.contains_key(d)) { return Some(self.queue.remove(index)); } index += 1 } None } pub fn get_patch(&self) -> Result<amp::Patch, AutomergeError> { let diffs = self .op_set .construct_object(&ObjectId::Root, &self.actors)?; self.make_patch(Some(diffs), None) } pub fn get_changes_for_actor_id( &self, actor_id: &amp::ActorId, ) -> Result<Vec<&Change>, AutomergeError> { Ok(self .states .get(actor_id) .map(|vec| vec.iter().collect()) .unwrap_or_default()) } pub fn get_changes(&self, have_deps: &[amp::ChangeHash]) -> Vec<&Change> { let mut stack = have_deps.to_owned(); let mut has_seen = HashSet::new(); while let Some(hash) = stack.pop() { if let Some(change) = self.hashes.get(&hash) { stack.extend(change.deps.clone()); } has_seen.insert(hash); } self.history .iter() .filter(|hash| 
!has_seen.contains(hash)) .filter_map(|hash| self.hashes.get(hash)) .collect() } pub fn save(&self) -> Result<Vec<u8>, AutomergeError> { let changes: Vec<amp::UncompressedChange> = self .history .iter() .filter_map(|hash| self.hashes.get(&hash)) .map(|r| r.into()) .collect(); encode_document(changes) } pub fn load(data: Vec<u8>) -> Result<Self, AutomergeError> { let changes = Change::load_document(&data)?; let mut backend = Self::init(); backend.load_changes(changes)?; Ok(backend) } pub fn get_missing_deps(&self) -> Vec<amp::ChangeHash> { let in_queue: Vec<_> = self.queue.iter().map(|change| &change.hash).collect(); self.queue .iter() .flat_map(|change| change.deps.clone()) .filter(|h| !in_queue.contains(&h)) .collect() } }
dep| dep != &last_hash) .collect() } else { self.op_set.deps.iter().cloned().collect() }; deps.sort_unstable(); Ok(amp::Patch { diffs, deps, max_op: self.op_set.max_op, clock: self .states .iter() .map(|(k, v)| (k.clone(), v.len() as u64)) .collect(), actor: actor_seq.clone().map(|(actor, _)| actor), seq: actor_seq.map(|(_, seq)| seq), }) }
function_block-function_prefixed
[ { "content": "#[wasm_bindgen(js_name = initSyncState)]\n\npub fn init_sync_state() -> SyncState {\n\n SyncState(am::sync::State::new())\n\n}\n\n\n\n// this is needed to be compatible with the automerge-js api\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 0, "score": 207346.01834126757 }, { "content": "pub fn change(\n\n mut reader: impl std::io::Read,\n\n mut writer: impl std::io::Write,\n\n script: &str,\n\n) -> Result<(), ChangeError> {\n\n let mut buf: Vec<u8> = Vec::new();\n\n reader\n\n .read_to_end(&mut buf)\n\n .map_err(|e| ChangeError::ErrReadingChanges { source: e })?;\n\n let backend = am::Automerge::load(&buf)\n\n .map_err(|e| ChangeError::ErrApplyingInitialChanges { source: e })?;\n\n let local_change = parse_change_script(script)?;\n\n let ((), new_changes) = frontend.change::<_, _, amf::InvalidChangeRequest>(None, |d| {\n\n d.add_change(local_change)?;\n\n Ok(())\n\n })?;\n\n let change_bytes = backend.save().unwrap();\n\n writer\n\n .write_all(&change_bytes)\n\n .map_err(|e| ChangeError::ErrWritingChanges { source: e })?;\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 1, "score": 192307.40364705137 }, { "content": "fn encode_hashes(buf: &mut Vec<u8>, hashes: &[ChangeHash]) {\n\n debug_assert!(\n\n hashes.windows(2).all(|h| h[0] <= h[1]),\n\n \"hashes were not sorted\"\n\n );\n\n encode_many(buf, hashes.iter(), |buf, hash| buf.extend(hash.as_bytes()))\n\n}\n\n\n", "file_path": "rust/automerge/src/sync.rs", "rank": 2, "score": 189861.5497933105 }, { "content": "struct ChangeDepsIter<'a> {\n\n change: &'a Change,\n\n graph: &'a HashGraph,\n\n offset: usize,\n\n}\n\n\n\nimpl<'a> ExactSizeIterator for ChangeDepsIter<'a> {\n\n fn len(&self) -> usize {\n\n self.change.deps().len()\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for ChangeDepsIter<'a> {\n\n type Item = u64;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(dep) = self.change.deps().get(self.offset) {\n\n self.offset += 1;\n\n 
Some(self.graph.change_index(dep) as u64)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "rust/automerge/src/storage/save/document.rs", "rank": 3, "score": 185175.27657169383 }, { "content": "fn are_sorted<A, O, I>(mut opids: I) -> bool\n\nwhere\n\n A: PartialEq + Ord + Clone,\n\n O: convert::OpId<A>,\n\n I: Iterator<Item = O>,\n\n{\n\n if let Some(first) = opids.next() {\n\n let mut prev = first;\n\n for opid in opids {\n\n if opid.counter() < prev.counter() {\n\n return false;\n\n }\n\n if opid.counter() == prev.counter() && opid.actor() < prev.actor() {\n\n return false;\n\n }\n\n prev = opid;\n\n }\n\n }\n\n true\n\n}\n", "file_path": "rust/automerge/src/storage/change/change_actors.rs", "rank": 4, "score": 184060.4114778858 }, { "content": "#[wasm_bindgen(js_name = encodeSyncState)]\n\npub fn encode_sync_state(state: &SyncState) -> Uint8Array {\n\n Uint8Array::from(state.0.encode().as_slice())\n\n}\n\n\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 5, "score": 175823.53159403964 }, { "content": "#[wasm_bindgen(js_name = exportSyncState)]\n\npub fn export_sync_state(state: &SyncState) -> JsValue {\n\n JS::from(state.0.clone()).into()\n\n}\n\n\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 6, "score": 175823.53159403964 }, { "content": "#[wasm_bindgen(js_name = create)]\n\npub fn init(options: JsValue) -> Result<Automerge, error::BadActorId> {\n\n console_error_panic_hook::set_once();\n\n let actor = js_get(&options, \"actor\").ok().and_then(|a| a.as_string());\n\n let text_v1 = js_get(&options, \"text_v1\")\n\n .ok()\n\n .and_then(|v1| v1.as_bool())\n\n .unwrap_or(false);\n\n let text_rep = if text_v1 {\n\n TextRepresentation::Array\n\n } else {\n\n TextRepresentation::String\n\n };\n\n Automerge::new(actor, text_rep)\n\n}\n\n\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 7, "score": 168433.17237498745 }, { "content": "pub fn new_doc_with_actor(actor: automerge::ActorId) -> automerge::AutoCommit {\n\n let mut d = 
automerge::AutoCommit::new();\n\n d.set_actor(actor);\n\n d\n\n}\n\n\n", "file_path": "rust/automerge-test/src/lib.rs", "rank": 8, "score": 161160.4384508491 }, { "content": "/// Returns two actor IDs, the first considered to be ordered before the second\n\npub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) {\n\n let a = automerge::ActorId::random();\n\n let b = automerge::ActorId::random();\n\n if a > b {\n\n (b, a)\n\n } else {\n\n (a, b)\n\n }\n\n}\n\n\n\n/// This macro makes it easy to make assertions about a document. It is called with two arguments,\n\n/// the first is a reference to an `automerge::Automerge`, the second is an instance of\n\n/// `RealizedObject<ExportableOpId>`.\n\n///\n\n/// What - I hear you ask - is a `RealizedObject`? It's a fully hydrated version of the contents of\n\n/// an automerge document. You don't need to think about this too much though because you can\n\n/// easily construct one with the `map!` and `list!` macros. Here's an example:\n\n///\n\n/// ## Constructing documents\n\n///\n", "file_path": "rust/automerge-test/src/lib.rs", "rank": 9, "score": 154855.84070072087 }, { "content": "#[test]\n\nfn keys_iter_seq() {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n let list = tx.put_object(ROOT, \"list\", ObjType::List).unwrap();\n\n tx.insert(&list, 0, 3).unwrap();\n\n tx.insert(&list, 1, 4).unwrap();\n\n tx.insert(&list, 2, 5).unwrap();\n\n tx.insert(&list, 3, 6).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(&list, 0, 7).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(&list, 0, 8).unwrap();\n\n tx.put(&list, 3, 9).unwrap();\n\n tx.commit();\n\n let actor = doc.get_actor();\n\n assert_eq!(doc.keys(&list).count(), 4);\n\n\n\n let mut keys = doc.keys(&list);\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 10, "score": 153944.11287221633 }, { "content": "#[wasm_bindgen(js_name = importSyncState)]\n\npub fn 
import_sync_state(state: JsValue) -> Result<SyncState, interop::error::BadSyncState> {\n\n Ok(SyncState(JS(state).try_into()?))\n\n}\n\n\n\n// this is needed to be compatible with the automerge-js api\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 11, "score": 150378.16359447123 }, { "content": "fn hash(typ: ChunkType, data: &[u8]) -> ChangeHash {\n\n let mut out = vec![u8::from(typ)];\n\n leb128::write::unsigned(&mut out, data.len() as u64).unwrap();\n\n out.extend(data);\n\n let hash_result = Sha256::digest(out);\n\n let array: [u8; 32] = hash_result.into();\n\n ChangeHash(array)\n\n}\n", "file_path": "rust/automerge/src/storage/chunk.rs", "rank": 12, "score": 150125.10456751008 }, { "content": "fn length_prefixed_bytes<B: AsRef<[u8]>>(b: B, out: &mut Vec<u8>) -> usize {\n\n let prefix_len = leb128::write::unsigned(out, b.as_ref().len() as u64).unwrap();\n\n out.write_all(b.as_ref()).unwrap();\n\n prefix_len + b.as_ref().len()\n\n}\n\n\n\n// Bunch of type safe builder boilerplate\n\npub(crate) struct Unset;\n\npub(crate) struct Set<T> {\n\n value: T,\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\npub(crate) struct ChangeBuilder<START_OP, ACTOR, SEQ, TIME> {\n\n dependencies: Vec<ChangeHash>,\n\n actor: ACTOR,\n\n seq: SEQ,\n\n start_op: START_OP,\n\n timestamp: TIME,\n\n message: Option<String>,\n", "file_path": "rust/automerge/src/storage/change.rs", "rank": 13, "score": 149824.80066108218 }, { "content": "#[test]\n\nfn loaded_doc_changes_have_hash() {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 1_u64).unwrap();\n\n tx.commit();\n\n let hash = doc.get_last_local_change().unwrap().hash();\n\n let bytes = doc.save();\n\n let doc = Automerge::load(&bytes).unwrap();\n\n assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash(), hash);\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 14, "score": 149616.84928410992 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq, 
PartialOrd, Ord)]\n\nstruct HashIdx(u32);\n\n\n", "file_path": "rust/automerge/src/change_graph.rs", "rank": 15, "score": 149442.5731115147 }, { "content": "fn smol_str() -> impl Strategy<Value = SmolStr> + Clone {\n\n any::<String>().prop_map(SmolStr::from)\n\n}\n", "file_path": "rust/automerge/src/columnar/encoding/properties.rs", "rank": 16, "score": 149384.28946311848 }, { "content": "pub fn to_result<R: Into<AMresult>>(r: R) -> *mut AMresult {\n\n (r.into()).into()\n\n}\n\n\n\n/// \\ingroup enumerations\n\n/// \\enum AMstatus\n\n/// \\installed_headerfile\n\n/// \\brief The status of an API call.\n\n#[derive(PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum AMstatus {\n\n /// Success.\n\n /// \\note This tag is unalphabetized so that `0` indicates success.\n\n Ok,\n\n /// Failure due to an error.\n\n Error,\n\n /// Failure due to an invalid result.\n\n InvalidResult,\n\n}\n\n\n", "file_path": "rust/automerge-c/src/result.rs", "rank": 17, "score": 147930.3861420698 }, { "content": "#[wasm_bindgen(js_name = decodeChange)]\n\npub fn decode_change(change: Uint8Array) -> Result<JsValue, error::DecodeChange> {\n\n let change = Change::from_bytes(change.to_vec())?;\n\n let change: am::ExpandedChange = change.decode();\n\n let serializer = serde_wasm_bindgen::Serializer::json_compatible();\n\n Ok(change.serialize(&serializer)?)\n\n}\n\n\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 18, "score": 141538.80681543905 }, { "content": "#[wasm_bindgen(js_name = encodeChange)]\n\npub fn encode_change(change: JsValue) -> Result<Uint8Array, error::EncodeChange> {\n\n // Alex: Technically we should be using serde_wasm_bindgen::from_value instead of into_serde.\n\n // Unfortunately serde_wasm_bindgen::from_value fails for some inscrutable reason, so instead\n\n // we use into_serde (sorry to future me).\n\n #[allow(deprecated)]\n\n let change: am::ExpandedChange = change.into_serde()?;\n\n let change: Change = change.into();\n\n 
Ok(Uint8Array::from(change.raw_bytes()))\n\n}\n\n\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 19, "score": 141538.80681543905 }, { "content": "fn fold_hash_set<F, O>(init: O, val: &JsValue, f: F) -> Result<O, error::BadChangeHashSet>\n\nwhere\n\n F: Fn(O, ChangeHash) -> O,\n\n{\n\n let mut result = init;\n\n for key in Reflect::own_keys(val)\n\n .map_err(|_| error::BadChangeHashSet::ListProp)?\n\n .iter()\n\n {\n\n if let Some(true) = js_get(val, &key)?.0.as_bool() {\n\n let hash = ChangeHash::try_from(JS(key.clone()))\n\n .map_err(|e| error::BadChangeHashSet::BadHash(key, e))?;\n\n result = f(result, hash);\n\n }\n\n }\n\n Ok(result)\n\n}\n\n\n\nimpl TryFrom<JS> for ChangeHash {\n\n type Error = error::BadChangeHash;\n", "file_path": "rust/automerge-wasm/src/interop.rs", "rank": 20, "score": 140619.56889878298 }, { "content": "fn log_list_diff<'a, I: Iterator<Item = Patch<'a>>>(\n\n patch_log: &mut PatchLog,\n\n obj: &ObjId,\n\n patches: I,\n\n) {\n\n patches.fold(0, |index, patch| match patch {\n\n Patch::New(op, marks) => {\n\n let value = op.value_at(Some(op.clock)).into();\n\n patch_log.insert(*obj, index, value, op.id, op.conflict, marks);\n\n index + 1\n\n }\n\n Patch::Update { before, after, .. 
} => {\n\n let conflict = !before.conflict && after.conflict;\n\n if after.cross_visible {\n\n let value = after.value_at(Some(after.clock)).into();\n\n patch_log.put_seq(*obj, index, value, after.id, conflict, true)\n\n } else {\n\n let value = after.value_at(Some(after.clock)).into();\n\n patch_log.put_seq(*obj, index, value, after.id, conflict, false)\n\n }\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 21, "score": 139359.63974066288 }, { "content": "fn log_map_diff<'a, I: Iterator<Item = Patch<'a>>>(\n\n doc: &Automerge,\n\n patch_log: &mut PatchLog,\n\n obj: &ObjId,\n\n diffs: I,\n\n) {\n\n diffs\n\n .filter_map(|patch| Some((get_prop(doc, patch.op())?, patch)))\n\n .for_each(|(key, patch)| match patch {\n\n Patch::New(op, _) => {\n\n let value = op.value_at(Some(op.clock)).into();\n\n patch_log.put_map(*obj, key, value, op.id, op.conflict, false)\n\n }\n\n Patch::Update { before, after, .. } => {\n\n let conflict = !before.conflict && after.conflict;\n\n if after.cross_visible {\n\n let value = after.value_at(Some(after.clock)).into();\n\n patch_log.put_map(*obj, key, value, after.id, conflict, true)\n\n } else {\n\n let value = after.value_at(Some(after.clock)).into();\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 22, "score": 139359.63974066288 }, { "content": "fn log_text_diff<'a, I: Iterator<Item = Patch<'a>>>(\n\n patch_log: &mut PatchLog,\n\n obj: &ObjId,\n\n patches: I,\n\n) {\n\n let encoding = ListEncoding::Text;\n\n patches.fold(0, |index, patch| match &patch {\n\n Patch::New(op, marks) => {\n\n patch_log.splice(*obj, index, op.to_str(), marks.clone());\n\n index + op.width(encoding)\n\n }\n\n Patch::Update {\n\n before,\n\n after,\n\n marks,\n\n } => {\n\n patch_log.delete_seq(*obj, index, before.width(encoding));\n\n patch_log.splice(*obj, index, after.to_str(), marks.clone());\n\n index + after.width(encoding)\n\n }\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 23, "score": 
139359.63974066288 }, { "content": "fn leb_bytes(bits: u64) -> u64 {\n\n (bits + 6) / 7\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use proptest::prelude::*;\n\n\n\n proptest! {\n\n #[test]\n\n fn test_ulebsize(val in 0..u64::MAX) {\n\n let mut out = Vec::new();\n\n leb128::write::unsigned(&mut out, val).unwrap();\n\n let expected = out.len() as u64;\n\n assert_eq!(expected, ulebsize(val))\n\n }\n\n\n\n #[test]\n\n fn test_lebsize(val in i64::MIN..i64::MAX) {\n", "file_path": "rust/automerge/src/columnar/encoding/leb128.rs", "rank": 24, "score": 138278.5552300893 }, { "content": "fn change_parser<'a, Input: 'a>() -> impl combine::Parser<Input, Output = amf::LocalChange> + 'a\n\nwhere\n\n Input: 'a,\n\n Input: combine::stream::Stream<Token = char>,\n\n Input::Error: combine::ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n charparser::spaces()\n\n .with(\n\n op_parser()\n\n .skip(charparser::spaces())\n\n .skip(charparser::string(\"$\"))\n\n .and(path_segment_parser(am::Path::root())),\n\n )\n\n .skip(charparser::spaces())\n\n .then(|(operation, path)| {\n\n let onwards: Box<\n\n dyn combine::Parser<Input, Output = amf::LocalChange, PartialState = _>,\n\n > = match operation {\n\n Op::Set => value_parser::<'a>()\n\n .map(move |value| amf::LocalChange::set(path.clone(), value))\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 25, "score": 137438.46544190394 }, { "content": "#[test]\n\nfn load_incremental_change_without_deps_throws() {\n\n let mut doc = AutoCommit::new();\n\n doc.put(&ROOT, \"key\", \"value\").unwrap();\n\n let _ = doc.save_incremental();\n\n\n\n doc.put(&ROOT, \"key\", \"value2\").unwrap();\n\n let orphan = doc.save_incremental();\n\n if let Err(e) = AutoCommit::load(&orphan) {\n\n assert_eq!(e, automerge::AutomergeError::MissingDeps);\n\n } else {\n\n panic!(\"loading an orphan change without a document chunk as first chunk should fail\");\n\n }\n\n}\n", "file_path": 
"rust/automerge/tests/test_save_load_orphans.rs", "rank": 26, "score": 135865.3502257668 }, { "content": "#[wasm_bindgen(js_name = decodeSyncState)]\n\npub fn decode_sync_state(data: Uint8Array) -> Result<SyncState, sync::DecodeSyncStateErr> {\n\n SyncState::decode(data)\n\n}\n\n\n\npub mod error {\n\n use automerge::{AutomergeError, ObjType};\n\n use js_sys::RangeError;\n\n use wasm_bindgen::JsValue;\n\n\n\n use crate::interop::{\n\n self,\n\n error::{BadChangeHashes, BadJSChanges},\n\n };\n\n\n\n #[derive(Debug, thiserror::Error)]\n\n #[error(\"could not parse Actor ID as a hex string: {0}\")]\n\n pub struct BadActorId(#[from] hex::FromHexError);\n\n\n\n impl From<BadActorId> for JsValue {\n\n fn from(s: BadActorId) -> Self {\n", "file_path": "rust/automerge-wasm/src/lib.rs", "rank": 27, "score": 135011.288782755 }, { "content": "pub fn import_json(\n\n mut reader: impl std::io::Read,\n\n mut writer: impl std::io::Write,\n\n) -> anyhow::Result<()> {\n\n let mut buffer = String::new();\n\n reader.read_to_string(&mut buffer)?;\n\n\n\n let json_value: serde_json::Value = serde_json::from_str(&buffer)?;\n\n let mut doc = initialize_from_json(&json_value)?;\n\n writer.write_all(&doc.save())?;\n\n Ok(())\n\n}\n", "file_path": "rust/automerge-cli/src/import.rs", "rank": 28, "score": 134074.18832718662 }, { "content": "fn key_parser<Input>() -> impl Parser<Input, Output = String>\n\nwhere\n\n Input: combine::Stream<Token = char>,\n\n{\n\n let key_char_parser = combine::choice::<Input, _>((\n\n charparser::alpha_num(),\n\n charparser::char('-'),\n\n charparser::char('_'),\n\n ));\n\n combine::many1(key_char_parser).map(|chars: Vec<char>| chars.into_iter().collect())\n\n}\n\n\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 29, "score": 133315.67392010003 }, { "content": "fn index_parser<Input>() -> impl Parser<Input, Output = u32>\n\nwhere\n\n Input: combine::Stream<Token = char>,\n\n{\n\n combine::many1::<Vec<char>, Input, _>(charparser::digit()).map(|digits| 
{\n\n let num_string: String = digits.iter().collect();\n\n num_string.parse::<u32>().unwrap()\n\n })\n\n}\n\n\n\ncombine::parser! {\n\n fn path_segment_parser[Input](path_so_far: amf::Path)(Input) -> amf::Path\n\n where [Input: combine::Stream<Token=char>]\n\n {\n\n let key_path_so_far = path_so_far.clone();\n\n let key_segment_parser = charparser::string(\"[\\\"\")\n\n .with(key_parser())\n\n .skip(charparser::string(\"\\\"]\"))\n\n .then(move |key| path_segment_parser(key_path_so_far.clone().key(key)));\n\n\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 30, "score": 133315.67392010003 }, { "content": "#[derive(Copy, Clone)]\n\nstruct ValueMeta(u64);\n\n\n\nimpl ValueMeta {\n\n fn type_code(&self) -> ValueType {\n\n let low_byte = (self.0 as u8) & 0b00001111;\n\n match low_byte {\n\n 0 => ValueType::Null,\n\n 1 => ValueType::False,\n\n 2 => ValueType::True,\n\n 3 => ValueType::Uleb,\n\n 4 => ValueType::Leb,\n\n 5 => ValueType::Float,\n\n 6 => ValueType::String,\n\n 7 => ValueType::Bytes,\n\n 8 => ValueType::Counter,\n\n 9 => ValueType::Timestamp,\n\n other => ValueType::Unknown(other),\n\n }\n\n }\n\n\n", "file_path": "rust/automerge/src/columnar/column_range/value.rs", "rank": 31, "score": 129916.0670377164 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ChangeNode {\n\n hash_idx: HashIdx,\n\n actor_index: usize,\n\n seq: u64,\n\n max_op: u64,\n\n parents: Option<EdgeIdx>,\n\n}\n\n\n\nimpl ChangeGraph {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n nodes: Vec::new(),\n\n edges: Vec::new(),\n\n nodes_by_hash: BTreeMap::new(),\n\n hashes: Vec::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn add_change(\n\n &mut self,\n", "file_path": "rust/automerge/src/change_graph.rs", "rank": 32, "score": 129562.69086185892 }, { "content": "fn process<'a, T: Iterator<Item = &'a Op>>(\n\n ops: T,\n\n before: &'a Clock,\n\n after: &'a Clock,\n\n diff: &mut MarkDiff<'a>,\n\n) -> Option<Patch<'a>> {\n\n let mut before_op = None;\n\n let mut after_op = None;\n\n\n\n for 
op in ops {\n\n let predates_before = op.predates(before);\n\n let predates_after = op.predates(after);\n\n\n\n if predates_before && !op.was_deleted_before(before) {\n\n push_top(&mut before_op, op, predates_after, before);\n\n }\n\n\n\n if predates_after && !op.was_deleted_before(after) {\n\n push_top(&mut after_op, op, predates_before, after);\n\n }\n\n }\n\n resolve(before_op, after_op, diff)\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 33, "score": 129170.57198336645 }, { "content": "fn op_parser<Input>() -> impl combine::Parser<Input, Output = Op>\n\nwhere\n\n Input: combine::Stream<Token = char>,\n\n{\n\n combine::choice((\n\n combine::attempt(case_insensitive_string(\"set\")).map(|_| Op::Set),\n\n combine::attempt(case_insensitive_string(\"insert\")).map(|_| Op::Insert),\n\n combine::attempt(case_insensitive_string(\"delete\")).map(|_| Op::Delete),\n\n combine::attempt(case_insensitive_string(\"increment\")).map(|_| Op::Increment),\n\n ))\n\n}\n\n\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 34, "score": 127990.46990742622 }, { "content": "#[derive(Debug, Clone)]\n\nstruct MarkDiff<'a> {\n\n doc: &'a Automerge,\n\n before: MarkStateMachine<'a>,\n\n after: MarkStateMachine<'a>,\n\n}\n\n\n\nimpl<'a> MarkDiff<'a> {\n\n fn new(doc: &'a Automerge) -> Self {\n\n MarkDiff {\n\n doc,\n\n before: MarkStateMachine::default(),\n\n after: MarkStateMachine::default(),\n\n }\n\n }\n\n\n\n fn current(&self) -> Option<Arc<MarkSet>> {\n\n // do this without all the cloning - cache the result\n\n let b = self.before.current().cloned().unwrap_or_default();\n\n let a = self.after.current().cloned().unwrap_or_default();\n\n if a != b {\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 35, "score": 124609.42412855414 }, { "content": "fn doc(n: u64) -> Automerge {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n for i in 0..n {\n\n tx.put(ROOT, i.to_string(), i.to_string()).unwrap();\n\n }\n\n 
tx.commit();\n\n doc\n\n}\n\n\n", "file_path": "rust/automerge/benches/range.rs", "rank": 36, "score": 123939.19865816427 }, { "content": "use std::convert::TryInto;\n\n\n\nuse serde::{de, Deserialize, Deserializer, Serialize, Serializer};\n\n\n\nuse crate::ChangeHash;\n\n\n\nimpl Serialize for ChangeHash {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n hex::encode(self.0).serialize(serializer)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ChangeHash {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n", "file_path": "rust/automerge/src/legacy/serde_impls/change_hash.rs", "rank": 37, "score": 123070.69807631458 }, { "content": " let s = String::deserialize(deserializer)?;\n\n let vec = hex::decode(&s).map_err(|_| {\n\n de::Error::invalid_value(de::Unexpected::Str(&s), &\"A valid hex string\")\n\n })?;\n\n vec.as_slice().try_into().map_err(|_| {\n\n de::Error::invalid_value(de::Unexpected::Str(&s), &\"A 32 byte hex encoded string\")\n\n })\n\n }\n\n}\n", "file_path": "rust/automerge/src/legacy/serde_impls/change_hash.rs", "rank": 38, "score": 123050.09634700505 }, { "content": "fn log_list_patches<'a, I: Iterator<Item = &'a Op>>(\n\n doc: &'a Automerge,\n\n patch_log: &mut PatchLog,\n\n obj: &ObjId,\n\n ops: I,\n\n) {\n\n let mut marks = MarkStateMachine::default();\n\n let ops_by_key = ops.group_by(|o| o.elemid_or_key());\n\n let mut len = 0;\n\n //let mut finished = Vec::new();\n\n ops_by_key\n\n .into_iter()\n\n .filter_map(|(_key, key_ops)| {\n\n key_ops\n\n .filter(|o| o.visible_or_mark(None))\n\n .filter_map(|o| match &o.action {\n\n OpType::Make(obj_type) => {\n\n Some((Value::Object(*obj_type), o.id, marks.current().cloned()))\n\n }\n\n OpType::Put(value) => Some((\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 39, "score": 122224.2374165582 }, { "content": "fn log_map_patches<'a, I: Iterator<Item = &'a 
Op>>(\n\n doc: &'a Automerge,\n\n patch_log: &mut PatchLog,\n\n obj: &ObjId,\n\n ops: I,\n\n) {\n\n let ops_by_key = ops.group_by(|o| o.key);\n\n ops_by_key\n\n .into_iter()\n\n .filter_map(log_map_key_patches)\n\n .for_each(|(i, put)| {\n\n if let Some(prop_index) = put.key.prop_index() {\n\n if let Some(key) = doc.ops().m.props.safe_get(prop_index) {\n\n let conflict = i > 0;\n\n patch_log.put_map(*obj, key, put.value.into(), put.id, conflict, false);\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 40, "score": 122224.2374165582 }, { "content": "fn log_text_patches<'a, I: Iterator<Item = &'a Op>>(\n\n doc: &'a Automerge,\n\n patch_log: &mut PatchLog,\n\n obj: &ObjId,\n\n ops: I,\n\n) {\n\n let ops_by_key = ops.group_by(|o| o.elemid_or_key());\n\n let encoding = ListEncoding::Text;\n\n let state = TextState::default();\n\n let state = ops_by_key\n\n .into_iter()\n\n .fold(state, |mut state, (_key, key_ops)| {\n\n if let Some(o) = key_ops.filter(|o| o.visible_or_mark(None)).last() {\n\n match &o.action {\n\n OpType::Make(_) | OpType::Put(_) => {\n\n state.push_str(o.to_str(), o.width(encoding))\n\n }\n\n OpType::MarkBegin(_, data) => {\n\n if state.marks.mark_begin(o.id, data, &doc.ops.m) {\n\n state.push_mark();\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 41, "score": 122224.23741655823 }, { "content": "#[derive(Clone)]\n\nstruct NodeIter<'a> {\n\n /// The node itself\n\n node: &'a OpTreeNode,\n\n /// The index of the next element we will pull from the node. This means something different\n\n /// depending on whether the node is a leaf node or not. 
If the node is a leaf node then this\n\n /// index is the index in `node.elements` which will be returned on the next call to `next()`.\n\n /// If the node is not an internal node then this index is the index of `children` which we are\n\n /// currently iterating as well as being the index of the next element of `elements` which we\n\n /// will return once we have finished iterating over the child node.\n\n index: usize,\n\n}\n\n\n\nimpl<'a> Iterator for Inner<'a> {\n\n type Item = &'a Op;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self {\n\n Inner::Empty => None,\n\n Inner::NonEmpty {\n\n ancestors,\n", "file_path": "rust/automerge/src/op_tree/iter.rs", "rank": 42, "score": 121938.90777538043 }, { "content": "#[derive(Debug, Default)]\n\nstruct TextState<'a> {\n\n len: usize,\n\n spans: Vec<TextSpan>,\n\n marks: MarkStateMachine<'a>,\n\n}\n\n\n\nimpl<'a> TextState<'a> {\n\n fn push_str(&mut self, text: &str, len: usize) {\n\n if let Some(last_span) = self.spans.last_mut() {\n\n last_span.text.push_str(text);\n\n } else {\n\n self.spans.push(TextSpan {\n\n text: text.to_owned(),\n\n start: 0,\n\n marks: None,\n\n });\n\n }\n\n self.len += len;\n\n }\n\n\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 43, "score": 121802.42007420408 }, { "content": "// keep syncing until doc1 no longer generates a sync message for doc2.\n\nfn sync(doc1: &mut DocWithSync, doc2: &mut DocWithSync) {\n\n while let Some(message1) = doc1.doc.generate_sync_message(&mut doc1.peer_state) {\n\n doc2.doc\n\n .receive_sync_message(&mut doc2.peer_state, message1)\n\n .unwrap();\n\n\n\n if let Some(message2) = doc2.doc.generate_sync_message(&mut doc2.peer_state) {\n\n doc1.doc\n\n .receive_sync_message(&mut doc1.peer_state, message2)\n\n .unwrap()\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/automerge/benches/sync.rs", "rank": 44, "score": 121776.6316621828 }, { "content": "fn repeated_put(n: u64) -> Automerge {\n\n let mut doc = Automerge::new();\n\n 
let mut tx = doc.transaction();\n\n for i in 0..n {\n\n tx.put(ROOT, \"0\", i).unwrap();\n\n }\n\n tx.commit();\n\n doc\n\n}\n\n\n", "file_path": "rust/automerge/benches/map.rs", "rank": 45, "score": 121064.08539712717 }, { "content": "fn increasing_put(n: u64) -> Automerge {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n for i in 0..n {\n\n tx.put(ROOT, i.to_string(), i).unwrap();\n\n }\n\n tx.commit();\n\n doc\n\n}\n\n\n", "file_path": "rust/automerge/benches/map.rs", "rank": 46, "score": 121064.08539712717 }, { "content": "fn decreasing_put(n: u64) -> Automerge {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n for i in (0..n).rev() {\n\n tx.put(ROOT, i.to_string(), i).unwrap();\n\n }\n\n tx.commit();\n\n doc\n\n}\n\n\n", "file_path": "rust/automerge/benches/map.rs", "rank": 47, "score": 121064.08539712717 }, { "content": "fn repeated_increment(n: u64) -> Automerge {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"counter\", ScalarValue::counter(0)).unwrap();\n\n for _ in 0..n {\n\n tx.increment(ROOT, \"counter\", 1).unwrap();\n\n }\n\n tx.commit();\n\n doc\n\n}\n\n\n", "file_path": "rust/automerge/benches/map.rs", "rank": 48, "score": 121064.08539712717 }, { "content": "fn increasing_put(n: u64) -> Automerge {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n for i in 0..n {\n\n tx.put(ROOT, i.to_string(), i).unwrap();\n\n }\n\n tx.commit();\n\n doc\n\n}\n\n\n", "file_path": "rust/automerge/benches/sync.rs", "rank": 49, "score": 121064.08539712717 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let sizes = [100, 1_000, 10_000];\n\n\n\n let mut group = c.benchmark_group(\"map\");\n\n for size in &sizes {\n\n group.throughput(criterion::Throughput::Elements(*size));\n\n group.bench_with_input(BenchmarkId::new(\"repeated put\", size), size, |b, &size| {\n\n b.iter(|| repeated_put(size))\n\n });\n\n 
group.bench_with_input(\n\n BenchmarkId::new(\"repeated increment\", size),\n\n size,\n\n |b, &size| b.iter(|| repeated_increment(size)),\n\n );\n\n\n\n group.throughput(criterion::Throughput::Elements(*size));\n\n group.bench_with_input(\n\n BenchmarkId::new(\"increasing put\", size),\n\n size,\n\n |b, &size| b.iter(|| increasing_put(size)),\n", "file_path": "rust/automerge/benches/map.rs", "rank": 50, "score": 121029.2440105558 }, { "content": "fn bench(c: &mut Criterion) {\n\n let contents = fs::read_to_string(\"edits.json\").expect(\"cannot read edits file\");\n\n let edits = json::parse(&contents).expect(\"cant parse edits\");\n\n let mut commands = vec![];\n\n for i in 0..edits.len() {\n\n let pos: usize = edits[i][0].as_usize().unwrap();\n\n let del: isize = edits[i][1].as_isize().unwrap();\n\n let mut vals = String::new();\n\n for j in 2..edits[i].len() {\n\n let v = edits[i][j].as_str().unwrap();\n\n vals.push_str(v);\n\n }\n\n commands.push((pos, del, vals));\n\n }\n\n\n\n let mut group = c.benchmark_group(\"edit trace\");\n\n group.throughput(Throughput::Elements(commands.len() as u64));\n\n\n\n group.bench_with_input(\n\n BenchmarkId::new(\"replay\", commands.len()),\n", "file_path": "rust/edit-trace/benches/main.rs", "rank": 51, "score": 121029.2440105558 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let sizes = [100, 1_000, 10_000];\n\n\n\n let mut group = c.benchmark_group(\"sync unidirectional\");\n\n for size in &sizes {\n\n group.throughput(criterion::Throughput::Elements(*size));\n\n\n\n group.bench_with_input(\n\n BenchmarkId::new(\"increasing put\", size),\n\n size,\n\n |b, &size| {\n\n b.iter_batched(\n\n || (increasing_put(size), DocWithSync::default()),\n\n |(doc1, mut doc2)| sync(&mut doc1.into(), &mut doc2),\n\n criterion::BatchSize::LargeInput,\n\n )\n\n },\n\n );\n\n }\n\n group.finish();\n", "file_path": "rust/automerge/benches/sync.rs", "rank": 52, "score": 121029.2440105558 }, { "content": "fn criterion_benchmark(c: 
&mut Criterion) {\n\n let n = 100_000;\n\n let doc = doc(n);\n\n c.bench_function(&format!(\"range {}\", n), |b| {\n\n b.iter(|| range(black_box(&doc)))\n\n });\n\n c.bench_function(&format!(\"range_at {}\", n), |b| {\n\n b.iter(|| range_at(black_box(&doc)))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "rust/automerge/benches/range.rs", "rank": 53, "score": 121029.2440105558 }, { "content": "fn load_path(backend: &mut am::Automerge, path: &Path) -> Result<(), Box<dyn std::error::Error>> {\n\n let input = std::fs::read(path).map_err(Box::new)?;\n\n backend.load_incremental(&input).map_err(Box::new)?;\n\n Ok(())\n\n}\n", "file_path": "rust/automerge-cli/src/merge.rs", "rank": 54, "score": 119666.51970760763 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Edge {\n\n // Edges are always child -> parent so we only store the target, the child is implicit\n\n // as you get the edge from the child\n\n target: NodeIdx,\n\n next: Option<EdgeIdx>,\n\n}\n\n\n", "file_path": "rust/automerge/src/change_graph.rs", "rank": 55, "score": 119538.92292985259 }, { "content": "fn log_map_key_patches<'a, I: Iterator<Item = &'a Op>>(\n\n (key, key_ops): (Key, I),\n\n) -> Option<(usize, Put<'a>)> {\n\n key_ops\n\n .filter(|o| o.visible())\n\n .filter_map(|o| match &o.action {\n\n OpType::Make(obj_type) => {\n\n let value = Value::Object(*obj_type);\n\n Some(Put {\n\n value,\n\n key,\n\n id: o.id,\n\n })\n\n }\n\n OpType::Put(value) => {\n\n let value = Value::Scalar(Cow::Borrowed(value));\n\n Some(Put {\n\n value,\n\n key,\n\n id: o.id,\n\n })\n\n }\n\n _ => None,\n\n })\n\n .enumerate()\n\n .last()\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 56, "score": 119088.27704576234 }, { "content": "#[derive(Debug)]\n\nstruct PartialChange<'a> {\n\n index: usize,\n\n deps: Vec<u64>,\n\n actor: usize,\n\n seq: u64,\n\n max_op: u64,\n\n timestamp: i64,\n\n message: 
Option<smol_str::SmolStr>,\n\n extra_bytes: Cow<'a, [u8]>,\n\n ops: Vec<(ObjId, Op)>,\n\n}\n\n\n\nimpl<'a> PartialChange<'a> {\n\n /// # Panics\n\n ///\n\n /// * If any op references a property index which is not in `props`\n\n /// * If any op references an actor index which is not in `actors`\n\n #[instrument(skip(self, known_changes, metadata))]\n\n fn finish(\n\n mut self,\n", "file_path": "rust/automerge/src/storage/load/change_collector.rs", "rank": 57, "score": 119029.2401628561 }, { "content": "fn case_insensitive_string<Input>(s: &'static str) -> impl Parser<Input, Output = String>\n\nwhere\n\n Input: combine::Stream<Token = char>,\n\n Input::Error: combine::ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n charparser::string_cmp(s, |l, r| l.eq_ignore_ascii_case(&r)).map(|s| s.to_lowercase())\n\n}\n\n\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 58, "score": 118831.46068679194 }, { "content": "#[test]\n\nfn test_change_encoding_expanded_change_round_trip() {\n\n let change_bytes: Vec<u8> = vec![\n\n 0x85, 0x6f, 0x4a, 0x83, // magic bytes\n\n 0xb2, 0x98, 0x9e, 0xa9, // checksum\n\n 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234'\n\n 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time\n\n 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111,\n\n 110, // message: 'Initialization'\n\n 0, 6, // actor list, column count\n\n 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action\n\n 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum\n\n 0x7f, 1, 0x78, // keyStr: 'x'\n\n 1, // insert: false\n\n 0x7f, 1, // action: set\n\n 0x7f, 19, // valLen: 1 byte of type uint\n\n 1, // valRaw: 1\n\n 0x7f, 0, // predNum: 0\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, // 10 trailing bytes\n\n ];\n\n let change = automerge::Change::try_from(&change_bytes[..]).unwrap();\n\n assert_eq!(change.raw_bytes(), change_bytes);\n\n let expanded = automerge::ExpandedChange::from(&change);\n\n let unexpanded: automerge::Change = 
expanded.try_into().unwrap();\n\n assert_eq!(unexpanded.raw_bytes(), change_bytes);\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 59, "score": 118585.37347714504 }, { "content": "#[test]\n\nfn delete_only_change() {\n\n let actor = automerge::ActorId::random();\n\n let mut doc1 = automerge::Automerge::new().with_actor(actor.clone());\n\n let list = doc1\n\n .transact::<_, _, automerge::AutomergeError>(|d| {\n\n let l = d.put_object(&automerge::ROOT, \"list\", ObjType::List)?;\n\n d.insert(&l, 0, 'a')?;\n\n Ok(l)\n\n })\n\n .unwrap()\n\n .result;\n\n\n\n let mut doc2 = automerge::Automerge::load(&doc1.save())\n\n .unwrap()\n\n .with_actor(actor.clone());\n\n doc2.transact::<_, _, automerge::AutomergeError>(|d| d.delete(&list, 0))\n\n .unwrap();\n\n\n\n let mut doc3 = automerge::Automerge::load(&doc2.save())\n\n .unwrap()\n", "file_path": "rust/automerge/tests/test.rs", "rank": 60, "score": 118406.83060067301 }, { "content": "fn encoded_actor<O>(id: convert::ObjId<O>) -> Option<u64>\n\nwhere\n\n O: convert::OpId<usize>,\n\n{\n\n match id {\n\n convert::ObjId::Root => None,\n\n convert::ObjId::Op(o) => Some(o.actor() as u64),\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub(crate) struct ObjIdIter<'a> {\n\n actor: RleDecoder<'a, u64>,\n\n counter: RleDecoder<'a, u64>,\n\n}\n\n\n\nimpl<'a> ObjIdIter<'a> {\n\n fn try_next(&mut self) -> Result<Option<ObjId>, DecodeColumnError> {\n\n let actor = self\n\n .actor\n", "file_path": "rust/automerge/src/columnar/column_range/obj_id.rs", "rank": 61, "score": 117967.67527846442 }, { "content": "#[test]\n\nfn hash_for_opid() {\n\n let mut doc = AutoCommit::new();\n\n\n\n doc.put(ROOT, \"key1\", 1).unwrap();\n\n let (_, id1) = doc.get(ROOT, \"key1\").unwrap().unwrap();\n\n // it isn't available yet\n\n assert_eq!(doc.hash_for_opid(&id1), None);\n\n let hash1 = doc.commit();\n\n // we can get the hash for the change that made this id\n\n assert_eq!(doc.hash_for_opid(&id1), hash1);\n\n\n\n // this should still work with 
historical opids too\n\n doc.put(ROOT, \"key1\", 2).unwrap();\n\n let (_, id2) = doc.get(ROOT, \"key1\").unwrap().unwrap();\n\n // the newest one still isn't available yet\n\n assert_eq!(doc.hash_for_opid(&id2), None);\n\n let hash2 = doc.commit();\n\n assert_eq!(doc.hash_for_opid(&id1), hash1);\n\n assert_eq!(doc.hash_for_opid(&id2), hash2);\n\n\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 62, "score": 115691.20367427659 }, { "content": "#[test]\n\nfn parents_iterator() {\n\n let mut doc = AutoCommit::new();\n\n let map = doc.put_object(ROOT, \"a\", ObjType::Map).unwrap();\n\n let list = doc.put_object(&map, \"b\", ObjType::List).unwrap();\n\n doc.insert(&list, 0, 2).unwrap();\n\n let text = doc.put_object(&list, 0, ObjType::Text).unwrap();\n\n\n\n let mut parents = doc.parents(text).unwrap();\n\n assert_eq!(\n\n parents.next(),\n\n Some(Parent {\n\n obj: list,\n\n prop: Prop::Seq(0),\n\n visible: true\n\n })\n\n );\n\n assert_eq!(\n\n parents.next(),\n\n Some(Parent {\n\n obj: map,\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 63, "score": 115624.69317527727 }, { "content": "#[test]\n\nfn test_compressed_changes() {\n\n let mut doc = new_doc();\n\n // crate::storage::DEFLATE_MIN_SIZE is 250, so this should trigger compression\n\n doc.put(ROOT, \"bytes\", ScalarValue::Bytes(vec![10; 300]))\n\n .unwrap();\n\n let mut change = doc.get_last_local_change().unwrap().clone();\n\n let uncompressed = change.raw_bytes().to_vec();\n\n assert!(uncompressed.len() > 256);\n\n let compressed = change.bytes().to_vec();\n\n assert!(compressed.len() < uncompressed.len());\n\n\n\n let reloaded = automerge::Change::try_from(&compressed[..]).unwrap();\n\n assert_eq!(change.raw_bytes(), reloaded.raw_bytes());\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 64, "score": 115336.26413285025 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Winner<'a> {\n\n op: &'a Op,\n\n clock: &'a Clock,\n\n cross_visible: bool,\n\n conflict: 
bool,\n\n}\n\n\n\nimpl<'a> Deref for Winner<'a> {\n\n type Target = Op;\n\n\n\n fn deref(&self) -> &'a Self::Target {\n\n self.op\n\n }\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 65, "score": 114511.15860062368 }, { "content": "struct HashGraph {\n\n index_by_hash: HashMap<ChangeHash, usize, FxBuildHasher>,\n\n}\n\n\n\nimpl HashGraph {\n\n fn new<'a, I>(changes: I) -> Self\n\n where\n\n I: Iterator<Item = &'a Change>,\n\n {\n\n let mut index_by_hash: HashMap<_, _, _> = Default::default();\n\n for (index, change) in changes.enumerate() {\n\n index_by_hash.insert(change.hash(), index);\n\n }\n\n Self { index_by_hash }\n\n }\n\n\n\n fn change_index(&self, hash: &ChangeHash) -> usize {\n\n self.index_by_hash[hash]\n\n }\n\n\n\n fn heads_with_indices(&self, heads: Vec<ChangeHash>) -> Vec<(ChangeHash, usize)> {\n\n heads\n\n .into_iter()\n\n .map(|h| (h, self.index_by_hash[&h]))\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "rust/automerge/src/storage/save/document.rs", "rank": 66, "score": 113923.79351432831 }, { "content": "#[derive(Debug, Default)]\n\nstruct TextSpan {\n\n text: String,\n\n start: usize,\n\n marks: Option<Arc<MarkSet>>,\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 67, "score": 113760.83452172266 }, { "content": "fn resolve<'a>(\n\n before: Option<Winner<'a>>,\n\n after: Option<Winner<'a>>,\n\n diff: &mut MarkDiff<'a>,\n\n) -> Option<Patch<'a>> {\n\n match (before, after) {\n\n (None, Some(after)) if after.is_mark() => diff.process_after(after.op),\n\n (None, Some(after)) => Some(Patch::New(after, diff.after.current().cloned())),\n\n (Some(before), None) if before.is_mark() => diff.process_before(before.op),\n\n (Some(before), None) => Some(Patch::Delete(before)),\n\n (Some(_), Some(after)) if after.is_mark() => diff.process(after.op),\n\n (Some(before), Some(after)) if before.op.id == after.op.id => Some(Patch::Old {\n\n before,\n\n after,\n\n marks: diff.current(),\n\n }),\n\n 
(Some(before), Some(after)) if before.op.id != after.op.id => Some(Patch::Update {\n\n before,\n\n after,\n\n marks: diff.after.current().cloned(),\n\n }),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 68, "score": 113520.12677113112 }, { "content": "pub fn new_doc() -> automerge::AutoCommit {\n\n let mut d = automerge::AutoCommit::new();\n\n d.set_actor(automerge::ActorId::random());\n\n d\n\n}\n\n\n", "file_path": "rust/automerge-test/src/lib.rs", "rank": 69, "score": 113427.59302003219 }, { "content": "pub fn realize_obj<R: ReadDoc>(\n\n doc: &R,\n\n obj_id: &automerge::ObjId,\n\n objtype: automerge::ObjType,\n\n) -> RealizedObject {\n\n match objtype {\n\n automerge::ObjType::Map | automerge::ObjType::Table => {\n\n let mut result = BTreeMap::new();\n\n for key in doc.keys(obj_id) {\n\n result.insert(key.clone(), realize_values(doc, obj_id, key));\n\n }\n\n RealizedObject::Map(result)\n\n }\n\n automerge::ObjType::List | automerge::ObjType::Text => {\n\n let length = doc.length(obj_id);\n\n let mut result = Vec::with_capacity(length);\n\n for i in 0..length {\n\n result.push(realize_values(doc, obj_id, i));\n\n }\n\n RealizedObject::Sequence(result)\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/automerge-test/src/lib.rs", "rank": 70, "score": 113427.59302003219 }, { "content": "#[test]\n\nfn range_iter_map() {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 3).unwrap();\n\n tx.put(ROOT, \"b\", 4).unwrap();\n\n tx.put(ROOT, \"c\", 5).unwrap();\n\n tx.put(ROOT, \"d\", 6).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 7).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 8).unwrap();\n\n tx.put(ROOT, \"d\", 9).unwrap();\n\n tx.commit();\n\n let actor = doc.get_actor();\n\n assert_eq!(doc.map_range(ROOT, ..).count(), 4);\n\n\n\n let mut range = doc.map_range(ROOT, 
\"b\".to_owned()..\"d\".into());\n\n assert_eq!(\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 71, "score": 112752.32404047062 }, { "content": "#[test]\n\nfn keys_iter_map() {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 3).unwrap();\n\n tx.put(ROOT, \"b\", 4).unwrap();\n\n tx.put(ROOT, \"c\", 5).unwrap();\n\n tx.put(ROOT, \"d\", 6).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 7).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 8).unwrap();\n\n tx.put(ROOT, \"d\", 9).unwrap();\n\n tx.commit();\n\n assert_eq!(doc.keys(ROOT).count(), 4);\n\n\n\n let mut keys = doc.keys(ROOT);\n\n\n\n assert_eq!(keys.next(), Some(\"a\".into()));\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 72, "score": 112752.32404047062 }, { "content": "#[test]\n\nfn import_change_export() {\n\n let bin = env!(\"CARGO_BIN_EXE_automerge\");\n\n let initial_state_json = serde_json::json!({\n\n \"birds\": {\n\n \"wrens\": 3.0,\n\n \"sparrows\": 15.0\n\n }\n\n });\n\n let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap();\n\n\n\n let stdout = cmd!(bin, \"import\")\n\n .stdin_bytes(json_bytes.clone())\n\n .pipe(cmd!(bin, \"change\", \"set $[\\\"birds\\\"][\\\"owls\\\"] 12.0\"))\n\n .stdin_bytes(json_bytes)\n\n .pipe(cmd!(bin, \"export\"))\n\n .read()\n\n .unwrap();\n\n let result: serde_json::Value = serde_json::from_str(stdout.as_str()).unwrap();\n\n let expected = serde_json::json!({\n\n \"birds\": {\n\n \"wrens\": 3.0,\n\n \"sparrows\": 15.0,\n\n \"owls\": 12.0,\n\n }\n\n });\n\n assert_eq!(result, expected);\n\n}\n\n*/\n", "file_path": "rust/automerge-cli/tests/integration.rs", "rank": 73, "score": 112473.66242657171 }, { "content": "#[test]\n\nfn observe_counter_change_application() {\n\n let mut doc = AutoCommit::new();\n\n doc.put(ROOT, \"counter\", ScalarValue::counter(1)).unwrap();\n\n doc.increment(ROOT, 
\"counter\", 2).unwrap();\n\n doc.increment(ROOT, \"counter\", 5).unwrap();\n\n let changes = doc.get_changes(&[]).into_iter().cloned();\n\n\n\n let mut doc = AutoCommit::new();\n\n doc.apply_changes(changes).unwrap();\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 74, "score": 112473.66242657171 }, { "content": "struct Put<'a> {\n\n value: Value<'a>,\n\n key: Key,\n\n id: OpId,\n\n}\n\n\n\n/// Traverse the \"current\" state of the document, logging patches to `patch_log`.\n\n///\n\n/// The \"current\" state of the document is the set of visible operations. This function will\n\n/// traverse that set of operations and add corresponding patches to `patch_log` as it encounters\n\n/// values.\n\n///\n\n/// Due to only notifying of visible operations the [`PatchLog`] will only be called with `put`,\n\n/// `insert`, and `splice`, operations.\n\npub(crate) fn log_current_state_patches(doc: &Automerge, patch_log: &mut PatchLog) {\n\n // The OpSet already exposes operations in the order they appear in the document.\n\n // `OpSet::iter_objs` iterates over the objects in causal order, this means that parent objects\n\n // will always appear before their children. 
Furthermore, the operations within each object are\n\n // ordered by key (which means by their position in a sequence for sequences).\n\n //\n", "file_path": "rust/automerge/src/automerge/current_state.rs", "rank": 75, "score": 111329.3537780598 }, { "content": "fn save_trace_autotx(mut doc: AutoCommit) {\n\n doc.save();\n\n}\n\n\n", "file_path": "rust/edit-trace/benches/main.rs", "rank": 76, "score": 111245.24494348947 }, { "content": "#[test]\n\nfn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeError> {\n\n let mut doc1 = new_doc();\n\n let list = doc1.put_object(ROOT, \"list\", ObjType::List)?;\n\n doc1.insert(&list, 0, \"a\")?;\n\n let mut doc2 = doc1.fork();\n\n doc1.insert(&list, 1, \"b\")?;\n\n doc1.commit();\n\n doc1.insert(&list, 2, \"c\")?;\n\n doc1.commit();\n\n doc1.insert(&list, 3, \"d\")?;\n\n doc1.commit();\n\n let changes = doc1\n\n .get_changes(&[])\n\n .into_iter()\n\n .cloned()\n\n .collect::<Vec<_>>();\n\n doc2.apply_changes(changes[2..].to_vec())?;\n\n doc2.apply_changes(changes[2..].to_vec())?;\n\n doc2.apply_changes(changes)?;\n\n assert_eq!(doc1.save(), doc2.save());\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 77, "score": 110933.44141494215 }, { "content": "#[test]\n\nfn range_iter_map_rev() {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 3).unwrap();\n\n tx.put(ROOT, \"b\", 4).unwrap();\n\n tx.put(ROOT, \"c\", 5).unwrap();\n\n tx.put(ROOT, \"d\", 6).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 7).unwrap();\n\n tx.commit();\n\n let mut tx = doc.transaction();\n\n tx.put(ROOT, \"a\", 8).unwrap();\n\n tx.put(ROOT, \"d\", 9).unwrap();\n\n tx.commit();\n\n doc.dump();\n\n let actor = doc.get_actor();\n\n assert_eq!(doc.map_range(ROOT, ..).count(), 4);\n\n\n\n let mut range = doc.map_range(ROOT, \"b\".to_owned()..\"d\".into());\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 78, 
"score": 110068.12386783044 }, { "content": "#[test]\n\nfn bad_change_on_optree_node_boundary() {\n\n let mut doc = Automerge::new();\n\n doc.transact::<_, _, AutomergeError>(|d| {\n\n d.put(ROOT, \"a\", \"z\")?;\n\n d.put(ROOT, \"b\", 0)?;\n\n d.put(ROOT, \"c\", 0)?;\n\n Ok(())\n\n })\n\n .unwrap();\n\n let iterations = 15_u64;\n\n for i in 0_u64..iterations {\n\n doc.transact::<_, _, AutomergeError>(|d| {\n\n let s = \"a\".repeat(i as usize);\n\n d.put(ROOT, \"a\", s)?;\n\n d.put(ROOT, \"b\", i + 1)?;\n\n d.put(ROOT, \"c\", i + 1)?;\n\n Ok(())\n\n })\n\n .unwrap();\n\n }\n", "file_path": "rust/automerge/tests/test.rs", "rank": 79, "score": 109798.58981801232 }, { "content": "#[test]\n\nfn get_changes_heads_empty() {\n\n let mut doc = AutoCommit::new();\n\n doc.put(ROOT, \"key1\", 1).unwrap();\n\n doc.commit();\n\n doc.put(ROOT, \"key2\", 1).unwrap();\n\n doc.commit();\n\n let heads = doc.get_heads();\n\n assert_eq!(doc.get_changes(&heads), Vec::<&Change>::new());\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 80, "score": 109798.58981801232 }, { "content": "#[test]\n\nfn changes_within_conflicting_list_element() {\n\n let (actor1, actor2) = sorted_actors();\n\n let mut doc1 = new_doc_with_actor(actor1);\n\n let mut doc2 = new_doc_with_actor(actor2);\n\n let list_id = doc1\n\n .put_object(&automerge::ROOT, \"list\", ObjType::List)\n\n .unwrap();\n\n doc1.insert(&list_id, 0, \"hello\").unwrap();\n\n doc2.merge(&mut doc1).unwrap();\n\n\n\n let map_in_doc1 = doc1.put_object(&list_id, 0, ObjType::Map).unwrap();\n\n doc1.put(&map_in_doc1, \"map1\", true).unwrap();\n\n doc1.put(&map_in_doc1, \"key\", 1).unwrap();\n\n\n\n let map_in_doc2 = doc2.put_object(&list_id, 0, ObjType::Map).unwrap();\n\n doc1.merge(&mut doc2).unwrap();\n\n doc2.put(&map_in_doc2, \"map2\", true).unwrap();\n\n doc2.put(&map_in_doc2, \"key\", 2).unwrap();\n\n\n\n doc1.merge(&mut doc2).unwrap();\n", "file_path": "rust/automerge/tests/test.rs", "rank": 81, "score": 
109798.58981801232 }, { "content": "#[test]\n\nfn observe_counter_change_application() {\n\n let mut doc = AutoCommit::new();\n\n doc.put(ROOT, \"counter\", ScalarValue::counter(1)).unwrap();\n\n doc.increment(ROOT, \"counter\", 2).unwrap();\n\n doc.increment(ROOT, \"counter\", 5).unwrap();\n\n let changes = doc.get_changes(&[]).into_iter().cloned();\n\n\n\n let mut new_doc = AutoCommit::new();\n\n // make a new change to the doc to stop the empty doc logic from skipping the intermediate\n\n // patches. The is probably not really necessary, we could update this test to just test that\n\n // the correct final state is emitted. For now though, we leave it as is.\n\n new_doc.put(ROOT, \"foo\", \"bar\").unwrap();\n\n new_doc.update_diff_cursor();\n\n new_doc.apply_changes(changes).unwrap();\n\n assert_eq!(\n\n new_doc\n\n .diff_incremental()\n\n .into_iter()\n\n .map(|p| p.action)\n\n .collect::<Vec<_>>(),\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 82, "score": 109798.58981801232 }, { "content": "#[test]\n\nfn changes_within_conflicting_map_field() {\n\n let mut doc1 = new_doc();\n\n let mut doc2 = new_doc();\n\n doc1.put(&automerge::ROOT, \"field\", \"string\").unwrap();\n\n let map_id = doc2\n\n .put_object(&automerge::ROOT, \"field\", ObjType::Map)\n\n .unwrap();\n\n doc2.put(&map_id, \"innerKey\", 42).unwrap();\n\n doc1.merge(&mut doc2).unwrap();\n\n\n\n assert_doc!(\n\n &doc1,\n\n map! 
{\n\n \"field\" => {\n\n \"string\",\n\n map!{\n\n \"innerKey\" => {\n\n 42,\n\n }\n\n }\n\n }\n\n }\n\n );\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 83, "score": 109798.58981801232 }, { "content": "/// Pretty print the contents of a document\n\npub fn pretty_print(doc: &automerge::Automerge) {\n\n println!(\"{}\", serde_json::to_string_pretty(&realize(doc)).unwrap())\n\n}\n\n\n", "file_path": "rust/automerge-test/src/lib.rs", "rank": 84, "score": 109075.92548543695 }, { "content": "fn push_top<'a>(top: &mut Option<Winner<'a>>, op: &'a Op, cross_visible: bool, clock: &'a Clock) {\n\n match &op.action {\n\n OpType::Increment(_) => {} // can ignore - info captured inside Counter\n\n _ => {\n\n top.replace(Winner {\n\n op,\n\n clock,\n\n cross_visible,\n\n conflict: top.is_some(),\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/diff.rs", "rank": 85, "score": 108338.82419764457 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct NodeIdx(u32);\n\n\n", "file_path": "rust/automerge/src/change_graph.rs", "rank": 86, "score": 108275.89879725332 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct EdgeIdx(u32);\n\n\n", "file_path": "rust/automerge/src/change_graph.rs", "rank": 87, "score": 108275.89879725332 }, { "content": "struct ChangeWithGraph<'a> {\n\n change: &'a Change,\n\n graph: &'a HashGraph,\n\n actor_lookup: &'a [usize],\n\n actors: &'a IndexedCache<ActorId>,\n\n}\n\n\n\nimpl<'a> AsChangeMeta<'a> for ChangeWithGraph<'a> {\n\n type DepsIter = ChangeDepsIter<'a>;\n\n\n\n fn actor(&self) -> u64 {\n\n self.actor_lookup[self.actors.lookup(self.change.actor_id()).unwrap()] as u64\n\n }\n\n\n\n fn seq(&self) -> u64 {\n\n self.change.seq()\n\n }\n\n\n\n fn deps(&self) -> Self::DepsIter {\n\n ChangeDepsIter {\n", "file_path": "rust/automerge/src/storage/save/document.rs", "rank": 88, "score": 108271.14191122442 }, { "content": "#[test]\n\nfn 
splice_text_uses_unicode_scalars() {\n\n let mut doc = Automerge::new();\n\n let mut tx = doc.transaction();\n\n let text = tx.put_object(ROOT, \"text\", ObjType::Text).unwrap();\n\n let polar_bear = \"🐻‍❄️\";\n\n tx.splice_text(&text, 0, 0, polar_bear).unwrap();\n\n tx.commit();\n\n let s = doc.text(&text).unwrap();\n\n assert_eq!(s, polar_bear);\n\n let len = doc.length(&text);\n\n assert_eq!(len, 4); // 4 chars\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 89, "score": 107634.8607583264 }, { "content": "fn load_next_change<'a>(\n\n data: parse::Input<'a>,\n\n changes: &mut Vec<Change>,\n\n) -> Result<parse::Input<'a>, Error> {\n\n let (remaining, chunk) = storage::Chunk::parse(data).map_err(|e| Error::Parse(Box::new(e)))?;\n\n if !chunk.checksum_valid() {\n\n return Err(Error::BadChecksum);\n\n }\n\n match chunk {\n\n storage::Chunk::Document(d) => {\n\n tracing::trace!(\"loading document chunk\");\n\n let Reconstructed {\n\n changes: new_changes,\n\n ..\n\n } = reconstruct_document(&d, VerificationMode::DontCheck, NullObserver)\n\n .map_err(|e| Error::InflateDocument(Box::new(e)))?;\n\n changes.extend(new_changes);\n\n }\n\n storage::Chunk::Change(change) => {\n\n tracing::trace!(\"loading change chunk\");\n", "file_path": "rust/automerge/src/storage/load.rs", "rank": 90, "score": 107304.9911089646 }, { "content": "#[test]\n\nfn observe_counter_change_application_overwrite() {\n\n let mut doc1 = AutoCommit::new();\n\n doc1.set_actor(ActorId::from([1]));\n\n doc1.put(ROOT, \"counter\", ScalarValue::counter(1)).unwrap();\n\n doc1.commit();\n\n\n\n let mut doc2 = doc1.fork();\n\n doc2.set_actor(ActorId::from([2]));\n\n doc2.put(ROOT, \"counter\", \"mystring\").unwrap();\n\n doc2.commit();\n\n\n\n doc1.increment(ROOT, \"counter\", 2).unwrap();\n\n doc1.commit();\n\n doc1.increment(ROOT, \"counter\", 5).unwrap();\n\n doc1.commit();\n\n\n\n let mut doc3 = doc1.fork();\n\n doc3.merge(&mut doc2).unwrap();\n\n\n\n assert_eq!(\n", 
"file_path": "rust/automerge/src/automerge/tests.rs", "rank": 91, "score": 107293.20320547736 }, { "content": "#[test]\n\nfn load_change_with_zero_start_op() {\n\n let bytes = &[\n\n 133, 111, 74, 131, 202, 50, 52, 158, 2, 96, 163, 163, 83, 255, 255, 255, 50, 50, 50, 50,\n\n 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 255, 255, 245, 53, 1, 0, 0, 0, 0, 0, 0, 4, 233,\n\n 245, 239, 255, 1, 0, 0, 0, 133, 111, 74, 131, 163, 96, 0, 0, 2, 10, 202, 144, 125, 19, 48,\n\n 89, 133, 49, 10, 10, 67, 91, 111, 10, 74, 131, 96, 0, 163, 131, 255, 255, 255, 255, 255,\n\n 255, 255, 255, 255, 1, 153, 0, 0, 246, 255, 255, 255, 157, 157, 157, 157, 157, 157, 157,\n\n 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,\n\n 255, 255, 255, 48, 254, 208,\n\n ];\n\n let _ = Automerge::load(bytes);\n\n}\n\n\n", "file_path": "rust/automerge/src/automerge/tests.rs", "rank": 92, "score": 107293.20320547736 }, { "content": "#[test]\n\nfn save_orphaned_changes() {\n\n let Orphans {\n\n mut doc,\n\n missing_change,\n\n } = doc_with_orphans();\n\n\n\n let saved = doc.save();\n\n let mut loaded = AutoCommit::load(&saved).unwrap();\n\n\n\n loaded.apply_changes(vec![missing_change]).unwrap();\n\n\n\n // Both changes should now have been applied so the end result should be value3\n\n assert_eq!(\n\n loaded.get(&ROOT, \"key\").unwrap().unwrap().0,\n\n Value::from(\"value3\")\n\n );\n\n}\n\n\n", "file_path": "rust/automerge/tests/test_save_load_orphans.rs", "rank": 93, "score": 107293.20320547736 }, { "content": "struct AutoSerdeSeq<'a, R> {\n\n doc: &'a R,\n\n obj: ObjId,\n\n}\n\n\n\nimpl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeSeq<'a, R> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n let mut seq_ser = serializer.serialize_seq(None)?;\n\n for i in 0..self.doc.length(&self.obj) {\n\n // SAFETY: This only errors if the object ID is unknown, but we construct this type\n\n // with a known real 
object ID\n\n let (val, obj) = self.doc.get(&self.obj, i).unwrap().unwrap();\n\n let serdeval = AutoSerdeVal {\n\n doc: self.doc,\n\n val,\n\n obj,\n\n };\n\n seq_ser.serialize_element(&serdeval)?;\n\n }\n\n seq_ser.end()\n\n }\n\n}\n\n\n", "file_path": "rust/automerge/src/autoserde.rs", "rank": 94, "score": 104855.26037831178 }, { "content": "fn parse_change_script(input: &str) -> Result<amf::LocalChange, ChangeError> {\n\n let (change, _) =\n\n change_parser()\n\n .easy_parse(input)\n\n .map_err(|e| ChangeError::InvalidChangeScript {\n\n message: e.to_string(),\n\n })?;\n\n Ok(change)\n\n}\n\n\n", "file_path": "rust/automerge-cli/src/change.rs", "rank": 95, "score": 104458.71681914208 }, { "content": "fn value_parser<'a, Input>(\n\n) -> Box<dyn combine::Parser<Input, Output = amf::Value, PartialState = ()> + 'a>\n\nwhere\n\n Input: 'a,\n\n Input: combine::Stream<Token = char>,\n\n Input::Error: combine::ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n combine::parser::combinator::no_partial(\n\n //combine::position().and(combine::many1::<Vec<char>, _, _>(combine::any())).and_then(\n\n combine::position().and(combine::many1::<Vec<char>, _, _>(combine::any())).flat_map(\n\n |(position, chars): (Input::Position, Vec<char>)| -> Result<amf::Value, Input::Error> {\n\n let json_str: String = chars.into_iter().collect();\n\n let json: serde_json::Value = serde_json::from_str(json_str.as_str()).map_err(|e| {\n\n //let pe = <Input::Error as ParseError<_, _, _>>::StreamError::message::<combine::error::Format<String>>(combine::error::Format(e.to_string()));\n\n //let pe = <Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError::message(e.to_string().into());\n\n let mut pe = Input::Error::empty(position);\n\n pe.add_message(combine::error::Format(e.to_string()));\n\n //let pe = combine::ParseError:::wmpty(position);\n\n pe\n\n })?;\n\n Ok(amf::Value::from_json(&json))\n\n },\n\n )\n\n ).boxed()\n\n}\n\n\n", "file_path": 
"rust/automerge-cli/src/change.rs", "rank": 96, "score": 103621.60032317389 }, { "content": "#[derive(Clone)]\n\nstruct ExtraDecoder<'a> {\n\n val: ValueIter<'a>,\n\n}\n\n\n\nimpl<'a> Iterator for ExtraDecoder<'a> {\n\n type Item = Result<Cow<'a, [u8]>, ReadChangeError>;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.val.next() {\n\n Some(Ok(ScalarValue::Bytes(b))) => Some(Ok(Cow::Owned(b))),\n\n Some(Ok(_)) => Some(Err(ReadChangeError::InvalidExtraBytes)),\n\n Some(Err(e)) => Some(Err(e.into())),\n\n None => None,\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<Columns> for DocChangeColumns {\n\n type Error = ReadChangeError;\n\n\n\n fn try_from(columns: Columns) -> Result<Self, Self::Error> {\n", "file_path": "rust/automerge/src/storage/document/doc_change_columns.rs", "rank": 97, "score": 103079.42597876914 }, { "content": "#[test]\n\nfn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id() {\n\n let (actor1, actor2) = sorted_actors();\n\n assert!(actor2 > actor1);\n\n let mut doc1 = new_doc_with_actor(actor1);\n\n let mut doc2 = new_doc_with_actor(actor2);\n\n\n\n let list = doc1\n\n .put_object(&automerge::ROOT, \"list\", ObjType::List)\n\n .unwrap();\n\n doc1.insert(&list, 0, \"two\").unwrap();\n\n doc2.merge(&mut doc1).unwrap();\n\n\n\n doc2.insert(&list, 0, \"one\").unwrap();\n\n assert_doc!(\n\n &doc2,\n\n map! 
{\n\n \"list\" => { list![\n\n { \"one\" },\n\n { \"two\" },\n\n ]}\n\n }\n\n );\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 98, "score": 102983.11458737048 }, { "content": "#[test]\n\nfn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() {\n\n let (actor2, actor1) = sorted_actors();\n\n assert!(actor2 < actor1);\n\n let mut doc1 = new_doc_with_actor(actor1);\n\n let mut doc2 = new_doc_with_actor(actor2);\n\n\n\n let list = doc1\n\n .put_object(&automerge::ROOT, \"list\", ObjType::List)\n\n .unwrap();\n\n doc1.insert(&list, 0, \"two\").unwrap();\n\n doc2.merge(&mut doc1).unwrap();\n\n\n\n doc2.insert(&list, 0, \"one\").unwrap();\n\n assert_doc!(\n\n &doc2,\n\n map! {\n\n \"list\" => { list![\n\n { \"one\" },\n\n { \"two\" },\n\n ]}\n\n }\n\n );\n\n}\n\n\n", "file_path": "rust/automerge/tests/test.rs", "rank": 99, "score": 102983.11458737048 } ]
Rust
src/options.rs
Mange/graceful-shutdown
c99fb299c07611c9c57526f9be59080500094b39
extern crate structopt; extern crate termion; extern crate users; use matcher::MatchMode; use signal::Signal; use std::time::Duration; use structopt::clap::Shell; #[derive(Debug, Clone, Copy)] pub enum OutputMode { Normal, Verbose, Quiet, } #[derive(Debug, Clone, Copy)] enum ColorMode { Auto, Always, Never, } #[derive(StructOpt, Debug)] #[structopt(raw(setting = "structopt::clap::AppSettings::ColoredHelp"))] pub struct CliOptions { #[structopt(short = "w", long = "wait-time", default_value = "5.0", value_name = "SECONDS")] wait_time: f64, #[structopt(long = "no-kill")] no_kill: bool, #[structopt( short = "s", long = "terminate-signal", default_value = "term", value_name = "SIGNAL", parse(try_from_str = "parse_signal") )] terminate_signal: Signal, #[structopt( long = "kill-signal", default_value = "kill", value_name = "SIGNAL", parse(try_from_str = "parse_signal") )] kill_signal: Signal, #[structopt(short = "W", long = "whole-command", visible_alias = "whole")] match_whole: bool, #[structopt(short = "u", long = "user", value_name = "USER", overrides_with = "mine")] user: Option<String>, #[structopt(short = "m", long = "mine", overrides_with = "user")] mine: bool, #[structopt(short = "n", long = "dry-run")] dry_run: bool, #[structopt(short = "v", long = "verbose", overrides_with = "quiet")] verbose: bool, #[structopt(short = "q", long = "quiet", overrides_with = "verbose")] quiet: bool, #[structopt( long = "color", default_value = "auto", raw(possible_values = "&ColorMode::variants()") )] color_mode: ColorMode, #[structopt(long = "list-signals")] pub list_signals: bool, #[structopt( long = "generate-completions", value_name = "SHELL", raw(possible_values = "&Shell::variants()") )] pub generate_completions: Option<Shell>, } #[derive(Debug)] pub struct Options { pub dry_run: bool, pub kill: bool, pub kill_signal: Signal, pub match_mode: MatchMode, pub output_mode: OutputMode, pub terminate_signal: Signal, pub colors: Colors, pub user_mode: UserMode, pub wait_time: 
Option<Duration>, } #[derive(Debug)] pub enum UserMode { Everybody, OnlyMe, Only(String), } #[derive(Debug)] pub struct Colors { enabled: bool, } impl From<CliOptions> for Options { fn from(cli_options: CliOptions) -> Options { let wait_time = if cli_options.wait_time > 0.0 { Some(duration_from_secs_float(cli_options.wait_time)) } else { None }; let user_mode = match (cli_options.user, cli_options.mine) { (Some(name), false) => UserMode::Only(name), (None, true) => UserMode::OnlyMe, (None, false) => UserMode::Everybody, (Some(_), true) => unreachable!("Should not happen because of overrides_with"), }; let match_mode = if cli_options.match_whole { MatchMode::Commandline } else { MatchMode::Basename }; let output_mode = match (cli_options.dry_run, cli_options.verbose, cli_options.quiet) { (true, _, _) => OutputMode::Verbose, (false, false, false) => OutputMode::Normal, (false, true, false) => OutputMode::Verbose, (false, false, true) => OutputMode::Quiet, (false, true, true) => unreachable!("Should not happen due to overrides_with option"), }; let use_color = match cli_options.color_mode { ColorMode::Never => false, ColorMode::Always => true, ColorMode::Auto => termion::is_tty(&::std::io::stdout()), }; Options { dry_run: cli_options.dry_run, kill: !cli_options.no_kill, kill_signal: cli_options.kill_signal, match_mode, output_mode, terminate_signal: cli_options.terminate_signal, colors: Colors { enabled: use_color }, user_mode, wait_time, } } } impl OutputMode { pub fn show_normal(self) -> bool { match self { OutputMode::Verbose | OutputMode::Normal => true, OutputMode::Quiet => false, } } pub fn show_verbose(self) -> bool { match self { OutputMode::Verbose => true, OutputMode::Normal | OutputMode::Quiet => false, } } } impl ColorMode { fn variants() -> [&'static str; 3] { ["auto", "always", "never"] } } impl ::std::str::FromStr for ColorMode { type Err = &'static str; fn from_str(string: &str) -> Result<ColorMode, Self::Err> { match string { "auto" => 
Ok(ColorMode::Auto), "always" => Ok(ColorMode::Always), "never" => Ok(ColorMode::Never), _ => Err("Not a valid color mode"), } } } impl Colors { pub fn reset(&self) -> String { if self.enabled { format!( "{}{}", termion::color::Fg(termion::color::Reset), termion::style::Reset, ) } else { String::new() } } pub fn red(&self) -> String { if self.enabled { termion::color::Fg(termion::color::Red).to_string() } else { String::new() } } pub fn yellow(&self) -> String { if self.enabled { termion::color::Fg(termion::color::Yellow).to_string() } else { String::new() } } pub fn green(&self) -> String { if self.enabled { termion::color::Fg(termion::color::Green).to_string() } else { String::new() } } pub fn faded(&self) -> String { if self.enabled { termion::style::Faint.to_string() } else { String::new() } } } fn parse_signal(sig: &str) -> Result<Signal, String> { sig.parse() .map_err(|_| format!("Failed to parse \"{}\" as a signal name.", sig)) } fn duration_from_secs_float(float: f64) -> Duration { let whole_seconds = float.floor(); let sec_frac = float - whole_seconds; let nanos = (sec_frac * 1e9).round(); Duration::new(whole_seconds as u64, nanos as u32) }
extern crate structopt; extern crate termion; extern crate users; use matcher::MatchMode; use signal::Signal; use std::time::Duration; use structopt::clap::Shell; #[derive(Debug, Clone, Copy)] pub enum OutputMode { Normal, Verbose, Quiet, } #[derive(Debug, Clone, Copy)] enum ColorMode { Auto, Always, Never, } #[derive(StructOpt, Debug)] #[structopt(raw(setting = "structopt::clap::AppSettings::ColoredHelp"))] pub struct CliOptions { #[structopt(short = "w", long = "wait-time", default_value = "5.0", value_name = "SECONDS")] wait_time: f64, #[structopt(long = "no-kill")] no_kill: bool, #[structopt( short = "s", long = "terminate-signal", default_value = "term", value_name = "SIGNAL", parse(try_from_str = "parse_signal") )] terminate_signal: Signal, #[structopt( long = "kill-signal", default_value = "kill", value_name = "SIGNAL", parse(try_from_str = "parse_signal") )] kill_signal: Signal, #[structopt(short = "W", long = "whole-command", visible_alias = "whole")] match_whole: bool, #[structopt(short = "u", long = "user", value_name = "USER", overrides_with = "mine")] user: Option<String>, #[structopt(short = "m", long = "mine", overrides_with = "user")] mine: bool, #[structopt(short = "n", long = "dry-run")] dry_run: bool, #[structopt(short = "v", long = "verbose", overrides_with = "quiet")] verbose: bool, #[structopt(short = "q", long = "quiet", overrides_with = "verbose")] quiet: bool, #[structopt( long = "color", default_value = "auto", raw(possible_values = "&ColorMode::variants()") )] color_mode: ColorMode, #[structopt(long = "list-signals")] pub list_signals: bool, #[structopt( long = "generate-completions", value_name = "SHELL", raw(possible_values = "&Shell::variants()") )] pub generate_completions: Option<Shell>, } #[derive(Debug)] pub struct Options { pub dry_run: bool, pub kill: bool, pub kill_signal: Signal, pub match_mode: MatchMode, pub output_mode: OutputMode, pub terminate_signal: Signal, pub colors: Colors, pub user_mode: UserMode, pub wait_time: 
Option<Duration>, } #[derive(Debug)] pub enum UserMode { Everybody, OnlyMe, Only(String), } #[derive(Debug)] pub struct Colors { enabled: bool, } impl From<CliOptions> for Options {
} impl OutputMode { pub fn show_normal(self) -> bool { match self { OutputMode::Verbose | OutputMode::Normal => true, OutputMode::Quiet => false, } } pub fn show_verbose(self) -> bool { match self { OutputMode::Verbose => true, OutputMode::Normal | OutputMode::Quiet => false, } } } impl ColorMode { fn variants() -> [&'static str; 3] { ["auto", "always", "never"] } } impl ::std::str::FromStr for ColorMode { type Err = &'static str; fn from_str(string: &str) -> Result<ColorMode, Self::Err> { match string { "auto" => Ok(ColorMode::Auto), "always" => Ok(ColorMode::Always), "never" => Ok(ColorMode::Never), _ => Err("Not a valid color mode"), } } } impl Colors { pub fn reset(&self) -> String { if self.enabled { format!( "{}{}", termion::color::Fg(termion::color::Reset), termion::style::Reset, ) } else { String::new() } } pub fn red(&self) -> String { if self.enabled { termion::color::Fg(termion::color::Red).to_string() } else { String::new() } } pub fn yellow(&self) -> String { if self.enabled { termion::color::Fg(termion::color::Yellow).to_string() } else { String::new() } } pub fn green(&self) -> String { if self.enabled { termion::color::Fg(termion::color::Green).to_string() } else { String::new() } } pub fn faded(&self) -> String { if self.enabled { termion::style::Faint.to_string() } else { String::new() } } } fn parse_signal(sig: &str) -> Result<Signal, String> { sig.parse() .map_err(|_| format!("Failed to parse \"{}\" as a signal name.", sig)) } fn duration_from_secs_float(float: f64) -> Duration { let whole_seconds = float.floor(); let sec_frac = float - whole_seconds; let nanos = (sec_frac * 1e9).round(); Duration::new(whole_seconds as u64, nanos as u32) }
fn from(cli_options: CliOptions) -> Options { let wait_time = if cli_options.wait_time > 0.0 { Some(duration_from_secs_float(cli_options.wait_time)) } else { None }; let user_mode = match (cli_options.user, cli_options.mine) { (Some(name), false) => UserMode::Only(name), (None, true) => UserMode::OnlyMe, (None, false) => UserMode::Everybody, (Some(_), true) => unreachable!("Should not happen because of overrides_with"), }; let match_mode = if cli_options.match_whole { MatchMode::Commandline } else { MatchMode::Basename }; let output_mode = match (cli_options.dry_run, cli_options.verbose, cli_options.quiet) { (true, _, _) => OutputMode::Verbose, (false, false, false) => OutputMode::Normal, (false, true, false) => OutputMode::Verbose, (false, false, true) => OutputMode::Quiet, (false, true, true) => unreachable!("Should not happen due to overrides_with option"), }; let use_color = match cli_options.color_mode { ColorMode::Never => false, ColorMode::Always => true, ColorMode::Auto => termion::is_tty(&::std::io::stdout()), }; Options { dry_run: cli_options.dry_run, kill: !cli_options.no_kill, kill_signal: cli_options.kill_signal, match_mode, output_mode, terminate_signal: cli_options.terminate_signal, colors: Colors { enabled: use_color }, user_mode, wait_time, } }
function_block-full_function
[ { "content": "#[must_use]\n\nfn send_with_error_handling(signal: Signal, options: &Options, process: &Process) -> bool {\n\n match process.send(signal) {\n\n Ok(_) => true,\n\n // Process quit before we had time to signal it? That should be fine. The next steps will\n\n // verify that it is gone instead.\n\n Err(KillError::DoesNotExist) => true,\n\n Err(error) => {\n\n eprintln!(\n\n \"{red}Failed to send {signal} to{reset} {process}: {red}{error}{reset}\",\n\n signal = signal,\n\n process = human_process_description(options, process),\n\n error = error,\n\n red = options.colors.red(),\n\n reset = options.colors.reset(),\n\n );\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 1, "score": 80233.45042263919 }, { "content": "fn generate_completions(shell: structopt::clap::Shell) {\n\n let mut app = CliOptions::clap();\n\n let name = app.get_name().to_string();\n\n\n\n app.gen_completions_to(name, shell, &mut io::stdout());\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 74728.2714450265 }, { "content": "fn verbose_signal_message(signal: Signal, options: &Options, process: &Process) {\n\n if options.output_mode.show_verbose() {\n\n eprintln!(\n\n \"Sending {signal} to process {process}\",\n\n signal = signal,\n\n process = human_process_description(options, process),\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 61469.38674753125 }, { "content": "fn dry_run(options: &Options, processes: &[Process]) -> Result<bool, Error> {\n\n // If we're not rendering anything, might as well skip the iteration completely.\n\n if !options.output_mode.show_normal() {\n\n return Ok(true);\n\n }\n\n\n\n for process in processes {\n\n println!(\n\n \"Would have sent {signal} to process {process}\",\n\n signal = options.terminate_signal,\n\n process = human_process_description(options, process),\n\n );\n\n }\n\n\n\n Ok(true)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 55899.16863689084 }, { "content": "fn 
run(options: &Options) -> Result<bool, Error> {\n\n let matcher = Matcher::new(\n\n load_patterns(options).context(\"Could not load patterns\")?,\n\n options.match_mode,\n\n );\n\n\n\n let processes = all_processes(options, &matcher).context(\"Could not build process list\")?;\n\n\n\n // Time to shut them down\n\n if options.dry_run {\n\n dry_run(options, &processes)\n\n } else {\n\n real_run(options, processes)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 50079.738909336076 }, { "content": "fn real_run(options: &Options, mut processes: Vec<Process>) -> Result<bool, Error> {\n\n let mut success = true;\n\n\n\n // Try to terminate all the processes. If any process failed to receive the signal, then remove\n\n // it from the list so the coming waiting part does not wait for any process that will not be\n\n // terminated anyway.\n\n //\n\n // As an example, if a process has a \"Permission denied\" error, it will fail to get the\n\n // terminate signal. Why would we be waiting on this process and then try to kill it when that\n\n // too will fail?\n\n processes.retain(|process| {\n\n verbose_signal_message(options.terminate_signal, options, process);\n\n if send_with_error_handling(options.terminate_signal, options, process) {\n\n true\n\n } else {\n\n success = false;\n\n false\n\n }\n\n });\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 41164.69596477636 }, { "content": "fn is_dir(entry: &DirEntry) -> bool {\n\n entry.file_type().map(|t| t.is_dir()).unwrap_or(false)\n\n}\n\n\n", "file_path": "src/processes.rs", "rank": 9, "score": 31679.94748676152 }, { "content": "fn has_numeric_name(entry: &DirEntry) -> bool {\n\n entry\n\n .file_name()\n\n .to_string_lossy()\n\n .bytes()\n\n .all(|b| b >= b'0' && b <= b'9')\n\n}\n\n\n\nimpl ProcessIterator {\n\n fn new() -> Result<ProcessIterator, Error> {\n\n Ok(ProcessIterator {\n\n read_dir: read_dir(\"/proc\")\n\n .map_err(|err| format_err!(\"Failed to open /proc: {}\", err))?,\n\n })\n\n 
}\n\n}\n\n\n\nimpl Iterator for ProcessIterator {\n\n type Item = Result<Process, String>;\n\n\n", "file_path": "src/processes.rs", "rank": 10, "score": 30867.34435583103 }, { "content": "use nix::sys::signal::Signal as NixSignal;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct Signal(NixSignal);\n\n\n\nimpl Signal {\n\n pub fn iterator() -> impl Iterator<Item = Signal> {\n\n NixSignal::iterator().map(Signal)\n\n }\n\n\n\n pub fn name(self) -> String {\n\n format!(\"SIG{}\", self.basename())\n\n }\n\n\n\n pub fn basename(self) -> &'static str {\n\n match self.0 {\n\n NixSignal::SIGABRT => \"ABRT\",\n\n NixSignal::SIGALRM => \"ALRM\",\n", "file_path": "src/signal.rs", "rank": 11, "score": 22391.034777253026 }, { "content": " }\n\n}\n\n\n\nimpl From<Signal> for NixSignal {\n\n fn from(signal: Signal) -> NixSignal {\n\n signal.0\n\n }\n\n}\n\n\n\nimpl From<Signal> for Option<NixSignal> {\n\n fn from(signal: Signal) -> Option<NixSignal> {\n\n Some(signal.0)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum ParseError {\n\n UnknownSignalName,\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 12, "score": 22388.88876706076 }, { "content": "impl FromStr for Signal {\n\n type Err = ParseError;\n\n\n\n fn from_str(sig: &str) -> Result<Signal, ParseError> {\n\n let upper_sig = {\n\n let mut s = String::from(sig);\n\n s.make_ascii_uppercase();\n\n s\n\n };\n\n\n\n let signal_number: Option<i32> = sig.parse().ok();\n\n\n\n for signal in Signal::iterator() {\n\n if signal.basename() == upper_sig || signal.name() == upper_sig\n\n || signal_number\n\n .map(|num| signal.number() == num)\n\n .unwrap_or(false)\n\n {\n\n return Ok(signal);\n\n }\n", "file_path": "src/signal.rs", "rank": 13, "score": 22383.392428724383 }, { "content": " NixSignal::SIGURG => \"URG\",\n\n NixSignal::SIGXCPU => \"XCPU\",\n\n NixSignal::SIGXFSZ => \"XFSZ\",\n\n NixSignal::SIGVTALRM => \"VTALRM\",\n\n NixSignal::SIGPROF => 
\"PROF\",\n\n NixSignal::SIGWINCH => \"WINCH\",\n\n NixSignal::SIGIO => \"IO\",\n\n NixSignal::SIGPWR => \"PWR\",\n\n NixSignal::SIGSYS => \"SYS\",\n\n }\n\n }\n\n\n\n pub fn number(self) -> i32 {\n\n self.0 as i32\n\n }\n\n}\n\n\n\nimpl fmt::Display for Signal {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.basename().fmt(f)\n", "file_path": "src/signal.rs", "rank": 14, "score": 22383.27530205383 }, { "content": " NixSignal::SIGHUP => \"HUP\",\n\n NixSignal::SIGINT => \"INT\",\n\n NixSignal::SIGKILL => \"KILL\",\n\n NixSignal::SIGQUIT => \"QUIT\",\n\n NixSignal::SIGSTOP => \"STOP\",\n\n NixSignal::SIGTERM => \"TERM\",\n\n NixSignal::SIGUSR1 => \"USR1\",\n\n NixSignal::SIGUSR2 => \"USR2\",\n\n NixSignal::SIGILL => \"ILL\",\n\n NixSignal::SIGTRAP => \"TRAP\",\n\n NixSignal::SIGBUS => \"BUS\",\n\n NixSignal::SIGFPE => \"FPE\",\n\n NixSignal::SIGSEGV => \"SEGV\",\n\n NixSignal::SIGPIPE => \"PIPE\",\n\n NixSignal::SIGSTKFLT => \"STKFLT\",\n\n NixSignal::SIGCHLD => \"CHLD\",\n\n NixSignal::SIGCONT => \"CONT\",\n\n NixSignal::SIGTSTP => \"TSTP\",\n\n NixSignal::SIGTTIN => \"TTIN\",\n\n NixSignal::SIGTTOU => \"TTOU\",\n", "file_path": "src/signal.rs", "rank": 15, "score": 22382.909680763933 }, { "content": " }\n\n\n\n Err(ParseError::UnknownSignalName)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_parses_strings_with_basename() {\n\n let sig: Signal = \"kiLL\".parse().expect(\"Failed to parse\");\n\n assert_eq!(sig, Signal(NixSignal::SIGKILL));\n\n }\n\n\n\n #[test]\n\n fn it_parses_strings_with_name() {\n\n let sig: Signal = \"SiGkiLL\".parse().expect(\"Failed to parse\");\n\n assert_eq!(sig, Signal(NixSignal::SIGKILL));\n", "file_path": "src/signal.rs", "rank": 16, "score": 22381.815792565103 }, { "content": " \"31337\".parse::<Signal>(),\n\n Err(ParseError::UnknownSignalName)\n\n );\n\n }\n\n\n\n #[test]\n\n fn it_roundtrips_all_signals_parsing() {\n\n for signal in Signal::iterator() {\n\n 
assert_eq!(signal.basename().parse(), Ok(signal));\n\n assert_eq!(signal.name().parse(), Ok(signal));\n\n assert_eq!(signal.number().to_string().parse(), Ok(signal));\n\n }\n\n }\n\n}\n", "file_path": "src/signal.rs", "rank": 17, "score": 22380.865597110238 }, { "content": " }\n\n\n\n #[test]\n\n fn it_parses_strings_with_signal_number() {\n\n let string = Signal(NixSignal::SIGKILL).number().to_string();\n\n let sig: Signal = string.parse().expect(\"Failed to parse\");\n\n assert_eq!(sig, Signal(NixSignal::SIGKILL));\n\n }\n\n\n\n #[test]\n\n fn it_does_not_parse_invalid_strings() {\n\n assert_eq!(\n\n \"foobar\".parse::<Signal>(),\n\n Err(ParseError::UnknownSignalName)\n\n );\n\n assert_eq!(\n\n \"sigfoo\".parse::<Signal>(),\n\n Err(ParseError::UnknownSignalName)\n\n );\n\n assert_eq!(\n", "file_path": "src/signal.rs", "rank": 18, "score": 22380.69285605588 }, { "content": "fn find_user_by_name(name: &str) -> Result<uid_t, UserError> {\n\n users::get_user_by_name(name)\n\n .ok_or_else(|| UserError::NotFound(name.to_owned()))\n\n .map(|user| user.uid())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 21827.66758474944 }, { "content": "fn human_process_description(options: &Options, process: &Process) -> String {\n\n use matcher::MatchMode;\n\n\n\n match options.match_mode {\n\n MatchMode::Basename => format!(\n\n \"{green}{pid}{reset} ({green}{name}{reset})\",\n\n pid = process.pid(),\n\n name = process.name(),\n\n green = options.colors.green(),\n\n reset = options.colors.reset()\n\n ),\n\n MatchMode::Commandline => format!(\n\n \"{green}{pid}{reset} ({green}{name}{reset}): {faded}{cmdline}{reset}\",\n\n pid = process.pid(),\n\n name = process.name(),\n\n cmdline = process.commandline(),\n\n green = options.colors.green(),\n\n faded = options.colors.faded(),\n\n reset = options.colors.reset(),\n\n ),\n", "file_path": "src/main.rs", "rank": 33, "score": 19956.176118352138 }, { "content": "fn load_patterns(options: &Options) -> Result<RegexSet, Error> 
{\n\n if options.output_mode.show_normal() && termion::is_tty(&::std::io::stdin()) {\n\n eprintln!(\n\n \"{yellow}WARNING: Reading processlist from TTY stdin. Exit with ^D when you are done, or ^C to abort.{reset}\",\n\n yellow = options.colors.yellow(),\n\n reset = options.colors.reset(),\n\n );\n\n }\n\n\n\n let stdin = io::stdin();\n\n let patterns: Vec<String> = stdin\n\n .lock()\n\n .lines()\n\n .flat_map(Result::ok)\n\n .map(strip_comment)\n\n .filter(|s| !s.is_empty())\n\n .collect();\n\n\n\n RegexSetBuilder::new(&patterns)\n\n .case_insensitive(true)\n\n .build()\n\n .map_err(|err| err.into())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 19956.176118352138 }, { "content": "fn list_signals() {\n\n // Print user-centric text if stdout is to a terminal. If piping stdout to some other process,\n\n // this text will not be shown.\n\n let is_tty = termion::is_tty(&::std::io::stdout());\n\n\n\n if is_tty {\n\n println!(\"Currently supported signals:\")\n\n };\n\n\n\n for signal in Signal::iterator() {\n\n println!(\"{}\\t{}\", signal.number(), signal);\n\n }\n\n\n\n if is_tty {\n\n println!(\"Signal names does not require the SIG prefix, and are case-insensitive.\");\n\n };\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 35, "score": 19801.00602403868 }, { "content": "fn all_processes(options: &Options, matcher: &Matcher) -> Result<Vec<Process>, Error> {\n\n let iter = match &options.user_mode {\n\n UserMode::Everybody => Process::all()?,\n\n UserMode::OnlyMe => Process::all_from_user(users::get_current_uid())?,\n\n UserMode::Only(name) => Process::all_from_user(find_user_by_name(&name)?)?,\n\n };\n\n\n\n Ok(iter\n\n .flat_map(Result::ok)\n\n .filter(|process| matcher.is_match(process))\n\n .collect::<Vec<_>>())\n\n}\n\n\n\n#[derive(Debug, Fail)]\n\npub enum UserError {\n\n #[fail(display = \"Could not find user with name \\\"{}\\\"\", _0)]\n\n NotFound(String),\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 36, "score": 19353.53804745483 }, { 
"content": "#[macro_use]\n\nextern crate structopt;\n\n#[macro_use]\n\nextern crate failure;\n\n\n\nextern crate nix;\n\nextern crate regex;\n\nextern crate termion;\n\nextern crate users;\n\n\n\nmod matcher;\n\nmod options;\n\nmod processes;\n\nmod signal;\n\n\n\nuse failure::{Error, ResultExt};\n\nuse matcher::Matcher;\n\nuse options::{CliOptions, Options, UserMode};\n\nuse processes::{KillError, Process};\n\nuse regex::{RegexSet, RegexSetBuilder};\n\nuse signal::Signal;\n\nuse std::io;\n\nuse std::io::BufRead;\n\nuse std::time::{Duration, Instant};\n\nuse structopt::StructOpt;\n\nuse users::uid_t;\n\n\n", "file_path": "src/main.rs", "rank": 37, "score": 17.586435443336548 }, { "content": "extern crate users;\n\n\n\nuse failure::Error;\n\nuse nix::sys::signal::kill;\n\nuse nix::unistd::Pid;\n\nuse signal::Signal;\n\nuse std::fs::{read_dir, DirEntry, File, ReadDir};\n\nuse std::io::Read;\n\nuse std::path::{Path, PathBuf};\n\nuse users::uid_t;\n\n\n\npub type ProcIter = Box<Iterator<Item = Result<Process, String>>>;\n\n\n\n#[derive(Debug)]\n\npub struct Process {\n\n pid: Pid,\n\n user_id: uid_t,\n\n name: String,\n\n cmdline: String,\n\n}\n", "file_path": "src/processes.rs", "rank": 38, "score": 13.615350260018303 }, { "content": "use processes::Process;\n\nuse regex::RegexSet;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum MatchMode {\n\n Basename,\n\n Commandline,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Matcher {\n\n regex_set: RegexSet,\n\n mode: MatchMode,\n\n}\n\n\n\nimpl Matcher {\n\n pub fn new(regex_set: RegexSet, mode: MatchMode) -> Self {\n\n Matcher { regex_set, mode }\n\n }\n\n\n\n pub fn is_match(&self, process: &Process) -> bool {\n\n match self.mode {\n\n MatchMode::Basename => self.regex_set.is_match(process.name()),\n\n MatchMode::Commandline => self.regex_set.is_match(process.commandline()),\n\n }\n\n }\n\n}\n", "file_path": "src/matcher.rs", "rank": 39, "score": 10.45397191254575 }, { "content": "\n\n if processes.is_empty() {\n\n return 
Ok(success);\n\n }\n\n }\n\n\n\n // Time is up. Kill remaining processes.\n\n if options.kill {\n\n if options.output_mode.show_verbose() {\n\n eprintln!(\n\n \"{red}Timeout reached. Forcefully shutting down processes.{reset}\",\n\n red = options.colors.red(),\n\n reset = options.colors.reset()\n\n );\n\n }\n\n for process in &processes {\n\n verbose_signal_message(options.kill_signal, options, process);\n\n if !send_with_error_handling(options.kill_signal, options, process) {\n\n success = false;\n\n }\n", "file_path": "src/main.rs", "rank": 40, "score": 9.674662842404212 }, { "content": "# graceful-shutdown\n\n\n\n> For the times where you want to terminate things. Humanely.\n\n\n\n[![Build Status][ci-badge]][ci]\n\n\n\nThis command reads a list of processes from STDIN and shuts them all down\n\ngracefully. Commands will be matched using regular expressions.\n\n\n\nInput supports comments using \"#\", making it simple to have saved recipes.\n\n\n\n```bash\n\ncat ~/.config/graceful-shutdown/browsers\n\n```\n\n\n\n```\n\n# Shuts down all browsers\n\n\n\nqutebrowser\n\n\n\nfirefox # Vanilla Firefox\n\nfirefox-dev.* # Firefox Developer Edition\n\n\n\n(google-)?chrom(e|ium) # Google Chrome + Chromium\n\n```\n\n\n\n```bash\n\ngraceful-shutdown --mine < ~/.config/graceful-shutdown/browsers\n\n```\n\n\n\n## Options\n\n\n\nBy default all matching processes will receive `SIGTERM`, then the command will\n\nwait up to 5 seconds for all processes to terminate and then send `SIGKILL` to\n\nthem.\n\n\n\nThe signals and wait time can be set through the command-line, and the wait\n\ntime can also be disabled, as well as the final kill strike.\n\n\n\n```bash\n\n# Wait up to 15 seconds for Firefox\n\necho \"firefox\" | graceful-shutdown --wait-time 15\n\n\n\n# Don't even wait, just forcefully kill all open man pages immediately\n\necho \"^man$\" | graceful-shutdown --wait-time 0\n\n\n\n# Wait, but give up after the timeout instead of killing the process\n\nif ! 
echo \"^[nmg]?vim\" | graceful-shutdown --quiet --wait-time 30 --no-kill; then\n\n echo \"Failed to exit all instances of vim in 30 seconds…\"\n\nfi\n\n```\n\n\n\n### Signals\n\n\n\nTo list supported signals you can invoke the command with `--list-signals`.\n\n\n\n\n\n### Matching on whole command\n\n\n\nSometimes you want to match processes that have been started with a specific\n\ncommandline (or to ignore those that do). For this you may use the\n\n`--whole-command` option.\n\n\n\n```bash\n\n# Shut down electron shells for YakYak\n\n# Example commandline: \"/usr/lib/electron/electron /usr/share/yakyak/app\"\n\necho \"/electron .*yakyak/app$\" | graceful-shutdown --whole-command --mine\n\n\n\n# Only shut down the main Spotify process, not the zygote and renderer child\n\n# processes.\n", "file_path": "README.md", "rank": 41, "score": 9.312216933550095 }, { "content": "\n\n Err(error) => Err(KillError::UnexpectedError(format!(\"{}\", error))),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Fail)]\n\npub enum KillError {\n\n #[fail(display = \"Invalid signal\")]\n\n InvalidSignal,\n\n #[fail(display = \"Insufficient permission to send signal to this process\")]\n\n NoPermission,\n\n #[fail(display = \"Cannot find process\")]\n\n DoesNotExist,\n\n #[fail(display = \"Unexpected error: {}\", _0)]\n\n UnexpectedError(String),\n\n}\n\n\n", "file_path": "src/processes.rs", "rank": 42, "score": 8.83522537888619 }, { "content": " pub fn is_alive(&self) -> bool {\n\n let mut proc_path = PathBuf::new();\n\n proc_path.push(\"/\");\n\n proc_path.push(\"proc\");\n\n proc_path.push(self.pid.to_string());\n\n\n\n proc_path.exists()\n\n }\n\n\n\n pub fn send(&self, signal: Signal) -> Result<(), KillError> {\n\n use nix::errno::Errno;\n\n use nix::Error;\n\n\n\n match kill(self.pid, signal) {\n\n Ok(()) => Ok(()),\n\n Err(Error::Sys(Errno::EINVAL)) => Err(KillError::InvalidSignal),\n\n Err(Error::Sys(Errno::EPERM)) => Err(KillError::NoPermission),\n\n Err(Error::Sys(Errno::ESRCH)) 
=> Err(KillError::DoesNotExist),\n\n\n\n Err(Error::Sys(errno)) => Err(KillError::UnexpectedError(format!(\"errno {}\", errno))),\n", "file_path": "src/processes.rs", "rank": 43, "score": 8.635424801264096 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n match self.process_iter.next() {\n\n Some(Ok(process)) => {\n\n if process.user_id == self.user {\n\n Some(Ok(process))\n\n } else {\n\n self.next()\n\n }\n\n }\n\n other => other,\n\n }\n\n }\n\n}\n\n\n\nimpl Process {\n\n pub fn all() -> Result<ProcIter, Error> {\n\n ProcessIterator::new().map(|iter| Box::new(iter) as ProcIter)\n\n }\n\n\n\n pub fn all_from_user(user: uid_t) -> Result<ProcIter, Error> {\n", "file_path": "src/processes.rs", "rank": 44, "score": 7.554615649612181 }, { "content": " },\n\n Err(err) => {\n\n if options.output_mode.show_normal() {\n\n eprintln!(\n\n \"{red}ERROR: {message}{reset}\",\n\n message = err,\n\n red = options.colors.red(),\n\n reset = options.colors.reset(),\n\n );\n\n for (level, cause) in err.iter_causes().enumerate() {\n\n eprintln!(\n\n \"{red}{indent:width$}Caused by: {cause}{reset}\",\n\n cause = cause,\n\n indent = \"\",\n\n width = (level + 1) * 2,\n\n red = options.colors.red(),\n\n reset = options.colors.reset(),\n\n );\n\n }\n\n }\n\n exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 6.71262557081937 }, { "content": " }\n\n } else {\n\n if options.output_mode.show_normal() {\n\n eprintln!(\n\n \"{yellow}WARNING: Some processes are still alive.{reset}\",\n\n yellow = options.colors.yellow(),\n\n reset = options.colors.reset()\n\n );\n\n }\n\n if options.output_mode.show_verbose() {\n\n for process in &processes {\n\n eprintln!(\n\n \"Process {process}\",\n\n process = human_process_description(options, process)\n\n );\n\n }\n\n }\n\n success = false;\n\n }\n\n }\n\n\n\n Ok(success)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 46, "score": 6.514363053852108 }, { "content": "\n\npub struct ProcessIterator {\n\n 
read_dir: ReadDir,\n\n}\n\n\n\npub struct UserFilter {\n\n user: uid_t,\n\n process_iter: ProcessIterator,\n\n}\n\n\n", "file_path": "src/processes.rs", "rank": 47, "score": 5.329331996334463 }, { "content": " Ok(Process {\n\n name,\n\n cmdline,\n\n pid: Pid::from_raw(pid),\n\n user_id: uid_of_file(&path)?,\n\n })\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n pub fn commandline(&self) -> &str {\n\n &self.cmdline\n\n }\n\n\n\n pub fn pid(&self) -> Pid {\n\n self.pid\n\n }\n\n\n", "file_path": "src/processes.rs", "rank": 48, "score": 4.5226998858968255 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n // Read next dir entry. If it's not a directory, then skip to the next one again.\n\n // If entry failed to be loaded, skip that one too.\n\n match self.read_dir.next() {\n\n Some(Ok(entry)) => {\n\n if is_dir(&entry) && has_numeric_name(&entry) {\n\n Some(Process::from_entry(&entry))\n\n } else {\n\n self.next()\n\n }\n\n }\n\n Some(Err(_)) => self.next(),\n\n None => None,\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for UserFilter {\n\n type Item = Result<Process, String>;\n\n\n", "file_path": "src/processes.rs", "rank": 49, "score": 4.18806859447545 }, { "content": " // Wait for processess to die\n\n if let Some(wait_time) = options.wait_time {\n\n let start = Instant::now();\n\n\n\n while start.elapsed() < wait_time {\n\n ::std::thread::sleep(Duration::from_millis(100));\n\n\n\n // Remove dead processes\n\n processes.retain(|process| {\n\n let is_alive = process.is_alive();\n\n\n\n if options.output_mode.show_verbose() && !is_alive {\n\n eprintln!(\n\n \"Process shut down: {process}\",\n\n process = human_process_description(options, process),\n\n );\n\n }\n\n\n\n is_alive\n\n });\n", "file_path": "src/main.rs", "rank": 50, "score": 2.3274146431150045 }, { "content": "# Example commandline:\n\n# \"/usr/share/spotify/spotify --force-device-scale-factor=1\"\n\n# \"/usr/share/spotify/spotify\"\n\n# Should not match:\n\n# 
\"/usr/share/spotify/spotify --type=zygote --no-sandbox --fo…\"\n\n# \"/usr/share/spotify/spotify --type=renderer --force-device-…\"\n\necho \"/spotify( --force-device|$)\" | graceful-shutdown --whole-command --mine\n\n```\n\n\n\n## Installation\n\n\n\n<a href=\"https://repology.org/metapackage/graceful-shutdown/versions\">\n\n <img src=\"https://repology.org/badge/vertical-allrepos/graceful-shutdown.svg\" alt=\"Packaging status\" align=\"right\">\n\n</a>\n\n\n\nThis package is available through Arch Linux's AUR repository as\n\n`graceful-shutdown`. You may also compile it from source by downloading the\n\nsource code and install it using `cargo` (Rust's build system and package\n\nmanager):\n\n\n\n```bash\n\ncargo install\n\n```\n\n\n\n### Platform support\n\n\n\nCurrently this software is only supported on Linux. It is possible to add more\n\nplatforms if someone would care to add support for them; PRs are welcome.\n\n\n\n### Completions\n\n\n\nThis command comes with support for shell autocompletions for **bash**,\n\n**zsh**, and **fish**.\n\n\n\nYou can generate and install these completions globally:\n\n\n\n```bash\n\ngraceful-shutdown --generate-completions zsh > _graceful-shutdown\n\ngraceful-shutdown --generate-completions bash > graceful-shutdown.bash\n\ngraceful-shutdown --generate-completions fish > graceful-shutdown.fish\n\n\n\nsudo install -Dm644 _graceful-shutdown \\\n\n /usr/share/zsh/site-functions/_graceful-shutdown\n\n\n\nsudo install -Dm644 graceful-shutdown.bash \\\n\n /usr/share/bash-completion/completions/graceful-shutdown\n\n\n\nsudo install -Dm644 graceful-shutdown.fish \\\n\n /usr/share/fish/completions/graceful-shutdown.fish\n\n```\n\n\n\nIf you have a local source for completions, redirect the output of the\n\n`--generate-completions` command to the appropriate location.\n\n\n\nIf you install through the AUR, then these completions are already installed\n\nfor you automatically.\n\n\n\n## Copyright\n\n\n", "file_path": "README.md", 
"rank": 51, "score": 2.019493828590517 }, { "content": " ProcessIterator::new().map(|iter| {\n\n Box::new(UserFilter {\n\n user,\n\n process_iter: iter,\n\n }) as ProcIter\n\n })\n\n }\n\n\n\n fn from_entry(entry: &DirEntry) -> Result<Process, String> {\n\n let path = entry.path();\n\n let name = read_file(&path.join(\"comm\"))?.trim_right().to_string();\n\n let cmdline = parse_cmdline(&read_file(&path.join(\"cmdline\"))?);\n\n let pid = {\n\n let basename = entry.file_name();\n\n let basename = basename.to_string_lossy();\n\n basename\n\n .parse()\n\n .map_err(|e| format!(\"Failed to parse PID in {}: {}\", basename, e))?\n\n };\n\n\n", "file_path": "src/processes.rs", "rank": 52, "score": 1.8203831529609829 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_strips_comments() {\n\n assert_eq!(\n\n strip_comment(String::from(\"Foobar\")),\n\n String::from(\"Foobar\"),\n\n );\n\n\n\n assert_eq!(strip_comment(String::from(\"Foo#bar\")), String::from(\"Foo\"),);\n\n\n\n assert_eq!(\n\n strip_comment(String::from(\" Complicated # oh yes!! # another one\")),\n\n String::from(\"Complicated\"),\n\n );\n", "file_path": "src/main.rs", "rank": 53, "score": 1.499480837203174 } ]
Rust
src/structs/types/system_boot_information.rs
alesharik/smbios-lib
27dc2fa78afa2163763207165c31a900f72a02e1
use crate::{SMBiosStruct, UndefinedStruct}; use serde::{ser::SerializeStruct, Serialize, Serializer}; use core::{fmt, any}; pub struct SMBiosSystemBootInformation<'a> { parts: &'a UndefinedStruct, } impl<'a> SMBiosStruct<'a> for SMBiosSystemBootInformation<'a> { const STRUCT_TYPE: u8 = 32u8; fn new(parts: &'a UndefinedStruct) -> Self { Self { parts } } fn parts(&self) -> &'a UndefinedStruct { self.parts } } impl<'a> SMBiosSystemBootInformation<'a> { const BOOT_STATUS_OFFSET: usize = 0x0A; const BOOT_STATUS_MAX_SIZE: usize = 0x0A; pub fn boot_status_data(&self) -> Option<SystemBootStatusData<'_>> { let struct_length = self.parts.header.length() as usize; if struct_length < Self::BOOT_STATUS_OFFSET + 1 { return None; } let end_index: usize; if struct_length < Self::BOOT_STATUS_OFFSET + Self::BOOT_STATUS_MAX_SIZE { end_index = struct_length; } else { end_index = Self::BOOT_STATUS_OFFSET + Self::BOOT_STATUS_MAX_SIZE; } self.parts .get_field_data(Self::BOOT_STATUS_OFFSET, end_index) .map(|raw| SystemBootStatusData { raw }) } } impl fmt::Debug for SMBiosSystemBootInformation<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct(any::type_name::<SMBiosSystemBootInformation<'_>>()) .field("header", &self.parts.header) .field("boot_status_data", &self.boot_status_data()) .finish() } } impl Serialize for SMBiosSystemBootInformation<'_> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut state = serializer.serialize_struct("SMBiosSystemBootInformation", 2)?; state.serialize_field("header", &self.parts.header)?; state.serialize_field("boot_status_data", &self.boot_status_data())?; state.end() } } pub struct SystemBootStatusData<'a> { pub raw: &'a [u8], } impl<'a> SystemBootStatusData<'a> { pub fn system_boot_status(&self) -> SystemBootStatus { debug_assert!(self.raw.len() > 0); match self.raw[0] { 0x00 => SystemBootStatus::NoErrors, 0x01 => SystemBootStatus::NoBootableMedia, 0x02 => 
SystemBootStatus::NormalOSFailedToLoad, 0x03 => SystemBootStatus::FirmwareDetectedFailure, 0x04 => SystemBootStatus::OSDetectedFailure, 0x05 => SystemBootStatus::UserRequestedBoot, 0x06 => SystemBootStatus::SystemSecurityViolation, 0x07 => SystemBootStatus::PreviouslyRequestedImage, 0x08 => SystemBootStatus::SystemWatchdogTimerExpired, _ => SystemBootStatus::None, } } } impl fmt::Debug for SystemBootStatusData<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct(any::type_name::<SMBiosSystemBootInformation<'_>>()) .field("system_boot_status", &self.system_boot_status()) .finish() } } impl Serialize for SystemBootStatusData<'_> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut state = serializer.serialize_struct("SystemBootStatusData", 1)?; state.serialize_field("system_boot_status", &self.system_boot_status())?; state.end() } } #[derive(Serialize, Debug, PartialEq, Eq)] pub enum SystemBootStatus { NoErrors, NoBootableMedia, NormalOSFailedToLoad, FirmwareDetectedFailure, OSDetectedFailure, UserRequestedBoot, SystemSecurityViolation, PreviouslyRequestedImage, SystemWatchdogTimerExpired, None, } #[cfg(test)] mod tests { use super::*; #[test] fn unit_test() { let struct_type32 = vec![ 0x20, 0x14, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; let parts = UndefinedStruct::new(&struct_type32); let test_struct = SMBiosSystemBootInformation::new(&parts); let boot_status_data = test_struct.boot_status_data().unwrap(); assert_eq!( boot_status_data.raw, &[0x00u8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] as &[u8] ); assert_eq!( boot_status_data.system_boot_status(), SystemBootStatus::NoErrors ); let struct_type32 = vec![ 0x20, 0x0C, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01, ]; let parts = UndefinedStruct::new(&struct_type32); let test_struct = SMBiosSystemBootInformation::new(&parts); let 
boot_status_data = test_struct.boot_status_data().unwrap(); assert_eq!(boot_status_data.raw, &[0x02u8, 0x01] as &[u8]); assert_eq!( boot_status_data.system_boot_status(), SystemBootStatus::NormalOSFailedToLoad ); let struct_type32 = vec![ 0x20, 0x0F, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01, ]; let parts = UndefinedStruct::new(&struct_type32); let test_struct = SMBiosSystemBootInformation::new(&parts); assert!(test_struct.boot_status_data().is_none()); } }
use crate::{SMBiosStruct, UndefinedStruct}; use serde::{ser::SerializeStruct, Serialize, Serializer}; use core::{fmt, any}; pub struct SMBiosSystemBootInformation<'a> { parts: &'a UndefinedStruct, } impl<'a> SMBiosStruct<'a> for SMBiosSystemBootInformation<'a> { const STRUCT_TYPE: u8 = 32u8; fn new(parts: &'a UndefinedStruct) -> Self { Self { parts } } fn parts(&self) -> &'a UndefinedStruct { self.parts } } impl<'a> SMBiosSystemBootInformation<'a> { const BOOT_STATUS_OFFSET: usize = 0x0A; const BOOT_STATUS_MAX_SIZE: usize = 0x0A; pub fn boot_status_data(&self) -> Option<SystemBootStatusData<'_>> { let struct_length = self.parts.header.length() as usize; if struct_length < Self::BOOT_STATUS_OFFSET + 1 { return None; } let end_index: usize; if struct_length < Self::BOOT_STATUS_OFFSET + Self::BOOT_STATUS_MAX_SIZE { end_index = struct_length; } else { end_index = Self::BOOT_STATUS_OFFSET + Self::BOOT_STATUS_MAX_SIZE; } self.parts .get_field_data(Self::BOOT_STATUS_OFFSET, end_index) .map(|raw| SystemBootStatusData { raw }) } } impl fmt::Debug for SMBiosSystemBootInformation<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct(any::type_name::<SMBiosSystemBootInformation<'_>>()) .field("header", &self.parts.header) .field("boot_status_data", &self.boot_status_data()) .finish() } } impl Serialize for SMBiosSystemBootInformation<'_> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut state = serializer.serialize_struct("SMBiosSystemBootInformation", 2)?; state.serialize_field("header", &self.parts.header)?; state.serialize_field("boot_status_data", &self.boot_status_data())?; state.end() } } pub struct SystemBootStatusData<'a> { pub raw: &'a [u8], } impl<'a> SystemBootStatusData<'a> { pub fn system_boot_status(&self) -> SystemBootStatus { debug_assert!(self.raw.len() > 0); match self.raw[0] { 0x00 => SystemBootStatus::NoErrors, 0x01 => SystemBootStatus::NoBootableMedia, 0x02 => 
SystemBootStatus::NormalOSFailedToLoad, 0x03 => SystemBootStatus::FirmwareDetectedFailure, 0x04 => SystemBootStatus::OSDetectedFailure, 0x05 => SystemBootStatus::UserRequestedBoot, 0x06 => SystemBootStatus::SystemSecurityViolation, 0x07 => SystemBootStatus::PreviouslyRequestedImage, 0x08 => SystemBootStatus::SystemWatchdogTimerExpired, _ => SystemBootStatus::None, } } } impl fmt::Debug for SystemBootStatusData<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct(any::type_name::<SMBiosSystemBootInformation<'_>>()) .field("system_boot_status", &self.system_boot_status()) .finish() } } impl Serialize for SystemBootStatusData<'_> {
} #[derive(Serialize, Debug, PartialEq, Eq)] pub enum SystemBootStatus { NoErrors, NoBootableMedia, NormalOSFailedToLoad, FirmwareDetectedFailure, OSDetectedFailure, UserRequestedBoot, SystemSecurityViolation, PreviouslyRequestedImage, SystemWatchdogTimerExpired, None, } #[cfg(test)] mod tests { use super::*; #[test] fn unit_test() { let struct_type32 = vec![ 0x20, 0x14, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; let parts = UndefinedStruct::new(&struct_type32); let test_struct = SMBiosSystemBootInformation::new(&parts); let boot_status_data = test_struct.boot_status_data().unwrap(); assert_eq!( boot_status_data.raw, &[0x00u8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] as &[u8] ); assert_eq!( boot_status_data.system_boot_status(), SystemBootStatus::NoErrors ); let struct_type32 = vec![ 0x20, 0x0C, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01, ]; let parts = UndefinedStruct::new(&struct_type32); let test_struct = SMBiosSystemBootInformation::new(&parts); let boot_status_data = test_struct.boot_status_data().unwrap(); assert_eq!(boot_status_data.raw, &[0x02u8, 0x01] as &[u8]); assert_eq!( boot_status_data.system_boot_status(), SystemBootStatus::NormalOSFailedToLoad ); let struct_type32 = vec![ 0x20, 0x0F, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01, ]; let parts = UndefinedStruct::new(&struct_type32); let test_struct = SMBiosSystemBootInformation::new(&parts); assert!(test_struct.boot_status_data().is_none()); } }
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut state = serializer.serialize_struct("SystemBootStatusData", 1)?; state.serialize_field("system_boot_status", &self.system_boot_status())?; state.end() }
function_block-full_function
[ { "content": "/// Returns smbios raw data\n\npub fn raw_smbios_from_device() -> Result<Vec<u8>, Error> {\n\n Ok(try_load_macos_table()?)\n\n}\n", "file_path": "src/macos/platform.rs", "rank": 0, "score": 173349.63818810516 }, { "content": "/// Returns smbios raw data\n\npub fn raw_smbios_from_device() -> Result<Vec<u8>, Error> {\n\n use std::ptr;\n\n\n\n unsafe {\n\n const MEMORY_ERROR_MESSAGE: &'static str = \"Memory error\";\n\n const RAW_SMBIOS_SIGNATURE: u32 = 1381190978u32; // 'RSMB' ASCII bytes == 1381190978\n\n let max_i32: u32 = i32::MAX.try_into().unwrap();\n\n let firmware_table_buffer_ptr: *mut u8 = ptr::null_mut();\n\n\n\n let buffer_size =\n\n ffi::GetSystemFirmwareTable(RAW_SMBIOS_SIGNATURE, 0, firmware_table_buffer_ptr, 0);\n\n\n\n // 0 is win32 exception, > i32::MAX is memory exception\n\n if buffer_size == 0 || buffer_size > max_i32 {\n\n return Err(Error::new(ErrorKind::Other, MEMORY_ERROR_MESSAGE));\n\n }\n\n\n\n let mut firmware_table_buffer = Vec::with_capacity(buffer_size as usize);\n\n let firmware_table_buffer_ptr = firmware_table_buffer.as_mut_ptr();\n\n\n", "file_path": "src/windows/platform.rs", "rank": 1, "score": 173349.63818810516 }, { "content": "/// Returns smbios raw data via /dev/mem (on FreeBSD)\n\npub fn raw_smbios_from_device() -> Result<Vec<u8>, Error> {\n\n use std::io::{prelude::*, SeekFrom};\n\n const RANGE_START: u64 = 0x000F0000u64;\n\n const RANGE_END: u64 = 0x000FFFFFu64;\n\n let structure_table_address: u64;\n\n let structure_table_length: usize;\n\n\n\n let mut dev_mem = std::fs::File::open(DEV_MEM_FILE)?;\n\n\n\n match SMBiosEntryPoint32::try_scan_from_file(&mut dev_mem, RANGE_START..=RANGE_END) {\n\n Ok(entry_point) => {\n\n structure_table_address = entry_point.structure_table_address() as u64;\n\n structure_table_length = entry_point.structure_table_length() as usize;\n\n }\n\n Err(error) => {\n\n if error.kind() != ErrorKind::UnexpectedEof {\n\n return Err(error);\n\n }\n\n\n\n let entry_point =\n", "file_path": 
"src/unix/platform.rs", "rank": 2, "score": 173349.23845729543 }, { "content": "/// dumps raw data into a file\n\npub fn dump_raw(data: Vec<u8>, out_path: &Path) -> Result<(), Error> {\n\n let f = File::create(&out_path)?;\n\n let mut f = BufWriter::new(f);\n\n f.write_all(&data)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::path::PathBuf;\n\n\n\n #[test]\n\n fn test_load_smbios_table_data() {\n\n let mut path = PathBuf::new();\n\n path.push(\".\");\n\n path.push(\"tests\");\n\n path.push(\"jeffgerlap_3_2_0\");\n\n path.set_extension(\"dat\");\n\n\n", "file_path": "src/file_io.rs", "rank": 3, "score": 161520.52199768252 }, { "content": "/// Loads raw smbios data files from a given _folder_ and returns [Vec<SMBiosStructTable>]\n\npub fn load_raw_files(folder: &Path) -> Vec<SMBiosData> {\n\n assert!(folder.is_dir());\n\n let mut result = Vec::new();\n\n\n\n let entries = read_dir(folder)\n\n .expect(\"valid files\")\n\n .map(|res| res.map(|e| e.path()))\n\n .collect::<Result<Vec<_>, Error>>()\n\n .expect(\"msg\");\n\n\n\n for elem in entries {\n\n let smbios_table_data = load_smbios_data_from_file(&elem);\n\n match smbios_table_data {\n\n Ok(data) => result.push(data),\n\n Err(_) => {}\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/file_io.rs", "rank": 4, "score": 123268.56127385762 }, { "content": "/// Verifies EPS and IEPS Checksums\n\n///\n\n/// The EPS and IEPS contain a checksum value.\n\n///\n\n/// The checksum value, when added to all other bytes in the EPS, results in\n\n/// the value 00h (using 8-bit addition [Wrapping] calculations).\n\n/// Values in the EPS are summed starting at offset 00h, for 'entry_point_length'\n\n/// bytes.\n\nfn verify_checksum(data: &[u8]) -> bool {\n\n let mut sum = Wrapping(0u8);\n\n\n\n data.iter().for_each(|b| sum += Wrapping(*b));\n\n\n\n sum == Wrapping(0)\n\n}\n", "file_path": "src/core/entry_point.rs", "rank": 5, "score": 97615.04775595394 }, { "content": "fn 
ser_strings<S>(data: &Strings, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(format!(\"{:?}\", data).as_str())\n\n}\n\n\n\nimpl<'a> UndefinedStruct {\n\n /// Creates a structure instance of the given byte array slice\n\n pub fn new(raw: &Vec<u8>) -> Self {\n\n match raw.get(Header::LENGTH_OFFSET) {\n\n Some(&header_length) => UndefinedStruct {\n\n header: Header::new(raw[..Header::SIZE].try_into().expect(\"4 bytes\")),\n\n fields: raw.get(..(header_length as usize)).unwrap_or(&[]).to_vec(),\n\n strings: {\n\n Strings::new(\n\n raw.get((header_length as usize)..raw.len() - 2)\n\n .unwrap_or(&[])\n\n .to_vec(),\n\n )\n", "file_path": "src/core/undefined_struct.rs", "rank": 6, "score": 96865.7586157056 }, { "content": "#[test]\n\nfn struct_struct_association() {\n\n match table_load_from_device() {\n\n Ok(data) => match data.first::<SMBiosMemoryDevice>() {\n\n Some(first_memory_device) => {\n\n let handle = first_memory_device.physical_memory_array_handle().unwrap();\n\n match data.find_by_handle(&handle) {\n\n Some(undefined_struct) => {\n\n let physical_memory_array = undefined_struct.defined_struct();\n\n println!(\"{:#?}\", physical_memory_array)\n\n }\n\n None => println!(\"No Physical Memory Array (Type 16) structure found\"),\n\n }\n\n }\n\n None => println!(\"No Memory Device (Type 17) structure found\"),\n\n },\n\n Err(err) => println!(\"failure: {:?}\", err),\n\n }\n\n}\n\n\n\n/// Test find() - finds the first populated CPU socket\n", "file_path": "tests/integration_test.rs", "rank": 7, "score": 96151.05136750042 }, { "content": "fn try_load_macos_table() -> Result<Vec<u8>, Error> {\n\n let service = AppleSMBiosService::try_init()?;\n\n\n\n unsafe {\n\n let smbios_table_name = CString::new(\"SMBIOS\").expect(\"CString::new failed\");\n\n\n\n let option_bits: IOOptionBits = 0;\n\n let data_ref = IORegistryEntryCreateCFProperty(\n\n service.service_handle,\n\n CFSTR(smbios_table_name.as_ptr()),\n\n 
kCFAllocatorDefault,\n\n option_bits,\n\n ) as CFDataRef;\n\n\n\n if data_ref.is_null() {\n\n return Err(Error::new(ErrorKind::NotFound, \"SMBIOS is unreachable\"));\n\n }\n\n\n\n if !data_ref.is_null() {\n\n CFRelease(data_ref.as_void_ptr());\n", "file_path": "src/macos/platform.rs", "rank": 8, "score": 91663.23636487537 }, { "content": "/// Loads SMBIOS table data ([SMBiosData]) from the device\n\npub fn table_load_from_device() -> Result<SMBiosData, Error> {\n\n Ok(load_windows_smbios_data()?.smbios_data)\n\n}\n\n\n", "file_path": "src/windows/platform.rs", "rank": 9, "score": 89864.56169286802 }, { "content": "/// Loads [SMBiosData] from the device via /dev/mem (on FreeBSD)\n\npub fn table_load_from_device() -> Result<SMBiosData, Error> {\n\n const RANGE_START: u64 = 0x000F0000u64;\n\n const RANGE_END: u64 = 0x000FFFFFu64;\n\n let structure_table_address: u64;\n\n let structure_table_length: u32;\n\n let version: SMBiosVersion;\n\n\n\n let mut dev_mem = std::fs::File::open(DEV_MEM_FILE)?;\n\n\n\n match SMBiosEntryPoint32::try_scan_from_file(&mut dev_mem, RANGE_START..=RANGE_END) {\n\n Ok(entry_point) => {\n\n structure_table_address = entry_point.structure_table_address() as u64;\n\n structure_table_length = entry_point.structure_table_length() as u32;\n\n\n\n version = SMBiosVersion {\n\n major: entry_point.major_version(),\n\n minor: entry_point.minor_version(),\n\n revision: 0,\n\n }\n\n }\n", "file_path": "src/unix/platform.rs", "rank": 10, "score": 89864.56169286802 }, { "content": "/// Loads SMBIOS table data ([SMBiosData]) from the device\n\npub fn table_load_from_device() -> Result<SMBiosData, Error> {\n\n let entry_point = try_load_macos_entry_point()?;\n\n\n\n let version = SMBiosVersion {\n\n major: entry_point.major_version(),\n\n minor: entry_point.minor_version(),\n\n revision: 0,\n\n };\n\n\n\n let table = try_load_macos_table()?;\n\n\n\n Ok(SMBiosData::from_vec_and_version(table, Some(version)))\n\n}\n\n\n", "file_path": "src/macos/platform.rs", 
"rank": 11, "score": 89864.56169286802 }, { "content": "/// # SMBIOS Structure\n\n///\n\n/// A type implementing this trait provides a representation of an SMBIOS type.\n\npub trait SMBiosStruct<'a> {\n\n /// The SMBIOS structure type\n\n ///\n\n /// Example: System Information (Type 1) this is set to 1.\n\n const STRUCT_TYPE: u8;\n\n\n\n /// Creates a new instance of the implementing SMBIOS type\n\n fn new(parts: &'a UndefinedStruct) -> Self;\n\n\n\n /// Contains the standard parts/sections of the implementing SMBIOS type.\n\n fn parts(&self) -> &'a UndefinedStruct;\n\n}\n", "file_path": "src/structs/structure.rs", "rank": 12, "score": 88973.19683382442 }, { "content": "/// Calls the Windows kernel32 function [GetSystemFirmwareTable](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemfirmwaretable)\n\npub fn load_windows_smbios_data() -> Result<WinSMBiosData, Error> {\n\n match raw_smbios_from_device() {\n\n Ok(raw) => WinSMBiosData::new(raw),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/windows/platform.rs", "rank": 13, "score": 86673.53045306924 }, { "content": "/// Loads raw smbios data from a file and returns [SMBiosData] or [std::io::Error] on error.\n\n///\n\n/// Currently supports reading raw files containing only SMBIOS table data or\n\n/// Windows raw files containing the windows header and SMBIOS table data.\n\npub fn load_smbios_data_from_file(file_path: &Path) -> Result<SMBiosData, Error> {\n\n let data = read(file_path)?;\n\n if WinSMBiosData::is_valid_win_smbios_data(&data) {\n\n let win_smbios = WinSMBiosData::new(data)\n\n .expect(\"Structure shouldn't be invalid it was already checked.\");\n\n Ok(win_smbios.smbios_data)\n\n } else {\n\n Ok(SMBiosData::from_vec_and_version(data, None))\n\n }\n\n}\n\n\n", "file_path": "src/file_io.rs", "rank": 14, "score": 80628.958249847 }, { "content": "#[cfg(target_family = \"windows\")]\n\n#[test]\n\nfn windows_dump() {\n\n match load_windows_smbios_data() {\n\n 
Ok(windows_data) => {\n\n println!(\"windows_data: {:#?}\", windows_data);\n\n }\n\n Err(err) => panic!(\"failure: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "tests/integration_test.rs", "rank": 15, "score": 60170.72734726544 }, { "content": "#[test]\n\nfn retrieve_system_uuid() {\n\n match table_load_from_device() {\n\n Ok(data) => match data.find_map(|sys_info: SMBiosSystemInformation| sys_info.uuid()) {\n\n Some(uuid) => println!(\"System Information UUID == {:?}\", uuid),\n\n None => println!(\"No System Information (Type 1) structure found with a UUID field\"),\n\n },\n\n Err(err) => println!(\"failure: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "tests/integration_test.rs", "rank": 16, "score": 59033.35758470165 }, { "content": "#[test]\n\nfn print_all_memory_devices() {\n\n match table_load_from_device() {\n\n Ok(data) => {\n\n for memory_device in data.collect::<SMBiosMemoryDevice>() {\n\n println!(\"{:#?}\", memory_device);\n\n }\n\n }\n\n Err(err) => println!(\"failure: {:?}\", err),\n\n }\n\n}\n\n\n\n/// Finds an associated struct by handle\n", "file_path": "tests/integration_test.rs", "rank": 17, "score": 59033.35758470165 }, { "content": "#[test]\n\nfn find_first_cpu() {\n\n match table_load_from_device() {\n\n Ok(data) => match data.find(|proc_info: &SMBiosProcessorInformation| match (proc_info.status(), proc_info.processor_type()) {\n\n (Some(status), Some(proc_type)) => { status.socket_populated() && proc_type.value == ProcessorType::CentralProcessor }\n\n _ => { false }\n\n }) {\n\n Some(first_cpu) => {\n\n println!(\"First populated CPU socket: {:#?}\", first_cpu);\n\n }\n\n None => println!(\"No Processor Information (Type 4) structure found that is a CPU with a populated socket\"),\n\n },\n\n Err(err) => println!(\"Table load failure: {:?}\", err),\n\n }\n\n}\n\n\n\n/// Test filter() - finds all populated memory sockets\n", "file_path": "tests/integration_test.rs", "rank": 18, "score": 59033.35758470165 }, { "content": "#[test]\n\nfn 
find_installed_memory() {\n\n match table_load_from_device() {\n\n Err(err) => println!(\"Table load failure: {:?}\", err),\n\n Ok(data) => data\n\n .filter(\n\n |memory_device: &SMBiosMemoryDevice| match memory_device.size() {\n\n Some(size) => size != MemorySize::NotInstalled,\n\n _ => false,\n\n },\n\n )\n\n .for_each(|installed_memory| println!(\"Installed memory: {:#X?}\", installed_memory)),\n\n }\n\n}\n", "file_path": "tests/integration_test.rs", "rank": 19, "score": 59033.35758470165 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let file_option = \"f\";\n\n let output_option = \"o\";\n\n let string_option = \"s\";\n\n let json_option = \"j\";\n\n\n\n let args: Vec<String> = std::env::args().collect();\n\n let mut opts = getopts::Options::new();\n\n opts.optopt(file_option, \"\", \"read smbios table from file\", \"FILE\");\n\n opts.optopt(output_option, \"\", \"dump smbios table to a file\", \"FILE\");\n\n opts.optopt(\n\n string_option,\n\n \"\",\n\n \"Only display the value of the DMI string identified by KEYWORD.\",\n\n \"KEYWORD\",\n\n );\n\n opts.optflag(json_option, \"\", \"output in json format\");\n\n\n\n let matches = opts.parse(&args[1..])?;\n\n\n", "file_path": "src/main.rs", "rank": 20, "score": 45970.47350490376 }, { "content": "struct AppleSMBiosService {\n\n pub service_handle: io_service_t,\n\n}\n\n\n\nimpl AppleSMBiosService {\n\n fn try_init() -> Result<Self, Error> {\n\n unsafe {\n\n let service_name = CString::new(\"AppleSMBIOS\").expect(\"CString::new failed\");\n\n let mut master_port: mach_port_t = port::MACH_PORT_NULL;\n\n\n\n IOMasterPort(port::MACH_PORT_NULL, &mut master_port);\n\n\n\n let service: io_service_t =\n\n IOServiceGetMatchingService(master_port, IOServiceMatching(service_name.as_ptr()));\n\n\n\n if service == port::MACH_PORT_NULL {\n\n return Err(Error::new(\n\n ErrorKind::NotFound,\n\n \"AppleSMBIOS service is unreachable\",\n\n ));\n", "file_path": "src/macos/platform.rs", "rank": 21, 
"score": 45498.93082412947 }, { "content": "fn try_load_macos_entry_point() -> Result<SMBiosEntryPoint32, Error> {\n\n let service = AppleSMBiosService::try_init()?;\n\n\n\n unsafe {\n\n let smbios_entry_point_name = CString::new(\"SMBIOS-EPS\").expect(\"CString::new failed\");\n\n\n\n let option_bits: IOOptionBits = 0;\n\n let data_ref = IORegistryEntryCreateCFProperty(\n\n service.service_handle,\n\n CFSTR(smbios_entry_point_name.as_ptr()),\n\n kCFAllocatorDefault,\n\n option_bits,\n\n ) as CFDataRef;\n\n\n\n if data_ref.is_null() {\n\n return Err(Error::new(\n\n ErrorKind::NotFound,\n\n \"SMBIOS-EPS entry point is unreachable\",\n\n ));\n\n }\n", "file_path": "src/macos/platform.rs", "rank": 22, "score": 44379.862716095064 }, { "content": "fn string_keyword(keyword: String, data: &SMBiosData) -> Result<String, BiosParseError> {\n\n match keyword.to_lowercase().as_str() {\n\n \"bios-vendor\" => data\n\n .find_map(|bios_info: SMBiosInformation| bios_info.vendor())\n\n .ok_or(BiosParseError::BiosVendorNotFound),\n\n \"bios-version\" => data\n\n .find_map(|bios_info: SMBiosInformation| bios_info.version())\n\n .ok_or(BiosParseError::BiosVersionNotFound),\n\n \"bios-release-date\" => data\n\n .find_map(|bios_info: SMBiosInformation| bios_info.release_date())\n\n .ok_or(BiosParseError::BiosReleaseDateNotFound),\n\n \"bios-revision\" => data\n\n .find_map(|bios_info: SMBiosInformation| {\n\n match (\n\n bios_info.system_bios_major_release(),\n\n bios_info.system_bios_minor_release(),\n\n ) {\n\n (Some(major), Some(minor)) => Some(format!(\"{}.{}\", major, minor)),\n\n _ => None,\n\n }\n", "file_path": "src/main.rs", "rank": 23, "score": 40399.894587586576 }, { "content": " }\n\n\n\n fn add(&mut self, elem: DefinedStruct<'a>) {\n\n self.0.push(elem);\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for DefinedStructTable<'a> {\n\n type Item = DefinedStruct<'a>;\n\n type IntoIter = IntoIter<Self::Item>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter()\n\n 
}\n\n}\n\n\n\nimpl<'a> FromIterator<&'a UndefinedStruct> for DefinedStructTable<'a> {\n\n fn from_iter<I: IntoIterator<Item = &'a UndefinedStruct>>(iter: I) -> Self {\n\n let mut defined_struct_table = DefinedStructTable::new();\n\n\n\n for undefined_struct in iter {\n\n defined_struct_table.add(undefined_struct.into());\n\n }\n\n\n\n defined_struct_table\n\n }\n\n}\n", "file_path": "src/structs/defined_struct.rs", "rank": 24, "score": 38356.61507273994 }, { "content": "//! [DefinedStruct] and [DefinedStructTable] perform downcast operations\n\n//! via into() and into_iter() trait functions for [UndefinedStruct].\n\n\n\nuse serde::Serialize;\n\nuse core::iter::FromIterator;\n\n#[cfg(feature = \"no_std\")]\n\nuse alloc::vec::{Vec, IntoIter};\n\n#[cfg(not(feature = \"no_std\"))]\n\nuse std::vec::IntoIter;\n\n\n\nuse crate::core::UndefinedStruct;\n\n\n\nuse super::*;\n\n\n\n/// # SMBIOS Standard Defined Structure\n\n///\n\n/// Represents one of the SMBIOS defined structures or, in the case\n\n/// of an OEM defined structure, as a generically defined Unknown variant\n\n#[derive(Serialize, Debug)]\n\npub enum DefinedStruct<'a> {\n", "file_path": "src/structs/defined_struct.rs", "rank": 25, "score": 38355.16733428841 }, { "content": " SMBiosInactive::STRUCT_TYPE => {\n\n DefinedStruct::Inactive(SMBiosInactive::new(undefined_struct))\n\n }\n\n SMBiosEndOfTable::STRUCT_TYPE => {\n\n DefinedStruct::EndOfTable(SMBiosEndOfTable::new(undefined_struct))\n\n }\n\n _ => DefinedStruct::Undefined(SMBiosUnknown::new(undefined_struct)),\n\n }\n\n }\n\n}\n\n\n\n/// # Defined Struct Table\n\n///\n\n/// Contains a list of [DefinedStruct] items.\n\n#[derive(Serialize, Debug)]\n\npub struct DefinedStructTable<'a>(Vec<DefinedStruct<'a>>);\n\n\n\nimpl<'a> DefinedStructTable<'a> {\n\n fn new() -> DefinedStructTable<'a> {\n\n DefinedStructTable(Vec::new())\n", "file_path": "src/structs/defined_struct.rs", "rank": 26, "score": 38351.792063081506 }, { "content": "}\n\n\n\nimpl<'a> From<&'a 
UndefinedStruct> for DefinedStruct<'a> {\n\n fn from(undefined_struct: &'a UndefinedStruct) -> Self {\n\n match undefined_struct.header.struct_type() {\n\n SMBiosInformation::STRUCT_TYPE => {\n\n DefinedStruct::Information(SMBiosInformation::new(undefined_struct))\n\n }\n\n SMBiosSystemInformation::STRUCT_TYPE => {\n\n DefinedStruct::SystemInformation(SMBiosSystemInformation::new(undefined_struct))\n\n }\n\n SMBiosBaseboardInformation::STRUCT_TYPE => DefinedStruct::BaseBoardInformation(\n\n SMBiosBaseboardInformation::new(undefined_struct),\n\n ),\n\n SMBiosSystemChassisInformation::STRUCT_TYPE => DefinedStruct::SystemChassisInformation(\n\n SMBiosSystemChassisInformation::new(undefined_struct),\n\n ),\n\n SMBiosProcessorInformation::STRUCT_TYPE => DefinedStruct::ProcessorInformation(\n\n SMBiosProcessorInformation::new(undefined_struct),\n\n ),\n", "file_path": "src/structs/defined_struct.rs", "rank": 27, "score": 38350.786086008586 }, { "content": " DefinedStruct::PortableBattery(SMBiosPortableBattery::new(undefined_struct))\n\n }\n\n SMBiosSystemReset::STRUCT_TYPE => {\n\n DefinedStruct::SystemReset(SMBiosSystemReset::new(undefined_struct))\n\n }\n\n SMBiosHardwareSecurity::STRUCT_TYPE => {\n\n DefinedStruct::HardwareSecurity(SMBiosHardwareSecurity::new(undefined_struct))\n\n }\n\n SMBiosSystemPowerControls::STRUCT_TYPE => {\n\n DefinedStruct::SystemPowerControls(SMBiosSystemPowerControls::new(undefined_struct))\n\n }\n\n SMBiosVoltageProbe::STRUCT_TYPE => {\n\n DefinedStruct::VoltageProbe(SMBiosVoltageProbe::new(undefined_struct))\n\n }\n\n SMBiosCoolingDevice::STRUCT_TYPE => {\n\n DefinedStruct::CoolingDevice(SMBiosCoolingDevice::new(undefined_struct))\n\n }\n\n SMBiosTemperatureProbe::STRUCT_TYPE => {\n\n DefinedStruct::TemperatureProbe(SMBiosTemperatureProbe::new(undefined_struct))\n\n }\n", "file_path": "src/structs/defined_struct.rs", "rank": 28, "score": 38342.43355625798 }, { "content": " SMBiosOemStrings::STRUCT_TYPE => {\n\n 
DefinedStruct::OemStrings(SMBiosOemStrings::new(undefined_struct))\n\n }\n\n SMBiosSystemConfigurationOptions::STRUCT_TYPE => {\n\n DefinedStruct::SystemConfigurationOptions(SMBiosSystemConfigurationOptions::new(\n\n undefined_struct,\n\n ))\n\n }\n\n SMBiosBiosLanguageInformation::STRUCT_TYPE => DefinedStruct::LanguageInformation(\n\n SMBiosBiosLanguageInformation::new(undefined_struct),\n\n ),\n\n SMBiosGroupAssociations::STRUCT_TYPE => {\n\n DefinedStruct::GroupAssociations(SMBiosGroupAssociations::new(undefined_struct))\n\n }\n\n SMBiosSystemEventLog::STRUCT_TYPE => {\n\n DefinedStruct::EventLog(SMBiosSystemEventLog::new(undefined_struct))\n\n }\n\n SMBiosPhysicalMemoryArray::STRUCT_TYPE => {\n\n DefinedStruct::PhysicalMemoryArray(SMBiosPhysicalMemoryArray::new(undefined_struct))\n\n }\n", "file_path": "src/structs/defined_struct.rs", "rank": 29, "score": 38342.3978796004 }, { "content": " SMBiosMemoryControllerInformation::STRUCT_TYPE => {\n\n DefinedStruct::MemoryControllerInformation(SMBiosMemoryControllerInformation::new(\n\n undefined_struct,\n\n ))\n\n }\n\n SMBiosMemoryModuleInformation::STRUCT_TYPE => DefinedStruct::MemoryModuleInformation(\n\n SMBiosMemoryModuleInformation::new(undefined_struct),\n\n ),\n\n SMBiosCacheInformation::STRUCT_TYPE => {\n\n DefinedStruct::CacheInformation(SMBiosCacheInformation::new(undefined_struct))\n\n }\n\n SMBiosPortConnectorInformation::STRUCT_TYPE => DefinedStruct::PortConnectorInformation(\n\n SMBiosPortConnectorInformation::new(undefined_struct),\n\n ),\n\n SMBiosSystemSlot::STRUCT_TYPE => {\n\n DefinedStruct::SystemSlot(SMBiosSystemSlot::new(undefined_struct))\n\n }\n\n SMBiosOnBoardDeviceInformation::STRUCT_TYPE => DefinedStruct::OnBoardDeviceInformation(\n\n SMBiosOnBoardDeviceInformation::new(undefined_struct),\n\n ),\n", "file_path": "src/structs/defined_struct.rs", "rank": 30, "score": 38342.39100579855 }, { "content": " SMBiosElectricalCurrentProbe::STRUCT_TYPE => DefinedStruct::ElectricalCurrentProbe(\n\n 
SMBiosElectricalCurrentProbe::new(undefined_struct),\n\n ),\n\n SMBiosOutOfBandRemoteAccess::STRUCT_TYPE => DefinedStruct::OutOfBandRemoteAccess(\n\n SMBiosOutOfBandRemoteAccess::new(undefined_struct),\n\n ),\n\n SMBiosBisEntryPoint::STRUCT_TYPE => {\n\n DefinedStruct::BisEntryPoint(SMBiosBisEntryPoint::new(undefined_struct))\n\n }\n\n SMBiosSystemBootInformation::STRUCT_TYPE => DefinedStruct::SystemBootInformation(\n\n SMBiosSystemBootInformation::new(undefined_struct),\n\n ),\n\n SMBiosMemoryErrorInformation64::STRUCT_TYPE => {\n\n DefinedStruct::MemoryErrorInformation64Bit(SMBiosMemoryErrorInformation64::new(\n\n undefined_struct,\n\n ))\n\n }\n\n SMBiosManagementDevice::STRUCT_TYPE => {\n\n DefinedStruct::ManagementDevice(SMBiosManagementDevice::new(undefined_struct))\n\n }\n", "file_path": "src/structs/defined_struct.rs", "rank": 31, "score": 38342.37730479804 }, { "content": " SMBiosManagementDeviceComponent::STRUCT_TYPE => {\n\n DefinedStruct::ManagementDeviceComponent(SMBiosManagementDeviceComponent::new(\n\n undefined_struct,\n\n ))\n\n }\n\n SMBiosManagementDeviceThresholdData::STRUCT_TYPE => {\n\n DefinedStruct::ManagementDeviceThresholdData(\n\n SMBiosManagementDeviceThresholdData::new(undefined_struct),\n\n )\n\n }\n\n SMBiosMemoryChannel::STRUCT_TYPE => {\n\n DefinedStruct::MemoryChannel(SMBiosMemoryChannel::new(undefined_struct))\n\n }\n\n SMBiosIpmiDeviceInformation::STRUCT_TYPE => DefinedStruct::IpmiDeviceInformation(\n\n SMBiosIpmiDeviceInformation::new(undefined_struct),\n\n ),\n\n SMBiosSystemPowerSupply::STRUCT_TYPE => {\n\n DefinedStruct::SystemPowerSupply(SMBiosSystemPowerSupply::new(undefined_struct))\n\n }\n\n SMBiosAdditionalInformation::STRUCT_TYPE => DefinedStruct::AdditionalInformation(\n", "file_path": "src/structs/defined_struct.rs", "rank": 32, "score": 38342.37708744291 }, { "content": " SMBiosMemoryDevice::STRUCT_TYPE => {\n\n DefinedStruct::MemoryDevice(SMBiosMemoryDevice::new(undefined_struct))\n\n }\n\n 
SMBiosMemoryErrorInformation32::STRUCT_TYPE => {\n\n DefinedStruct::MemoryErrorInformation32Bit(SMBiosMemoryErrorInformation32::new(\n\n undefined_struct,\n\n ))\n\n }\n\n SMBiosMemoryArrayMappedAddress::STRUCT_TYPE => DefinedStruct::MemoryArrayMappedAddress(\n\n SMBiosMemoryArrayMappedAddress::new(undefined_struct),\n\n ),\n\n SMBiosMemoryDeviceMappedAddress::STRUCT_TYPE => {\n\n DefinedStruct::MemoryDeviceMappedAddress(SMBiosMemoryDeviceMappedAddress::new(\n\n undefined_struct,\n\n ))\n\n }\n\n SMBiosBuiltInPointingDevice::STRUCT_TYPE => DefinedStruct::BuiltInPointingDevice(\n\n SMBiosBuiltInPointingDevice::new(undefined_struct),\n\n ),\n\n SMBiosPortableBattery::STRUCT_TYPE => {\n", "file_path": "src/structs/defined_struct.rs", "rank": 33, "score": 38342.34624562214 }, { "content": " SMBiosAdditionalInformation::new(undefined_struct),\n\n ),\n\n SMBiosOnboardDevicesExtendedInformation::STRUCT_TYPE => {\n\n DefinedStruct::OnboardDevicesExtendedInformation(\n\n SMBiosOnboardDevicesExtendedInformation::new(undefined_struct),\n\n )\n\n }\n\n SMBiosManagementControllerHostInterface::STRUCT_TYPE => {\n\n DefinedStruct::ManagementControllerHostInterface(\n\n SMBiosManagementControllerHostInterface::new(undefined_struct),\n\n )\n\n }\n\n SMBiosTpmDevice::STRUCT_TYPE => {\n\n DefinedStruct::TpmDevice(SMBiosTpmDevice::new(undefined_struct))\n\n }\n\n SMBiosProcessorAdditionalInformation::STRUCT_TYPE => {\n\n DefinedStruct::ProcessorAdditionalInformation(\n\n SMBiosProcessorAdditionalInformation::new(undefined_struct),\n\n )\n\n }\n", "file_path": "src/structs/defined_struct.rs", "rank": 34, "score": 38342.31965507755 }, { "content": " /// On Board Devices Information (Type 10, Obsolete)\n\n OnBoardDeviceInformation(SMBiosOnBoardDeviceInformation<'a>),\n\n /// OEM Strings (Type 11)\n\n OemStrings(SMBiosOemStrings<'a>),\n\n /// System Configuration Options (Type 12)\n\n SystemConfigurationOptions(SMBiosSystemConfigurationOptions<'a>),\n\n /// BIOS Language Information (Type 
13)\n\n LanguageInformation(SMBiosBiosLanguageInformation<'a>),\n\n /// Group Associations (Type 14)\n\n GroupAssociations(SMBiosGroupAssociations<'a>),\n\n /// System Event Log (Type 15)\n\n EventLog(SMBiosSystemEventLog<'a>),\n\n /// Physical Memory Array (Type 16)\n\n PhysicalMemoryArray(SMBiosPhysicalMemoryArray<'a>),\n\n /// Memory Device (Type 17)\n\n MemoryDevice(SMBiosMemoryDevice<'a>),\n\n /// 32-Bit Memory Error Information (Type 18)\n\n MemoryErrorInformation32Bit(SMBiosMemoryErrorInformation32<'a>),\n\n /// Memory Array Mapped Address (Type 19)\n\n MemoryArrayMappedAddress(SMBiosMemoryArrayMappedAddress<'a>),\n", "file_path": "src/structs/defined_struct.rs", "rank": 35, "score": 38336.32846365655 }, { "content": " /// BIOS Information (Type 0)\n\n Information(SMBiosInformation<'a>),\n\n /// System Information (Type 1)\n\n SystemInformation(SMBiosSystemInformation<'a>),\n\n /// Baseboard (or Module) Information (Type 2)\n\n BaseBoardInformation(SMBiosBaseboardInformation<'a>),\n\n /// System Enclosure or Chassis (Type 3)\n\n SystemChassisInformation(SMBiosSystemChassisInformation<'a>),\n\n /// Processor Information (Type 4)\n\n ProcessorInformation(SMBiosProcessorInformation<'a>),\n\n /// Memory Controller Information (Type 5, Obsolete)\n\n MemoryControllerInformation(SMBiosMemoryControllerInformation<'a>),\n\n /// Memory Module Information (Type 6, Obsolete)\n\n MemoryModuleInformation(SMBiosMemoryModuleInformation<'a>),\n\n /// Cache Informaiton (Type 7)\n\n CacheInformation(SMBiosCacheInformation<'a>),\n\n /// Port Connector Information (Type 8)\n\n PortConnectorInformation(SMBiosPortConnectorInformation<'a>),\n\n /// System Slot Information (Type 9)\n\n SystemSlot(SMBiosSystemSlot<'a>),\n", "file_path": "src/structs/defined_struct.rs", "rank": 36, "score": 38336.32846365655 }, { "content": " /// Memory Device Mapped Address (Type 20)\n\n MemoryDeviceMappedAddress(SMBiosMemoryDeviceMappedAddress<'a>),\n\n /// Built-in Pointing Device (Type 21)\n\n 
BuiltInPointingDevice(SMBiosBuiltInPointingDevice<'a>),\n\n /// Portable Battery (Type 22)\n\n PortableBattery(SMBiosPortableBattery<'a>),\n\n /// System Reset (Type 23)\n\n SystemReset(SMBiosSystemReset<'a>),\n\n /// Hardware Security (Type 24)\n\n HardwareSecurity(SMBiosHardwareSecurity<'a>),\n\n /// System Power Controls (Type 25)\n\n SystemPowerControls(SMBiosSystemPowerControls<'a>),\n\n /// Voltage Probe (Type 26)\n\n VoltageProbe(SMBiosVoltageProbe<'a>),\n\n /// Cooling Device (Type 27)\n\n CoolingDevice(SMBiosCoolingDevice<'a>),\n\n /// Temperature Probe (Type 28)\n\n TemperatureProbe(SMBiosTemperatureProbe<'a>),\n\n /// Electrical Current Probe (Type 29)\n\n ElectricalCurrentProbe(SMBiosElectricalCurrentProbe<'a>),\n", "file_path": "src/structs/defined_struct.rs", "rank": 37, "score": 38336.32846365655 }, { "content": " /// Out-of-Band Remote Access (Type 30)\n\n OutOfBandRemoteAccess(SMBiosOutOfBandRemoteAccess<'a>),\n\n /// Boot Integrity Services (BIS) (Type 31)\n\n BisEntryPoint(SMBiosBisEntryPoint<'a>),\n\n /// System Boot Information (Type 32)\n\n SystemBootInformation(SMBiosSystemBootInformation<'a>),\n\n /// 64-Bit Memory Error Information (Type 33)\n\n MemoryErrorInformation64Bit(SMBiosMemoryErrorInformation64<'a>),\n\n /// Management Device (Type 34)\n\n ManagementDevice(SMBiosManagementDevice<'a>),\n\n /// Management Device Component (Type 35)\n\n ManagementDeviceComponent(SMBiosManagementDeviceComponent<'a>),\n\n /// Management Device Threshold Data (Type 36)\n\n ManagementDeviceThresholdData(SMBiosManagementDeviceThresholdData<'a>),\n\n /// Memory Channel (Type 37)\n\n MemoryChannel(SMBiosMemoryChannel<'a>),\n\n /// IPMI Device Information (Type 38)\n\n IpmiDeviceInformation(SMBiosIpmiDeviceInformation<'a>),\n\n /// Power Supply (Type 39)\n\n SystemPowerSupply(SMBiosSystemPowerSupply<'a>),\n", "file_path": "src/structs/defined_struct.rs", "rank": 38, "score": 38336.32846365655 }, { "content": " /// Additional Information (Type 40)\n\n 
AdditionalInformation(SMBiosAdditionalInformation<'a>),\n\n /// Onboard Devices Extended Information (Type 41)\n\n OnboardDevicesExtendedInformation(SMBiosOnboardDevicesExtendedInformation<'a>),\n\n /// Management Controller Host Interface (Type 42)\n\n ManagementControllerHostInterface(SMBiosManagementControllerHostInterface<'a>),\n\n /// TPM Device (Type 43)\n\n TpmDevice(SMBiosTpmDevice<'a>),\n\n /// Processor Additional Information (Type 44)\n\n ProcessorAdditionalInformation(SMBiosProcessorAdditionalInformation<'a>),\n\n /// Inactive (Type 126)\n\n Inactive(SMBiosInactive<'a>),\n\n /// End-of-Table (Type 127)\n\n EndOfTable(SMBiosEndOfTable<'a>),\n\n /// OEM-Defined or Unknown Structure\n\n ///\n\n /// - A structure with a type value not yet defined, such as by a DMTF specification\n\n /// that supercedes the types known by this library\n\n /// - An OEM type with a value > 127.\n\n Undefined(SMBiosUnknown<'a>),\n", "file_path": "src/structs/defined_struct.rs", "rank": 39, "score": 38336.32846365655 }, { "content": "mod defined_struct;\n\nmod structure;\n\nmod types;\n\n\n\npub use defined_struct::*;\n\npub use structure::*;\n\npub use types::*;\n", "file_path": "src/structs/mod.rs", "rank": 40, "score": 32415.946728848245 }, { "content": "use crate::core::UndefinedStruct;\n\n\n\n/// # SMBIOS Structure\n\n///\n\n/// A type implementing this trait provides a representation of an SMBIOS type.\n", "file_path": "src/structs/structure.rs", "rank": 41, "score": 32408.54700962323 }, { "content": " pub fn new(parts: &'a UndefinedStruct) -> Self {\n\n SMBiosUnknown { parts: parts }\n\n }\n\n\n\n /// Structure parts of this unknown structure\n\n ///\n\n /// Use this to inspect the structure in more detail.\n\n pub fn parts(&self) -> &'a UndefinedStruct {\n\n self.parts\n\n }\n\n}\n\n\n\nimpl fmt::Debug for SMBiosUnknown<'_> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let fields = &self.parts.fields[Header::SIZE..];\n\n 
fmt.debug_struct(any::type_name::<SMBiosUnknown<'_>>())\n\n .field(\"header\", &self.parts.header)\n\n .field(\"fields\", &fields)\n\n .field(\"strings\", &self.parts.strings)\n\n .finish()\n", "file_path": "src/structs/types/unknown.rs", "rank": 42, "score": 31559.21197399131 }, { "content": " fn new(parts: &'a UndefinedStruct) -> Self {\n\n Self { parts }\n\n }\n\n\n\n fn parts(&self) -> &'a UndefinedStruct {\n\n self.parts\n\n }\n\n}\n\n\n\nimpl<'a> SMBiosInactive<'a> {}\n\n\n\nimpl fmt::Debug for SMBiosInactive<'_> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(any::type_name::<SMBiosInactive<'_>>())\n\n .field(\"header\", &self.parts.header)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Serialize for SMBiosInactive<'_> {\n", "file_path": "src/structs/types/inactive.rs", "rank": 43, "score": 31558.852296647216 }, { "content": "use serde::{ser::SerializeStruct, Serialize, Serializer};\n\nuse crate::{SMBiosStruct, UndefinedStruct};\n\nuse core::{fmt, any};\n\n\n\n/// # Inactive (Type 126)\n\n///\n\n/// This structure definition supports a system implementation where the SMBIOS structure-table is a\n\n/// superset of all supported system attributes and provides a standard mechanism for the system BIOS to\n\n/// signal that a structure is currently inactive and should not be interpreted by the upper-level software.\n\n///\n\n/// Compliant with:\n\n/// DMTF SMBIOS Reference Specification 3.4.0 (DSP0134)\n\n/// Document Date: 2020-07-17\n\npub struct SMBiosInactive<'a> {\n\n parts: &'a UndefinedStruct,\n\n}\n\n\n\nimpl<'a> SMBiosStruct<'a> for SMBiosInactive<'a> {\n\n const STRUCT_TYPE: u8 = 126u8;\n\n\n", "file_path": "src/structs/types/inactive.rs", "rank": 44, "score": 31557.096250700422 }, { "content": " table.resize(table_len, 0);\n\n file.read_exact(&mut table)?;\n\n Ok(table.into())\n\n }\n\n}\n\n\n\nimpl From<Vec<u8>> for UndefinedStructTable {\n\n fn from(data: Vec<u8>) -> Self {\n\n const DOUBLE_ZERO_SIZE: usize = 2usize;\n\n 
const MIN_STRUCT_SIZE: usize = Header::SIZE + DOUBLE_ZERO_SIZE;\n\n let mut result = Self::new();\n\n let mut current_index = 0usize;\n\n\n\n loop {\n\n // Is the next structure long enough?\n\n match data.get(current_index..current_index + MIN_STRUCT_SIZE) {\n\n Some(min_struct) => {\n\n // Read the structure's self-reported length in its header\n\n let struct_len = min_struct[Header::LENGTH_OFFSET] as usize;\n\n\n", "file_path": "src/core/undefined_struct.rs", "rank": 45, "score": 31555.651404813132 }, { "content": "use crate::{Header, UndefinedStruct};\n\nuse serde::{ser::SerializeStruct, Serialize, Serializer};\n\nuse core::{fmt, any};\n\n\n\n/// # OEM or Unknown Structure\n\n///\n\n/// Types 0 through 127 (7Fh) are reserved for and\n\n/// defined by the DMTF SMBIOS specification.\n\n/// Types 128 through 256 (80h to FFh) are available for\n\n/// system- and OEM-specific information.\n\n///\n\n/// When a structure has a type which is not defined or\n\n/// its type is an OEM type in the 80h to FFh range,\n\n/// this structure is used to represent the type.\n\npub struct SMBiosUnknown<'a> {\n\n parts: &'a UndefinedStruct,\n\n}\n\n\n\nimpl<'a> SMBiosUnknown<'a> {\n\n /// Creates an instance of this struct\n", "file_path": "src/structs/types/unknown.rs", "rank": 46, "score": 31554.000677544434 }, { "content": " }\n\n}\n\n\n\nimpl Serialize for SMBiosUnknown<'_> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let fields = &self.parts.fields[Header::SIZE..];\n\n\n\n let mut state = serializer.serialize_struct(\"SMBiosUnknown\", 3)?;\n\n state.serialize_field(\"header\", &self.parts.header)?;\n\n state.serialize_field(\"fields\", &fields)?;\n\n state.serialize_field(\"strings\", &self.parts.strings)?;\n\n state.end()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/structs/types/unknown.rs", "rank": 47, "score": 31552.193615613352 }, { "content": "use serde::{ser::SerializeStruct, 
Serialize, Serializer};\n\nuse std::{\n\n convert::TryInto,\n\n fmt,\n\n io::{Error, ErrorKind},\n\n};\n\n\n\nuse crate::core::{SMBiosData, SMBiosVersion};\n\n\n\n/// # Raw SMBIOS Data\n\n///\n\n/// When Windows kernel32 [GetSystemFirmwareTable](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemfirmwaretable) function is called for RSMB,\n\n/// the raw SMBIOS table provider ('RSMB') it retrieves the contents of this\n\n/// raw SMBIOS firmware table structure.\n\npub struct WinSMBiosData {\n\n windows_header: Vec<u8>,\n\n /// SMBios table data\n\n pub smbios_data: SMBiosData,\n\n}\n\n\n", "file_path": "src/windows/win_struct.rs", "rank": 48, "score": 31551.933174306636 }, { "content": " ///\n\n /// Make this a \"try_into\"\n\n pub fn as_type<T: SMBiosStruct<'a>>(&'a self) -> Option<T> {\n\n if T::STRUCT_TYPE == self.header.struct_type() {\n\n Some(T::new(self))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Down casts the current structure to its specific defined BIOS structure type\n\n pub fn defined_struct(&self) -> DefinedStruct<'_> {\n\n self.into()\n\n }\n\n}\n\n\n\nimpl fmt::Debug for UndefinedStruct {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let fields = &self.fields[Header::SIZE..];\n\n fmt.debug_struct(any::type_name::<UndefinedStruct>())\n", "file_path": "src/core/undefined_struct.rs", "rank": 49, "score": 31551.497463018222 }, { "content": " /// Creates an iterator of the defined structure which uses a closure to determine if an element should be yielded.\n\n pub fn filter<T: 'a, P: 'a>(&'a self, predicate: P) -> impl Iterator<Item = T> + 'a\n\n where\n\n T: SMBiosStruct<'a>,\n\n P: FnMut(&T) -> bool,\n\n {\n\n self.defined_struct_iter().filter(predicate)\n\n }\n\n\n\n /// Maps the defined struct to another type given by the closure.\n\n pub fn map<A: 'a, B, F: 'a>(&'a self, f: F) -> impl Iterator<Item = B> + 'a\n\n where\n\n A: SMBiosStruct<'a>,\n\n F: FnMut(A) -> B,\n\n {\n\n 
self.defined_struct_iter().map(f)\n\n }\n\n\n\n /// Creates an iterator that both filters and maps from the defined struct iterator.\n\n pub fn filter_map<A: 'a, B, F: 'a>(&'a self, f: F) -> impl Iterator<Item = B> + 'a\n", "file_path": "src/core/undefined_struct.rs", "rank": 50, "score": 31551.01975381185 }, { "content": " },\n\n },\n\n None => UndefinedStruct {\n\n ..Default::default()\n\n },\n\n }\n\n }\n\n\n\n /// Retrieve a byte at the given offset from the structure's data section\n\n pub fn get_field_byte(&self, offset: usize) -> Option<u8> {\n\n match self.fields.get(offset..offset + 1) {\n\n Some(val) => Some(val[0]),\n\n None => None,\n\n }\n\n }\n\n\n\n /// Retrieve a WORD at the given offset from the structure's data section\n\n pub fn get_field_word(&self, offset: usize) -> Option<u16> {\n\n match self.fields.get(offset..offset + 2) {\n\n Some(val) => Some(u16::from_le_bytes(val.try_into().expect(\"u16 is 2 bytes\"))),\n", "file_path": "src/core/undefined_struct.rs", "rank": 51, "score": 31550.437218627263 }, { "content": " where\n\n A: SMBiosStruct<'a>,\n\n F: FnMut(A) -> Option<B>,\n\n {\n\n self.defined_struct_iter().filter_map(f)\n\n }\n\n\n\n /// Finds the structure matching the given handle\n\n ///\n\n /// To downcast to the defined struct, call .defined_struct() on the result.\n\n pub fn find_by_handle(&'a self, handle: &Handle) -> Option<&'a UndefinedStruct> {\n\n self.iter()\n\n .find(|smbios_struct| smbios_struct.header.handle() == *handle)\n\n .and_then(|undefined_struct| Some(undefined_struct))\n\n }\n\n\n\n /// Returns all occurances of the structure\n\n pub fn collect<T>(&'a self) -> Vec<T>\n\n where\n\n T: SMBiosStruct<'a>,\n", "file_path": "src/core/undefined_struct.rs", "rank": 52, "score": 31549.133124730703 }, { "content": " fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"SMBiosInactive\", 1)?;\n\n state.serialize_field(\"header\", 
&self.parts.header)?;\n\n state.end()\n\n }\n\n}\n", "file_path": "src/structs/types/inactive.rs", "rank": 53, "score": 31548.879835516393 }, { "content": " F: FnMut(T) -> bool,\n\n {\n\n self.defined_struct_iter().all(f)\n\n }\n\n\n\n /// Tests if any element of the defined struct iterator matches a predicate.\n\n pub fn any<T, F>(&'a self, f: F) -> bool\n\n where\n\n T: SMBiosStruct<'a>,\n\n F: FnMut(T) -> bool,\n\n {\n\n self.defined_struct_iter().any(f)\n\n }\n\n\n\n /// Finds the first occurance of the structure\n\n pub fn first<T>(&'a self) -> Option<T>\n\n where\n\n T: SMBiosStruct<'a>,\n\n {\n\n self.defined_struct_iter().next()\n", "file_path": "src/core/undefined_struct.rs", "rank": 54, "score": 31548.096759526314 }, { "content": " {\n\n let mut state = serializer.serialize_struct(\"WinSMBiosData\", 6)?;\n\n state.serialize_field(\"used20_calling_method\", &self.used20_calling_method())?;\n\n state.serialize_field(\"smbios_major_version\", &self.smbios_major_version())?;\n\n state.serialize_field(\"smbios_minor_version\", &self.smbios_minor_version())?;\n\n state.serialize_field(\"dmi_revision\", &self.dmi_revision())?;\n\n state.serialize_field(\"table_data_length\", &self.table_data_length())?;\n\n state.serialize_field(\"smbios_data\", &self.smbios_data)?;\n\n state.end()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_is_valid_raw_smbios_data() {\n\n // Good structure (lengths are correct)\n\n let struct_data = vec![0x00u8, 0x03, 0x03, 0x00, 0x01, 0x00, 0x00, 0x00, 0xAB];\n", "file_path": "src/windows/win_struct.rs", "rank": 55, "score": 31547.934834039697 }, { "content": " }\n\n\n\n /// Finds the first occurance of the structure that satisfies a predicate.\n\n pub fn find<T, P>(&'a self, predicate: P) -> Option<T>\n\n where\n\n T: SMBiosStruct<'a>,\n\n P: FnMut(&T) -> bool,\n\n {\n\n self.defined_struct_iter().find(predicate)\n\n }\n\n\n\n /// Applies function to the defined struct elements and returns the 
first non-none result.\n\n pub fn find_map<A, B, F>(&'a self, f: F) -> Option<B>\n\n where\n\n A: SMBiosStruct<'a>,\n\n F: FnMut(A) -> Option<B>,\n\n {\n\n self.defined_struct_iter().find_map(f)\n\n }\n\n\n", "file_path": "src/core/undefined_struct.rs", "rank": 56, "score": 31547.12394811353 }, { "content": "/// A collection of [UndefinedStruct] items.\n\n#[derive(Debug, Serialize)]\n\npub struct UndefinedStructTable(Vec<UndefinedStruct>);\n\n\n\nimpl<'a> UndefinedStructTable {\n\n fn new() -> UndefinedStructTable {\n\n UndefinedStructTable(Vec::new())\n\n }\n\n\n\n fn add(&mut self, elem: UndefinedStruct) {\n\n self.0.push(elem);\n\n }\n\n\n\n /// Iterator of the contained [UndefinedStruct] items.\n\n pub fn iter(&self) -> Iter<'_, UndefinedStruct> {\n\n self.0.iter()\n\n }\n\n\n\n /// An iterator over the defined type instances within the table.\n\n pub fn defined_struct_iter<T>(&'a self) -> impl Iterator<Item = T> + 'a\n", "file_path": "src/core/undefined_struct.rs", "rank": 57, "score": 31546.502469598 }, { "content": "use super::header::{Handle, Header};\n\nuse super::strings::Strings;\n\nuse crate::structs::{DefinedStruct, SMBiosEndOfTable, SMBiosStruct};\n\nuse serde::{Serialize, Serializer};\n\n#[cfg(not(feature = \"no_std\"))]\n\nuse std::{\n\n fs::File,\n\n io::{prelude::*, Error, ErrorKind, SeekFrom},\n\n vec::IntoIter,\n\n};\n\nuse core::{\n\n convert::TryInto,\n\n slice::Iter,\n\n fmt,\n\n any\n\n};\n\n#[cfg(feature = \"no_std\")]\n\nuse alloc::{vec::{Vec, IntoIter}, format};\n\n\n\n/// # Embodies the three basic parts of an SMBIOS structure\n", "file_path": "src/core/undefined_struct.rs", "rank": 58, "score": 31546.227623085568 }, { "content": " }\n\n\n\n /// Retrieve a QWORD at the given offset from the structure's data section\n\n pub fn get_field_qword(&self, offset: usize) -> Option<u64> {\n\n match self.fields.get(offset..offset + 8) {\n\n Some(val) => Some(u64::from_le_bytes(val.try_into().expect(\"u64 is 8 bytes\"))),\n\n None => None,\n\n 
}\n\n }\n\n\n\n /// Retrieve a String of the given offset\n\n ///\n\n /// Retrieval of strings is a two part operation. The given offset\n\n /// contains a byte whose value is a 1 based index into the strings section.\n\n /// The string is thus retrieved from the strings section based on the\n\n /// byte value at the given offset.\n\n pub fn get_field_string(&self, offset: usize) -> Option<String> {\n\n match self.get_field_byte(offset) {\n\n Some(val) => self.strings.get_string(val),\n\n None => None,\n", "file_path": "src/core/undefined_struct.rs", "rank": 59, "score": 31545.596870935497 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Debug for WinSMBiosData {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(std::any::type_name::<WinSMBiosData>())\n\n .field(\"used20_calling_method\", &self.used20_calling_method())\n\n .field(\"smbios_major_version\", &self.smbios_major_version())\n\n .field(\"smbios_minor_version\", &self.smbios_minor_version())\n\n .field(\"dmi_revision\", &self.dmi_revision())\n\n .field(\"table_data_length\", &self.table_data_length())\n\n .field(\"smbios_data\", &self.smbios_data)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Serialize for WinSMBiosData {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n", "file_path": "src/windows/win_struct.rs", "rank": 60, "score": 31545.497546293547 }, { "content": " }\n\n }\n\n\n\n // todo: learn how to pass an index range (SliceIndex?) 
rather than start/end indices.\n\n // This would better conform to the Rust design look and feel.\n\n\n\n /// Retrieve a block of bytes from the structure's data section\n\n pub fn get_field_data(&self, start_index: usize, end_index: usize) -> Option<&[u8]> {\n\n return self.fields.get(start_index..end_index);\n\n }\n\n\n\n /// Cast to a given structure\n\n ///\n\n /// When this library does not contain a [DefinedStruct] variant\n\n /// matching the SMBiosStruct::STRUCT_TYPE, this function affords a cast to the\n\n /// given type. Such would be the case with OEM structure type T\n\n /// (which implements the [SMBiosStruct] trait).\n\n ///\n\n /// TODO: This should panic (not be Option) when the STRUCT_TYPE does not match because\n\n /// this would be a logic error in code, not a runtime error.\n", "file_path": "src/core/undefined_struct.rs", "rank": 61, "score": 31544.643489039703 }, { "content": " WinSMBiosData::TABLE_DATA_LENGTH_OFFSET\n\n ..WinSMBiosData::TABLE_DATA_LENGTH_OFFSET + 4,\n\n )\n\n .unwrap();\n\n let table_data_length = u32::from_le_bytes(\n\n slice\n\n .try_into()\n\n .expect(\"array length does not match type width\"),\n\n ) as usize;\n\n\n\n table_data_length == length - WinSMBiosData::SMBIOS_TABLE_DATA_OFFSET\n\n }\n\n\n\n /// The raw SMBIOS data this structure is wrapping\n\n pub fn raw_smbios_data(&self) -> &[u8] {\n\n self.windows_header.as_slice()\n\n }\n\n\n\n /// Used20CallingMethod\n\n pub fn used20_calling_method(&self) -> u8 {\n", "file_path": "src/windows/win_struct.rs", "rank": 62, "score": 31544.416425642765 }, { "content": " None => None,\n\n }\n\n }\n\n\n\n /// Retrieve a [Handle] at the given offset from the structure's data section\n\n pub fn get_field_handle(&self, offset: usize) -> Option<Handle> {\n\n match self.fields.get(offset..offset + Handle::SIZE) {\n\n Some(val) => Some(Handle(u16::from_le_bytes(\n\n val.try_into().expect(\"u16 is 2 bytes\"),\n\n ))),\n\n None => None,\n\n }\n\n }\n\n\n\n /// Retrieve a DWORD at the 
given offset from the structure's data section\n\n pub fn get_field_dword(&self, offset: usize) -> Option<u32> {\n\n match self.fields.get(offset..offset + 4) {\n\n Some(val) => Some(u32::from_le_bytes(val.try_into().expect(\"u32 is 4 bytes\"))),\n\n None => None,\n\n }\n", "file_path": "src/core/undefined_struct.rs", "rank": 63, "score": 31543.733862800327 }, { "content": " {\n\n self.defined_struct_iter().collect()\n\n }\n\n\n\n /// Load an [UndefinedStructTable] by seeking and reading the file offsets.\n\n #[cfg(not(feature = \"no_std\"))]\n\n pub fn try_load_from_file_offset(\n\n file: &mut File,\n\n table_offset: u64,\n\n table_len: usize,\n\n ) -> Result<Self, Error> {\n\n if table_len < Header::SIZE + 2 {\n\n return Err(Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\"The table has an invalid size: {}\", table_len),\n\n ));\n\n }\n\n\n\n file.seek(SeekFrom::Start(table_offset))?;\n\n let mut table = Vec::with_capacity(table_len);\n", "file_path": "src/core/undefined_struct.rs", "rank": 64, "score": 31543.701213935874 }, { "content": " .field(\"header\", &self.header)\n\n .field(\"fields\", &fields)\n\n .field(\"strings\", &self.strings)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Default for UndefinedStruct {\n\n fn default() -> Self {\n\n let v: [u8; 4] = [0; 4];\n\n UndefinedStruct {\n\n header: Header::new(v),\n\n fields: (&[]).to_vec(),\n\n strings: { Strings::new((&[]).to_vec()) },\n\n }\n\n }\n\n}\n\n\n\n/// # Undefined Struct Table\n\n///\n", "file_path": "src/core/undefined_struct.rs", "rank": 65, "score": 31542.052763773947 }, { "content": " where\n\n T: SMBiosStruct<'a>,\n\n {\n\n self.iter()\n\n .take_while(|undefined_struct| {\n\n undefined_struct.header.struct_type() != SMBiosEndOfTable::STRUCT_TYPE\n\n })\n\n .filter_map(|undefined_struct| {\n\n if undefined_struct.header.struct_type() == T::STRUCT_TYPE {\n\n Some(T::new(undefined_struct))\n\n } else {\n\n None\n\n }\n\n })\n\n }\n\n\n\n /// Tests if every element of the defined struct iterator 
matches a predicate.\n\n pub fn all<T, F>(&'a self, f: F) -> bool\n\n where\n\n T: SMBiosStruct<'a>,\n", "file_path": "src/core/undefined_struct.rs", "rank": 66, "score": 31541.30904564075 }, { "content": "///\n\n/// Every SMBIOS structure contains three distinct sections:\n\n/// - A header\n\n/// - A formatted structure of fields (offsets and widths)\n\n/// - String data\n\n///\n\n/// A consumer of BIOS data ultimately wants to work with a [DefinedStruct].\n\n/// [UndefinedStruct] provides a set of fields and functions that enables\n\n/// downcasting to a [DefinedStruct]. Further, the OEM is allowed to define\n\n/// their own structures and in such cases working with UndefinedStruct is\n\n/// necessary. Therefore, [UndefinedStruct] is public for the case of OEM,\n\n/// as well as when working with structures that are defined in an SMBIOS\n\n/// standard newer than the one this library currently supports.\n\n#[derive(Serialize)]\n\npub struct UndefinedStruct {\n\n /// The [Header] of the structure\n\n pub header: Header,\n\n\n\n /// The raw data for the header and fields\n\n ///\n", "file_path": "src/core/undefined_struct.rs", "rank": 67, "score": 31540.38582540851 }, { "content": "\n\nimpl IntoIterator for UndefinedStructTable {\n\n type Item = UndefinedStruct;\n\n type IntoIter = IntoIter<Self::Item>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter()\n\n }\n\n}\n", "file_path": "src/core/undefined_struct.rs", "rank": 68, "score": 31539.60275590724 }, { "content": "impl WinSMBiosData {\n\n /// Offset of the Used20CallingMethod field (0)\n\n pub const USED20_CALLING_METHOD_OFFSET: usize = 0usize;\n\n\n\n /// Offset of the SMBIOSMajorVersion field (1)\n\n pub const SMBIOS_MAJOR_VERSION_OFFSET: usize = 1usize;\n\n\n\n /// Offset of the SMBIOSMinorVersion field (2)\n\n pub const SMBIOS_MINOR_VERSION_OFFSET: usize = 2usize;\n\n\n\n /// Offset of the DMIRevision field (3)\n\n pub const DMI_REVISION_OFFSET: usize = 3usize;\n\n\n\n /// Offset of the 
Length field (4)\n\n pub const TABLE_DATA_LENGTH_OFFSET: usize = 4usize;\n\n\n\n /// Offset of the SMBIOSTableData field (8)\n\n pub const SMBIOS_TABLE_DATA_OFFSET: usize = 8usize;\n\n\n\n /// Creates an instance of [WinSMBiosData]\n", "file_path": "src/windows/win_struct.rs", "rank": 69, "score": 31539.118650625394 }, { "content": " self.windows_header[WinSMBiosData::USED20_CALLING_METHOD_OFFSET]\n\n }\n\n\n\n /// SMBIOS major version\n\n pub fn smbios_major_version(&self) -> u8 {\n\n self.windows_header[WinSMBiosData::SMBIOS_MAJOR_VERSION_OFFSET]\n\n }\n\n\n\n /// SMBIOS minor version\n\n pub fn smbios_minor_version(&self) -> u8 {\n\n self.windows_header[WinSMBiosData::SMBIOS_MINOR_VERSION_OFFSET]\n\n }\n\n\n\n /// DMI revision\n\n pub fn dmi_revision(&self) -> u8 {\n\n self.windows_header[WinSMBiosData::DMI_REVISION_OFFSET]\n\n }\n\n\n\n fn version_from_raw_header(windows_header: &Vec<u8>) -> SMBiosVersion {\n\n SMBiosVersion {\n", "file_path": "src/windows/win_struct.rs", "rank": 70, "score": 31537.44854230546 }, { "content": " /// `fields` is used by the `get_field_*()` functions. `fields` does not\n\n /// include _strings_; therefore, preventing accidentally retrieving\n\n /// data from the _strings_ area. 
This avoids a need to check\n\n /// `header.length()` during field retrieval.\n\n ///\n\n /// Note: A better design is for this to only hold the fields, however,\n\n /// that will shift field offsets given in code by 4 (the header size).\n\n /// The SMBIOS specification gives offsets relative to the start of the\n\n /// header, and therefore maintaining this library code is easier to\n\n /// keep the header.\n\n ///\n\n /// An alternative would be to make the `get_field_*()` functions adjust\n\n /// for the header offset though this adds a small cost to every field\n\n /// retrieval in comparison to just keeping an extra 4 bytes for every\n\n /// structure.\n\n pub fields: Vec<u8>,\n\n\n\n /// The strings of the structure\n\n #[serde(serialize_with = \"ser_strings\")]\n\n pub strings: Strings,\n\n}\n\n\n", "file_path": "src/core/undefined_struct.rs", "rank": 71, "score": 31537.428143206504 }, { "content": " Some(version),\n\n )\n\n },\n\n })\n\n }\n\n }\n\n\n\n /// Verify if a block of data is a valid WinSMBiosData structure\n\n ///\n\n /// This only checks if the structure itself is valid and not whether the contained\n\n /// [SMBiosData] structure is valid or not.\n\n pub fn is_valid_win_smbios_data(raw_data: &Vec<u8>) -> bool {\n\n let length = raw_data.len();\n\n if length <= WinSMBiosData::SMBIOS_TABLE_DATA_OFFSET {\n\n return false;\n\n }\n\n\n\n // retrieve the table data length field\n\n let slice = raw_data\n\n .get(\n", "file_path": "src/windows/win_struct.rs", "rank": 72, "score": 31536.429302890007 }, { "content": "pub use system_configuration_options::*;\n\n\n\nmod system_event_log;\n\npub use system_event_log::*;\n\n\n\nmod system_information;\n\npub use system_information::*;\n\n\n\nmod system_power_controls;\n\npub use system_power_controls::*;\n\n\n\nmod system_power_supply;\n\npub use system_power_supply::*;\n\n\n\nmod system_reset;\n\npub use system_reset::*;\n\n\n\nmod system_slot;\n\npub use system_slot::*;\n\n\n\nmod temperature_probe;\n\npub 
use temperature_probe::*;\n\n\n\nmod tpm_device;\n\npub use tpm_device::*;\n\n\n\nmod voltage_probe;\n\npub use voltage_probe::*;\n", "file_path": "src/structs/types/mod.rs", "rank": 73, "score": 31536.342103152885 }, { "content": "mod bis_entry_point;\n\npub use bis_entry_point::*;\n\n\n\nmod built_in_pointing_device;\n\npub use built_in_pointing_device::*;\n\n\n\nmod cache_information;\n\npub use cache_information::*;\n\n\n\nmod cooling_device;\n\npub use cooling_device::*;\n\n\n\nmod electrical_current_probe;\n\npub use electrical_current_probe::*;\n\n\n\nmod end_of_table;\n\npub use end_of_table::*;\n\n\n\nmod group_associations;\n\npub use group_associations::*;\n", "file_path": "src/structs/types/mod.rs", "rank": 74, "score": 31536.186309643163 }, { "content": "pub use management_device_threshold_data::*;\n\n\n\nmod memory_array_mapped_address;\n\npub use memory_array_mapped_address::*;\n\n\n\nmod memory_channel;\n\npub use memory_channel::*;\n\n\n\nmod memory_controller_information;\n\npub use memory_controller_information::*;\n\n\n\nmod memory_device;\n\npub use memory_device::*;\n\n\n\nmod memory_device_mapped_address;\n\npub use memory_device_mapped_address::*;\n\n\n\nmod memory_error_information_32;\n\npub use memory_error_information_32::*;\n\n\n", "file_path": "src/structs/types/mod.rs", "rank": 75, "score": 31536.072007757615 }, { "content": "mod memory_error_information_64;\n\npub use memory_error_information_64::*;\n\n\n\nmod memory_module_information;\n\npub use memory_module_information::*;\n\n\n\nmod oem_strings;\n\npub use oem_strings::*;\n\n\n\nmod on_board_device_information;\n\npub use on_board_device_information::*;\n\n\n\nmod onboard_devices_extended_information;\n\npub use onboard_devices_extended_information::*;\n\n\n\nmod out_of_band_remote_access;\n\npub use out_of_band_remote_access::*;\n\n\n\nmod physical_memory_array;\n\npub use physical_memory_array::*;\n", "file_path": "src/structs/types/mod.rs", "rank": 76, "score": 
31536.023699740545 }, { "content": "\n\nmod hardware_security;\n\npub use hardware_security::*;\n\n\n\nmod inactive;\n\npub use inactive::*;\n\n\n\nmod ipmi_device_information;\n\npub use ipmi_device_information::*;\n\n\n\nmod management_controller_host_interface;\n\npub use management_controller_host_interface::*;\n\n\n\nmod management_device;\n\npub use management_device::*;\n\n\n\nmod management_device_component;\n\npub use management_device_component::*;\n\n\n\nmod management_device_threshold_data;\n", "file_path": "src/structs/types/mod.rs", "rank": 77, "score": 31535.97027770911 }, { "content": "\n\nmod portable_battery;\n\npub use portable_battery::*;\n\n\n\nmod port_connector_information;\n\npub use port_connector_information::*;\n\n\n\nmod processor_additional_information;\n\npub use processor_additional_information::*;\n\n\n\nmod processor_information;\n\npub use processor_information::*;\n\n\n\nmod system_boot_information;\n\npub use system_boot_information::*;\n\n\n\nmod system_chassis_information;\n\npub use system_chassis_information::*;\n\n\n\nmod system_configuration_options;\n", "file_path": "src/structs/types/mod.rs", "rank": 78, "score": 31535.951771342166 }, { "content": "//! Standard structures.\n\n//!\n\n//! Contains implementations for all standard structures found in\n\n//! 
the DMTF SMBIOS specification.\n\n\n\nmod unknown;\n\npub use unknown::*;\n\n\n\nmod bios_information;\n\npub use bios_information::*;\n\n\n\nmod additional_information;\n\npub use additional_information::*;\n\n\n\nmod baseboard_information;\n\npub use baseboard_information::*;\n\n\n\nmod bios_language_information;\n\npub use bios_language_information::*;\n\n\n", "file_path": "src/structs/types/mod.rs", "rank": 79, "score": 31535.852560143183 }, { "content": " ///\n\n /// To retrieve this structure on a windows system call load_windows_smbios_data().\n\n ///\n\n /// The new() is provided publicly to allow loading data from other sources\n\n /// such as a file or from memory array as is done with testing.\n\n pub fn new(raw_smbios_data: Vec<u8>) -> Result<WinSMBiosData, Error> {\n\n if !WinSMBiosData::is_valid_win_smbios_data(&raw_smbios_data) {\n\n Err(Error::new(\n\n ErrorKind::InvalidData,\n\n \"Invalid WinSMBiosData structure\",\n\n ))\n\n } else {\n\n let windows_header =\n\n Vec::from(&raw_smbios_data[..WinSMBiosData::SMBIOS_TABLE_DATA_OFFSET]);\n\n let version = WinSMBiosData::version_from_raw_header(&windows_header);\n\n Ok(WinSMBiosData {\n\n windows_header,\n\n smbios_data: {\n\n SMBiosData::from_vec_and_version(\n\n Vec::from(&raw_smbios_data[WinSMBiosData::SMBIOS_TABLE_DATA_OFFSET..]),\n", "file_path": "src/windows/win_struct.rs", "rank": 80, "score": 31534.714649473895 }, { "content": "\n\n // Copy the current structure to the collection\n\n result.add(UndefinedStruct::new(\n\n &data[current_index..next_index].to_vec(),\n\n ));\n\n current_index = next_index;\n\n }\n\n None => break,\n\n }\n\n }\n\n None => break,\n\n };\n\n }\n\n None => break,\n\n }\n\n }\n\n\n\n result\n\n }\n\n}\n", "file_path": "src/core/undefined_struct.rs", "rank": 81, "score": 31534.35075948215 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn test_unknown_oem_type() {\n\n // For testing we've borrowed a language information type (0x0D) structure and change its type to 0x99 
(> 0x7F are OEM types)\n\n let unknown_bytes = vec![\n\n 0x99u8, 0x16, 0x21, 0x00, // unknown data\n\n 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x01, // \"en|US|iso8859-1\"\n\n 0x65, 0x6E, 0x7C, 0x55, 0x53, 0x7C, 0x69, 0x73, 0x6F, 0x38, 0x38, 0x35, 0x39, 0x2D,\n\n 0x31, 0x00, // \"fr|FR|iso8859-1\"\n\n 0x66, 0x72, 0x7C, 0x46, 0x52, 0x7C, 0x69, 0x73, 0x6F, 0x38, 0x38, 0x35, 0x39, 0x2D,\n\n 0x31, 0x00, // \"ja|JP|unicode\"\n\n 0x6A, 0x61, 0x7C, 0x4A, 0x50, 0x7C, 0x75, 0x6E, 0x69, 0x63, 0x6F, 0x64, 0x65, 0x00,\n\n // end of structure\n\n 0x00,\n\n ];\n\n\n\n let parts = UndefinedStruct::new(&unknown_bytes);\n\n let unknown = SMBiosUnknown::new(&parts);\n", "file_path": "src/structs/types/unknown.rs", "rank": 82, "score": 31532.971843541814 }, { "content": " major: windows_header[WinSMBiosData::SMBIOS_MAJOR_VERSION_OFFSET],\n\n minor: windows_header[WinSMBiosData::SMBIOS_MINOR_VERSION_OFFSET],\n\n revision: windows_header[WinSMBiosData::DMI_REVISION_OFFSET],\n\n }\n\n }\n\n\n\n /// Length of the smbios table data\n\n pub fn table_data_length(&self) -> u32 {\n\n let slice = self\n\n .windows_header\n\n .get(\n\n WinSMBiosData::TABLE_DATA_LENGTH_OFFSET\n\n ..WinSMBiosData::TABLE_DATA_LENGTH_OFFSET + 4,\n\n )\n\n .unwrap();\n\n u32::from_le_bytes(\n\n slice\n\n .try_into()\n\n .expect(\"array length does not match type width\"),\n\n )\n", "file_path": "src/windows/win_struct.rs", "rank": 83, "score": 31530.74947606388 }, { "content": "\n\n // header tests\n\n assert_eq!(*unknown.parts().header.handle(), 0x0021);\n\n assert_eq!(unknown.parts().header.length(), 0x16);\n\n\n\n // debug print test\n\n println!(\"unknown structure: {:?}\", unknown);\n\n }\n\n}\n", "file_path": "src/structs/types/unknown.rs", "rank": 84, "score": 31529.691874118416 }, { "content": " assert_eq!(win_smbios_data.dmi_revision(), 0x00);\n\n assert_eq!(win_smbios_data.table_data_length(), 0x01);\n\n }\n\n\n\n #[test]\n\n fn 
test_win_smbios_data_constructor() {\n\n let raw_win_data = vec![0x00u8, 0x03, 0x04, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0xFF];\n\n\n\n let win_smbios_data = WinSMBiosData::new(raw_win_data.clone()).unwrap();\n\n\n\n assert_eq!(\n\n win_smbios_data.windows_header.as_slice(),\n\n &raw_win_data[..8]\n\n );\n\n }\n\n}\n", "file_path": "src/windows/win_struct.rs", "rank": 85, "score": 31529.348558140035 }, { "content": " // Bad reported length\n\n if struct_len < Header::SIZE {\n\n break;\n\n }\n\n\n\n // Beyond the structure length are the structure's strings\n\n // Find the /0/0 which marks the end of this structure and the\n\n // beginning of the next.\n\n match data.get(current_index + struct_len..) {\n\n Some(strings_etc) => {\n\n match strings_etc\n\n .windows(DOUBLE_ZERO_SIZE)\n\n .position(|x| x[0] == x[1] && x[1] == 0)\n\n {\n\n Some(double_zero_position) => {\n\n // The next structure will start at this index\n\n let next_index = current_index\n\n + struct_len\n\n + double_zero_position\n\n + DOUBLE_ZERO_SIZE;\n", "file_path": "src/core/undefined_struct.rs", "rank": 86, "score": 31529.21933605479 }, { "content": " assert!(WinSMBiosData::is_valid_win_smbios_data(&struct_data));\n\n\n\n // Bad structure (too short)\n\n let struct_data = vec![0x00u8, 0x03, 0x03];\n\n assert!(!WinSMBiosData::is_valid_win_smbios_data(&struct_data));\n\n\n\n // Bad structure (bad table data length)\n\n let struct_data = vec![0x00u8, 0x03, 0x03, 0x00, 0xFF, 0x00, 0x00, 0x00, 0xAB];\n\n assert!(!WinSMBiosData::is_valid_win_smbios_data(&struct_data));\n\n }\n\n\n\n #[test]\n\n fn test_win_smbios_data_headers() {\n\n let raw_win_data = vec![0x00u8, 0x03, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00];\n\n\n\n let win_smbios_data = WinSMBiosData::new(raw_win_data).unwrap();\n\n\n\n assert_eq!(win_smbios_data.used20_calling_method(), 0x00);\n\n assert_eq!(win_smbios_data.smbios_major_version(), 0x03);\n\n assert_eq!(win_smbios_data.smbios_minor_version(), 0x04);\n", "file_path": 
"src/windows/win_struct.rs", "rank": 87, "score": 31528.104561078115 }, { "content": " let mut state = serializer.serialize_struct(\"SystemCacheTypeData\", 2)?;\n\n state.serialize_field(\"raw\", &self.raw)?;\n\n state.serialize_field(\"value\", &self.value)?;\n\n state.end()\n\n }\n\n}\n\n\n\nimpl fmt::Display for SystemCacheTypeData {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.value {\n\n SystemCacheType::None => write!(f, \"{}\", &self.raw),\n\n _ => write!(f, \"{:?}\", &self.value),\n\n }\n\n }\n\n}\n\n\n\nimpl Deref for SystemCacheTypeData {\n\n type Target = SystemCacheType;\n\n\n\n fn deref(&self) -> &Self::Target {\n", "file_path": "src/structs/types/cache_information.rs", "rank": 88, "score": 30740.427946232903 }, { "content": " /// _raw_ is most useful when _value_ is None.\n\n /// This is most likely to occur when the standard was updated but\n\n /// this library code has not been updated to match the current\n\n /// standard.\n\n pub raw: u8,\n\n /// The contained [ProcessorUpgrade] value\n\n pub value: ProcessorUpgrade,\n\n}\n\n\n\nimpl fmt::Debug for ProcessorUpgradeData {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(any::type_name::<ProcessorUpgradeData>())\n\n .field(\"raw\", &self.raw)\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Serialize for ProcessorUpgradeData {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n", "file_path": "src/structs/types/processor_information.rs", "rank": 89, "score": 30740.187883249662 }, { "content": " ///\n\n /// _raw_ is most useful when _value_ is None.\n\n /// This is most likely to occur when the standard was updated but\n\n /// this library code has not been updated to match the current\n\n /// standard.\n\n pub raw: u8,\n\n /// The contained [ErrorCorrectionType] value\n\n pub value: ErrorCorrectionType,\n\n}\n\n\n\nimpl fmt::Debug for ErrorCorrectionTypeData {\n\n fn fmt(&self, fmt: &mut 
fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(any::type_name::<ErrorCorrectionTypeData>())\n\n .field(\"raw\", &self.raw)\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Serialize for ErrorCorrectionTypeData {\n", "file_path": "src/structs/types/cache_information.rs", "rank": 90, "score": 30739.24311130594 }, { "content": "use crate::{SMBiosStruct, UndefinedStruct};\n\nuse serde::{ser::SerializeStruct, Serialize, Serializer};\n\nuse core::{fmt, any};\n\nuse core::ops::Deref;\n\n#[cfg(feature = \"no_std\")]\n\nuse alloc::string::String;\n\n\n\n/// # BIOS Information (Type 0)\n\npub struct SMBiosInformation<'a> {\n\n parts: &'a UndefinedStruct,\n\n}\n\n\n\nimpl<'a> SMBiosStruct<'a> for SMBiosInformation<'a> {\n\n const STRUCT_TYPE: u8 = 0u8;\n\n\n\n fn new(parts: &'a UndefinedStruct) -> Self {\n\n Self { parts }\n\n }\n\n\n\n fn parts(&self) -> &'a UndefinedStruct {\n", "file_path": "src/structs/types/bios_information.rs", "rank": 91, "score": 30737.086848928862 }, { "content": " .finish()\n\n }\n\n}\n\n\n\nimpl Serialize for SystemWakeUpTypeData {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"SystemWakeUpTypeData\", 2)?;\n\n state.serialize_field(\"raw\", &self.raw)?;\n\n state.serialize_field(\"value\", &self.value)?;\n\n state.end()\n\n }\n\n}\n\n\n\nimpl fmt::Display for SystemWakeUpTypeData {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.value {\n\n SystemWakeUpType::None => write!(f, \"{}\", &self.raw),\n", "file_path": "src/structs/types/system_information.rs", "rank": 92, "score": 30737.033397151394 }, { "content": "impl Serialize for MemoryChannelTypeData {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"MemoryChannelTypeData\", 2)?;\n\n state.serialize_field(\"raw\", 
&self.raw)?;\n\n state.serialize_field(\"value\", &self.value)?;\n\n state.end()\n\n }\n\n}\n\n\n\nimpl fmt::Display for MemoryChannelTypeData {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.value {\n\n MemoryChannelType::None => write!(f, \"{}\", &self.raw),\n\n _ => write!(f, \"{:?}\", &self.value),\n\n }\n\n }\n\n}\n", "file_path": "src/structs/types/memory_channel.rs", "rank": 93, "score": 30736.895454745703 }, { "content": " /// _raw_ is most useful when _value_ is None.\n\n /// This is most likely to occur when the standard was updated but\n\n /// this library code has not been updated to match the current\n\n /// standard.\n\n pub raw: u16,\n\n /// The contained [ProcessorFamily] value\n\n pub value: ProcessorFamily,\n\n}\n\n\n\nimpl fmt::Debug for ProcessorFamilyData2 {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(any::type_name::<ProcessorFamilyData2>())\n\n .field(\"raw\", &self.raw)\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Serialize for ProcessorFamilyData2 {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n", "file_path": "src/structs/types/processor_information.rs", "rank": 94, "score": 30736.843214489625 }, { "content": " S: Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"ProcessorFamilyData\", 2)?;\n\n state.serialize_field(\"raw\", &self.raw)?;\n\n state.serialize_field(\"value\", &self.value)?;\n\n state.end()\n\n }\n\n}\n\n\n\nimpl fmt::Display for ProcessorFamilyData {\n\n /// Displays ProcessorFamily either by name or as a hex value if the name for the value is unknown.\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.value {\n\n ProcessorFamily::None => write!(f, \"{:#X}\", &self.raw),\n\n _ => write!(f, \"{:?}\", &self.value),\n\n }\n\n }\n\n}\n\n\n\nimpl Deref for ProcessorFamilyData {\n", "file_path": "src/structs/types/processor_information.rs", "rank": 95, "score": 
30736.818962979778 }, { "content": " fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"ErrorCorrectionTypeData\", 2)?;\n\n state.serialize_field(\"raw\", &self.raw)?;\n\n state.serialize_field(\"value\", &self.value)?;\n\n state.end()\n\n }\n\n}\n\n\n\nimpl fmt::Display for ErrorCorrectionTypeData {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.value {\n\n ErrorCorrectionType::None => write!(f, \"{}\", &self.raw),\n\n _ => write!(f, \"{:?}\", &self.value),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/structs/types/cache_information.rs", "rank": 96, "score": 30736.603305912555 }, { "content": " where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"ProcessorFamilyData2\", 2)?;\n\n state.serialize_field(\"raw\", &self.raw)?;\n\n state.serialize_field(\"value\", &self.value)?;\n\n state.end()\n\n }\n\n}\n\n\n\nimpl fmt::Display for ProcessorFamilyData2 {\n\n /// Displays ProcessorFamily either by name or as a hex value if the name for the value is unknown.\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.value {\n\n ProcessorFamily::None => write!(f, \"{:#X}\", &self.raw),\n\n _ => write!(f, \"{:?}\", &self.value),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/structs/types/processor_information.rs", "rank": 97, "score": 30736.49807881103 }, { "content": "pub struct ChassisStateData {\n\n /// Raw value\n\n ///\n\n /// _raw_ is most useful when _value_ is None.\n\n /// This is most likely to occur when the standard was updated but\n\n /// this library code has not been updated to match the current\n\n /// standard.\n\n pub raw: u8,\n\n /// The contained [ChassisState] value\n\n pub value: ChassisState,\n\n}\n\n\n\nimpl fmt::Debug for ChassisStateData {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(any::type_name::<ChassisStateData>())\n\n 
.field(\"raw\", &self.raw)\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n", "file_path": "src/structs/types/system_chassis_information.rs", "rank": 98, "score": 46.31943619535369 }, { "content": " state.serialize_field(\"error_resolution\", &self.error_resolution())?;\n\n state.end()\n\n }\n\n}\n\n\n\n/// # Memory Error - Error Type Data\n\npub struct MemoryErrorTypeData {\n\n /// Raw value\n\n ///\n\n /// _raw_ is most useful when _value_ is None.\n\n /// This is most likely to occur when the standard was updated but\n\n /// this library code has not been updated to match the current\n\n /// standard.\n\n pub raw: u8,\n\n /// The contained [MemoryErrorType] value\n\n pub value: MemoryErrorType,\n\n}\n\n\n\nimpl fmt::Debug for MemoryErrorTypeData {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/structs/types/memory_error_information_32.rs", "rank": 99, "score": 43.97784398646255 } ]
Rust
dbcrossbarlib/src/drivers/postgres_shared/table.rs
faradayio/schemaconv
eeb808354af7d58f1782927eaa9e754d59544011
use itertools::Itertools; use std::{ collections::{HashMap, HashSet}, fmt, }; use super::{PgColumn, PgDataType, PgName, PgScalarDataType}; use crate::common::*; use crate::schema::Column; use crate::separator::Separator; #[derive(Debug)] pub(crate) enum CheckCatalog { Yes, No, } impl From<&IfExists> for CheckCatalog { fn from(if_exists: &IfExists) -> CheckCatalog { match if_exists { IfExists::Error | IfExists::Overwrite => CheckCatalog::No, IfExists::Append | IfExists::Upsert(_) => CheckCatalog::Yes, } } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct PgCreateTable { pub(crate) name: PgName, pub(crate) columns: Vec<PgColumn>, pub(crate) if_not_exists: bool, pub(crate) temporary: bool, } impl PgCreateTable { pub(crate) fn from_name_and_columns( schema: &Schema, table_name: PgName, columns: &[Column], ) -> Result<PgCreateTable> { let pg_columns = columns .iter() .map(|c| PgColumn::from_column(schema, c)) .collect::<Result<Vec<PgColumn>>>()?; Ok(PgCreateTable { name: table_name, columns: pg_columns, if_not_exists: false, temporary: false, }) } pub(crate) fn to_table(&self) -> Result<Table> { let columns = self .columns .iter() .map(|c| c.to_column()) .collect::<Result<Vec<Column>>>()?; Ok(Table { name: self.name.unquoted(), columns, }) } pub(crate) fn aligned_with( &self, other_table: &PgCreateTable, ) -> Result<PgCreateTable> { let column_map = self .columns .iter() .map(|c| (&c.name[..], c)) .collect::<HashMap<_, _>>(); Ok(PgCreateTable { name: self.name.clone(), columns: other_table .columns .iter() .map(|c| { if let Some(&col) = column_map.get(&c.name[..]) { Ok(col.to_owned()) } else { Err(format_err!( "could not find column {} in destination table: {}", c.name, column_map.keys().join(", "), )) } }) .collect::<Result<Vec<_>>>()?, if_not_exists: self.if_not_exists, temporary: self.temporary, }) } pub(crate) fn named_type_names(&self) -> HashSet<&PgName> { let mut names = HashSet::new(); for col in &self.columns { let scalar_ty = match &col.data_type { 
PgDataType::Array { ty, .. } => ty, PgDataType::Scalar(ty) => ty, }; if let PgScalarDataType::Named(name) = scalar_ty { names.insert(name); } } names } pub(crate) fn write_export_sql( &self, f: &mut dyn Write, source_args: &SourceArguments<Verified>, ) -> Result<()> { write!(f, "COPY (")?; self.write_export_select_sql(f, source_args)?; write!(f, ") TO STDOUT WITH CSV HEADER")?; Ok(()) } pub(crate) fn write_export_select_sql( &self, f: &mut dyn Write, source_args: &SourceArguments<Verified>, ) -> Result<()> { write!(f, "SELECT ")?; if self.columns.is_empty() { return Err(format_err!("cannot export 0 columns")); } let mut sep = Separator::new(","); for col in &self.columns { write!(f, "{}", sep.display())?; col.write_export_select_expr(f)?; } write!(f, " FROM {}", &self.name.quoted())?; if let Some(where_clause) = source_args.where_clause() { write!(f, " WHERE ({})", where_clause)?; } Ok(()) } pub(crate) fn write_count_sql( &self, f: &mut dyn Write, source_args: &SourceArguments<Verified>, ) -> Result<()> { writeln!(f, "SELECT COUNT(*)")?; writeln!(f, " FROM {}", &self.name.quoted())?; if let Some(where_clause) = source_args.where_clause() { writeln!(f, " WHERE ({})", where_clause)?; } Ok(()) } } impl fmt::Display for PgCreateTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "CREATE")?; if self.temporary { write!(f, " TEMPORARY")?; } write!(f, " TABLE")?; if self.if_not_exists { write!(f, " IF NOT EXISTS")?; } writeln!(f, " {} (", &self.name.quoted())?; for (idx, col) in self.columns.iter().enumerate() { write!(f, " {}", col)?; if idx + 1 == self.columns.len() { writeln!(f)?; } else { writeln!(f, ",")?; } } writeln!(f, ");")?; Ok(()) } }
use itertools::Itertools; use std::{ collections::{HashMap, HashSet}, fmt, }; use super::{PgColumn, PgDataType, PgName, PgScalarDataType}; use crate::common::*; use crate::schema::Column; use crate::separator::Separator; #[derive(Debug)] pub(crate) enum CheckCatalog { Yes, No, } impl From<&IfExists> for CheckCatalog { fn from(if_exists: &IfExists) -> CheckCatalog { match if_exists { IfExists::Error | IfExists::Overwrite => CheckCatalog::No, IfExists::Append | IfExists::Upsert(_) => CheckCatalog::Yes, } } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct PgCreateTable { pub(crate) name: PgName, pub(crate) columns: Vec<PgColumn>, pub(crate) if_not_exists: bool, pub(crate) temporary: bool, } impl PgCreateTable { pub(crate) fn from_name_and_columns( schema: &Schema, table_name: PgName, columns: &[Column], ) -> Result<PgCreateTable> { let pg_columns = columns .iter() .map(|c| PgColumn::from_column(schema, c)) .collect::<Result<Vec<PgColumn>>>()?; Ok(PgCreateTable { name: table_name, columns: pg_columns, if_not_exists: false, temporary: false, }) } pub(crate) fn to_table(&self) -> Result<Table> { let columns = self .columns .iter() .map(|c| c.to_column()) .collect::<Result<Vec<Column>>>()?; Ok(Table { name: self.name.unquoted(), columns, }) }
pub(crate) fn named_type_names(&self) -> HashSet<&PgName> { let mut names = HashSet::new(); for col in &self.columns { let scalar_ty = match &col.data_type { PgDataType::Array { ty, .. } => ty, PgDataType::Scalar(ty) => ty, }; if let PgScalarDataType::Named(name) = scalar_ty { names.insert(name); } } names } pub(crate) fn write_export_sql( &self, f: &mut dyn Write, source_args: &SourceArguments<Verified>, ) -> Result<()> { write!(f, "COPY (")?; self.write_export_select_sql(f, source_args)?; write!(f, ") TO STDOUT WITH CSV HEADER")?; Ok(()) } pub(crate) fn write_export_select_sql( &self, f: &mut dyn Write, source_args: &SourceArguments<Verified>, ) -> Result<()> { write!(f, "SELECT ")?; if self.columns.is_empty() { return Err(format_err!("cannot export 0 columns")); } let mut sep = Separator::new(","); for col in &self.columns { write!(f, "{}", sep.display())?; col.write_export_select_expr(f)?; } write!(f, " FROM {}", &self.name.quoted())?; if let Some(where_clause) = source_args.where_clause() { write!(f, " WHERE ({})", where_clause)?; } Ok(()) } pub(crate) fn write_count_sql( &self, f: &mut dyn Write, source_args: &SourceArguments<Verified>, ) -> Result<()> { writeln!(f, "SELECT COUNT(*)")?; writeln!(f, " FROM {}", &self.name.quoted())?; if let Some(where_clause) = source_args.where_clause() { writeln!(f, " WHERE ({})", where_clause)?; } Ok(()) } } impl fmt::Display for PgCreateTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "CREATE")?; if self.temporary { write!(f, " TEMPORARY")?; } write!(f, " TABLE")?; if self.if_not_exists { write!(f, " IF NOT EXISTS")?; } writeln!(f, " {} (", &self.name.quoted())?; for (idx, col) in self.columns.iter().enumerate() { write!(f, " {}", col)?; if idx + 1 == self.columns.len() { writeln!(f)?; } else { writeln!(f, ",")?; } } writeln!(f, ");")?; Ok(()) } }
pub(crate) fn aligned_with( &self, other_table: &PgCreateTable, ) -> Result<PgCreateTable> { let column_map = self .columns .iter() .map(|c| (&c.name[..], c)) .collect::<HashMap<_, _>>(); Ok(PgCreateTable { name: self.name.clone(), columns: other_table .columns .iter() .map(|c| { if let Some(&col) = column_map.get(&c.name[..]) { Ok(col.to_owned()) } else { Err(format_err!( "could not find column {} in destination table: {}", c.name, column_map.keys().join(", "), )) } }) .collect::<Result<Vec<_>>>()?, if_not_exists: self.if_not_exists, temporary: self.temporary, }) }
function_block-full_function
[ { "content": "/// Specify the the location of data or a schema.\n\npub trait Locator: fmt::Debug + fmt::Display + Send + Sync + 'static {\n\n /// Provide a mechanism for casting a `dyn Locator` back to the underlying,\n\n /// concrete locator type using Rust's `Any` type.\n\n ///\n\n /// See [this StackOverflow question][so] for a discussion of the technical\n\n /// details, and why we need a `Locator::as_any` method to use `Any`.\n\n ///\n\n /// This is a bit of a sketchy feature to provide, but we provide it for use\n\n /// with `supports_write_remote_data` and `write_remote_data`, which are\n\n /// used for certain locator pairs (i.e., Google Cloud Storage and BigQuery)\n\n /// to bypass our normal `local_data` and `write_local_data` transfers and\n\n /// use an external, optimized transfer method (such as direct loads from\n\n /// Google Cloud Storage into BigQuery).\n\n ///\n\n /// This should always be implemented as follows:\n\n ///\n\n /// ```no_compile\n\n /// impl Locator for MyLocator {\n\n /// fn as_any(&self) -> &dyn Any {\n\n /// self\n", "file_path": "dbcrossbarlib/src/locator.rs", "rank": 0, "score": 170485.24329095625 }, { "content": "#[test]\n\nfn find_schema() {\n\n let storage = TemporaryStorage::new(vec![\n\n \"s3://example/\".to_string(),\n\n \"gs://example/1/\".to_string(),\n\n \"gs://example/2/\".to_string(),\n\n ]);\n\n assert_eq!(storage.find_scheme(\"s3:\"), Some(\"s3://example/\"));\n\n assert_eq!(storage.find_scheme(\"gs:\"), Some(\"gs://example/1/\"));\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/temporary_storage.rs", "rank": 1, "score": 151316.2019414906 }, { "content": "#[test]\n\nfn temporary_table_name() {\n\n let table_name = \"project:dataset.table\".parse::<TableName>().unwrap();\n\n\n\n // Construct a temporary table name without a `--temporary` argument.\n\n let default_temp_name = table_name\n\n .temporary_table_name(&TemporaryStorage::new(vec![]))\n\n .unwrap()\n\n .to_string();\n\n 
assert!(default_temp_name.starts_with(\"project:dataset.temp_table_\"));\n\n\n\n // Now try it with a `--temporary` argument.\n\n let temporary_storage =\n\n TemporaryStorage::new(vec![\"bigquery:project2:temp\".to_owned()]);\n\n let temp_name = table_name\n\n .temporary_table_name(&temporary_storage)\n\n .unwrap()\n\n .to_string();\n\n assert!(temp_name.starts_with(\"project2:temp.temp_table_\"));\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/table_name.rs", "rank": 2, "score": 150509.4137245803 }, { "content": "#[test]\n\nfn rejects_recursive_named_types() {\n\n // Many recursive types are probably fine, but we haven't defined semantics\n\n // yet, so we return an error rather than getting into unknown territory.\n\n let json = r#\"\n\n {\n\n \"named_data_types\": [{\n\n \"name\": \"colors\",\n\n \"data_type\": { \"array\": { \"named\": \"colors\" } }\n\n }],\n\n \"table\": {\n\n \"name\": \"example\",\n\n \"columns\": [\n\n { \"name\": \"i\", \"is_nullable\": false, \"data_type\": { \"named\": \"colors\" }}\n\n ]\n\n }\n\n }\n\n \"#;\n\n assert!(serde_json::from_str::<Schema>(json).is_err());\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 3, "score": 147795.4491400503 }, { "content": "#[test]\n\nfn accepts_defined_type_names() {\n\n let json = r#\"\n\n {\n\n \"named_data_types\": [{\n\n \"name\": \"color\",\n\n \"data_type\": { \"one_of\": [\"red\", \"green\", \"blue\"] }\n\n }],\n\n \"tables\": [{\n\n \"name\": \"example\",\n\n \"columns\": [\n\n { \"name\": \"i\", \"is_nullable\": false, \"data_type\": { \"named\": \"color\" }}\n\n ]\n\n }]\n\n }\n\n \"#;\n\n let schema = serde_json::from_str::<Schema>(json).expect(\"could not parse schema\");\n\n let mut expected_named_data_types = HashMap::new();\n\n expected_named_data_types.insert(\n\n \"color\".to_owned(),\n\n NamedDataType {\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 4, "score": 147795.4491400503 }, { "content": "#[test]\n\nfn 
rejects_undefined_type_names() {\n\n let json = r#\"\n\n {\n\n \"named_data_types\": [],\n\n \"table\": {\n\n \"name\": \"example\",\n\n \"columns\": [\n\n { \"name\": \"i\", \"is_nullable\": false, \"data_type\": { \"named\": \"color\" }}\n\n ]\n\n }\n\n }\n\n \"#;\n\n assert!(serde_json::from_str::<Schema>(json).is_err());\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 5, "score": 147795.4491400503 }, { "content": "struct PgColumnSchema {\n\n column_name: String,\n\n is_nullable: String,\n\n data_type: String,\n\n udt_schema: String,\n\n udt_name: String,\n\n}\n\n\n\nimpl PgColumnSchema {\n\n /// Get the data type for a column.\n\n fn data_type(&self) -> Result<PgDataType> {\n\n pg_data_type(&self.data_type, &self.udt_schema, &self.udt_name)\n\n }\n\n}\n\n\n\n/// Fetch information about a table from the database.\n\n///\n\n/// Returns `None` if no matching table exists.\n\n#[instrument(level = \"trace\", skip(ctx))]\n\npub(crate) async fn fetch_from_url(\n\n ctx: &Context,\n\n database_url: &UrlWithHiddenPassword,\n\n table_name: &PgName,\n\n) -> Result<Option<PgSchema>> {\n\n let client = connect(ctx, database_url).await?;\n\n let schema = table_name.schema_or_public();\n\n let table = table_name.name();\n\n\n\n // Check to see if we have a table with this name.\n\n let count_matching_tables_sql = r#\"\n", "file_path": "dbcrossbarlib/src/drivers/postgres_shared/catalog.rs", "rank": 6, "score": 146137.56850241017 }, { "content": "#[test]\n\nfn preserves_valid() {\n\n let valid_names = &[\"a\", \"A\", \"_\", \"a2\", \"AA\", \"A_\", \"abc\"];\n\n for &n in valid_names {\n\n assert_eq!(ColumnName::try_from(n).unwrap().as_str(), n);\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 7, "score": 141827.11556355993 }, { "content": "#[test]\n\nfn rejects_invalid() {\n\n // The Turkish dotted İ character would break our underlying `data` layout.\n\n let invalid_names = &[\"\", \"2\", \"a,\", \"é\", 
\"İ\"];\n\n for &n in invalid_names {\n\n assert!(ColumnName::try_from(n).is_err());\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 8, "score": 141827.11556355993 }, { "content": "#[test]\n\nfn ignores_case_for_hash() {\n\n use std::collections::hash_map::DefaultHasher;\n\n\n\n let mut hasher_1 = DefaultHasher::new();\n\n ColumnName::try_from(\"a\").unwrap().hash(&mut hasher_1);\n\n\n\n let mut hasher_2 = DefaultHasher::new();\n\n ColumnName::try_from(\"A\").unwrap().hash(&mut hasher_2);\n\n\n\n assert_eq!(hasher_1.finish(), hasher_2.finish());\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 9, "score": 138943.78859908608 }, { "content": "#[test]\n\nfn ignores_case_for_comparison() {\n\n assert_eq!(\n\n ColumnName::try_from(\"a\").unwrap(),\n\n ColumnName::try_from(\"A\").unwrap(),\n\n );\n\n assert!(ColumnName::try_from(\"a\").unwrap() < ColumnName::try_from(\"B\").unwrap());\n\n assert!(ColumnName::try_from(\"A\").unwrap() < ColumnName::try_from(\"b\").unwrap());\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 10, "score": 138943.78859908608 }, { "content": "#[test]\n\nfn format_preserves_case() {\n\n let s = \"Aa\";\n\n let name = ColumnName::from_str(s).unwrap();\n\n assert_eq!(format!(\"{}\", name.quoted()), format!(\"`{}`\", s));\n\n assert_eq!(format!(\"{:?}\", name), format!(\"{:?}\", s));\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 11, "score": 138943.78859908608 }, { "content": "#[test]\n\n#[ignore]\n\nfn cp_tricky_column_names_fails() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"cp_tricky_column_names\");\n\n let src = testdir.src_path(\"fixtures/tricky_column_names.csv\");\n\n let schema = testdir.src_path(\"fixtures/tricky_column_names.sql\");\n\n let pg_table = post_test_table_url(\"testme1.cp_tricky_column_names\");\n\n let bq_table = 
bq_test_table(\"cp_tricky_column_names\");\n\n let gs_temp_dir = gs_test_dir_url(\"cp_from_bigquery_with_where\");\n\n let bq_temp_ds = bq_temp_dataset();\n\n\n\n // CSV to Postgres.\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"cp\",\n\n \"--if-exists=overwrite\",\n\n &format!(\"--schema=postgres-sql:{}\", schema.display()),\n\n &format!(\"csv:{}\", src.display()),\n\n &pg_table,\n\n ])\n\n .tee_output()\n", "file_path": "dbcrossbar/tests/cli/cp/combined.rs", "rank": 12, "score": 138943.78859908605 }, { "content": "/// Given a stream of streams CSV data, return another stream of CSV streams\n\n/// where the CSV data is approximately `chunk_size` long whenever possible.\n\npub fn rechunk_csvs(\n\n ctx: Context,\n\n chunk_size: usize,\n\n streams: BoxStream<CsvStream>,\n\n) -> Result<BoxStream<CsvStream>> {\n\n // Convert out input `BoxStream<CsvStream>` into a single, concatenated\n\n // synchronous `Read` object.\n\n let input_csv_stream = concatenate_csv_streams(ctx.clone(), streams)?;\n\n let csv_rdr = SyncStreamReader::new(input_csv_stream.data);\n\n\n\n // Create a channel to which we can write `CsvStream` values once we've\n\n // created them.\n\n let (csv_stream_sender, csv_stream_receiver) =\n\n mpsc::channel::<Result<CsvStream>>(1);\n\n\n\n // Run a synchronous background worker thread that parsers our sync CSV\n\n // `Read`er into a stream of `CsvStream`s.\n\n let worker_fut = spawn_blocking(move || -> Result<()> {\n\n let mut rdr = csv::Reader::from_reader(csv_rdr);\n\n let hdr = rdr\n", "file_path": "dbcrossbarlib/src/rechunk.rs", "rank": 13, "score": 132190.47457949337 }, { "content": "/// Look up a specifc driver by `Locator` scheme.\n\npub fn find_driver(\n\n scheme: &str,\n\n enable_unstable: bool,\n\n) -> Result<&'static dyn LocatorDriver> {\n\n KNOWN_DRIVERS_BY_SCHEME\n\n .get(scheme)\n\n .copied()\n\n .filter(|&d| !d.is_unstable() || enable_unstable)\n\n .ok_or_else(|| format_err!(\"unknown locator scheme {:?}\", scheme))\n\n}\n", "file_path": 
"dbcrossbarlib/src/drivers/mod.rs", "rank": 14, "score": 129942.0694408782 }, { "content": "#[async_trait]\n\ntrait CredentialsSource: fmt::Debug + fmt::Display + Send + Sync + 'static {\n\n /// Look up an appropriate set of credentials.\n\n async fn get_credentials(&self) -> Result<Option<Credentials>>;\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 15, "score": 122597.7727879491 }, { "content": "#[test]\n\nfn parse_schema_from_manual() {\n\n // We use this schema as an example in our manual, so make sure it parses.\n\n serde_json::from_str::<Schema>(include_str!(\n\n \"../../dbcrossbar/fixtures/dbcrossbar_schema.json\"\n\n ))\n\n .unwrap();\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 16, "score": 119394.69618965662 }, { "content": "/// Look up an environment variable by name, returning an error if it does not exist.\n\nfn var(name: &str) -> Result<String> {\n\n match try_var(name)? {\n\n Some(value) => Ok(value),\n\n None => Err(format_err!(\n\n \"expected environment variable {} to be set\",\n\n name,\n\n )),\n\n }\n\n}\n\n\n\n/// Load credentials stored in a file.\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 17, "score": 114480.79045822073 }, { "content": "#[test]\n\nfn column_without_mode() {\n\n let json = r#\"{\"type\":\"STRING\",\"name\":\"state\"}\"#;\n\n let col: BqColumn = serde_json::from_str(json).unwrap();\n\n assert_eq!(col.mode, Mode::Nullable);\n\n}\n\n\n\n/// A column mode.\n\n#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]\n\npub(crate) enum Mode {\n\n /// This column is `NOT NULL`.\n\n Required,\n\n\n\n /// This column can contain `NULL` values.\n\n Nullable,\n\n\n\n /// (Undocumented.) This column is actually an `ARRAY` column,\n\n /// but the `type` doesn't actually mention that. 
This is an undocumented\n\n /// value that we see in the output of `bq show --schema`.\n\n Repeated,\n\n}\n\n\n\nimpl Default for Mode {\n\n /// The `mode` field appears to default to `NULLABLE` in `bq show --schema`\n\n /// output, so use that as our default.\n\n fn default() -> Self {\n\n Mode::Nullable\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column.rs", "rank": 18, "score": 113974.39837630134 }, { "content": "SELECT column_name, is_nullable, data_type, udt_schema, udt_name \n\nFROM information_schema.columns\n\nWHERE\n\n table_schema = $1 AND\n\n table_name = $2\n\nORDER BY ordinal_position\n\n\"#;\n\n let rows = client.query(columns_sql, &[&schema, &table]).await?;\n\n let pg_columns = rows\n\n .into_iter()\n\n .map(|row| PgColumnSchema {\n\n column_name: row.get(\"column_name\"),\n\n is_nullable: row.get(\"is_nullable\"),\n\n data_type: row.get(\"data_type\"),\n\n udt_schema: row.get(\"udt_schema\"),\n\n udt_name: row.get(\"udt_name\"),\n\n })\n\n .collect::<Vec<PgColumnSchema>>();\n\n\n\n // Do we have any PostGIS geometry columns?\n", "file_path": "dbcrossbarlib/src/drivers/postgres_shared/catalog.rs", "rank": 19, "score": 113067.16036165057 }, { "content": "/// Should we retry an attempted deletion?\n\nfn should_retry_delete(err: &Error) -> bool {\n\n if let Some(err) = original_http_error(err) {\n\n // There appears to be some sort of Google Cloud Storage 403 race\n\n // condition on delete that shows up when preparing buckets. 
We have no\n\n // idea what causes this.\n\n err.status() == Some(StatusCode::FORBIDDEN)\n\n } else {\n\n false\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/storage/rm_r.rs", "rank": 20, "score": 112384.34090467819 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Table {\n\n schema: TableSchema,\n\n}\n\n\n\n/// Look up the schema of the specified table.\n\n#[instrument(level = \"trace\")]\n\npub(crate) async fn schema(name: &TableName) -> Result<BqTable> {\n\n trace!(\"fetching schema for {:?}\", name);\n\n\n\n // Build our URL.\n\n let url = format!(\n\n \"https://bigquery.googleapis.com/bigquery/v2/projects/{}/datasets/{}/tables/{}\",\n\n percent_encode(name.project()),\n\n percent_encode(name.dataset()),\n\n percent_encode(name.table()),\n\n );\n\n\n\n // Look up our schema.\n\n let client = Client::new().await?;\n\n let table = client.get::<Table, _, _>(&url, NoQuery).await?;\n\n Ok(BqTable {\n\n name: name.to_owned(),\n\n columns: table.schema.fields,\n\n })\n\n}\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/bigquery/schema.rs", "rank": 21, "score": 111373.29139806886 }, { "content": "/// If a BigQuery extraction failed with `err`, should we retry it?\n\nfn should_retry_extract(err: &Error) -> bool {\n\n if let Some(bigquery_error) = original_bigquery_error(err) {\n\n // Retry `accessDenied` errors, which appear to be caused by some sort\n\n // of race condition where BigQuery extract workers don't receive\n\n // authorization to write to the output bucket soon enough.\n\n bigquery_error.is_access_denied()\n\n } else {\n\n // Not a BigQuery error.\n\n false\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/gs/write_remote_data.rs", "rank": 22, "score": 110640.33267631845 }, { "content": "#[test]\n\nfn parse_bool() {\n\n let examples = &[\n\n // True.\n\n (\"1\", true),\n\n (\"y\", true),\n\n (\"Y\", true),\n\n (\"yes\", true),\n\n (\"YES\", true),\n\n (\"Yes\", true),\n\n (\"on\", 
true),\n\n (\"ON\", true),\n\n (\"On\", true),\n\n (\"t\", true),\n\n (\"T\", true),\n\n // False.\n\n (\"0\", false),\n\n (\"n\", false),\n\n (\"N\", false),\n\n (\"no\", false),\n\n (\"NO\", false),\n", "file_path": "dbcrossbarlib/src/from_csv_cell.rs", "rank": 23, "score": 109460.95696516802 }, { "content": "#[test]\n\nfn random_tag() {\n\n assert_eq!(TemporaryStorage::random_tag().len(), 10);\n\n}\n", "file_path": "dbcrossbarlib/src/temporary_storage.rs", "rank": 24, "score": 109445.33626526364 }, { "content": "#[test]\n\nfn data_type_roundtrip() {\n\n let data_types = vec![\n\n DataType::Array(Box::new(DataType::Text)),\n\n DataType::Bool,\n\n DataType::Date,\n\n DataType::Decimal,\n\n DataType::Float32,\n\n DataType::Float64,\n\n DataType::Int16,\n\n DataType::Int32,\n\n DataType::Int64,\n\n DataType::Json,\n\n DataType::Named(\"name\".to_owned()),\n\n DataType::OneOf(vec![\"a\".to_owned()]),\n\n DataType::Struct(vec![StructField {\n\n name: \"x\".to_owned(),\n\n is_nullable: false,\n\n data_type: DataType::Float32,\n\n }]),\n\n DataType::Text,\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 25, "score": 108930.33196642267 }, { "content": "#[test]\n\nfn round_trip_serialization() {\n\n let mut named_data_types = HashMap::new();\n\n named_data_types.insert(\n\n \"color\".to_owned(),\n\n NamedDataType {\n\n name: \"color\".to_owned(),\n\n data_type: DataType::OneOf(vec![\n\n \"red\".to_owned(),\n\n \"green\".to_owned(),\n\n \"blue\".to_owned(),\n\n ]),\n\n },\n\n );\n\n let schema = Schema {\n\n named_data_types,\n\n table: Table {\n\n name: \"example\".to_owned(),\n\n columns: vec![Column {\n\n name: \"i\".to_owned(),\n\n is_nullable: false,\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 26, "score": 108930.33196642267 }, { "content": "/// Look up an environment variable by name, returning `Ok(None)` if it does not\n\n/// exist.\n\nfn try_var(name: &str) -> Result<Option<String>> {\n\n match env::var(name) {\n\n Ok(value) => 
Ok(Some(value)),\n\n Err(env::VarError::NotPresent) => Ok(None),\n\n Err(env::VarError::NotUnicode(..)) => Err(format_err!(\n\n \"environment variable {} cannot be converted to UTF-8\",\n\n name,\n\n )),\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 27, "score": 107599.0028923413 }, { "content": "#[test]\n\nfn temporaries_can_be_added_and_removed() {\n\n let temp = tempfile::Builder::new()\n\n .prefix(\"dbcrossbar\")\n\n .suffix(\".toml\")\n\n .tempfile()\n\n .unwrap();\n\n let path = temp.path();\n\n let mut config = Configuration::from_path(path).unwrap();\n\n let key = Key::global(\"temporary\");\n\n assert_eq!(config.temporaries().unwrap(), Vec::<String>::new());\n\n config.add_to_string_array(&key, \"s3://example/\").unwrap();\n\n assert_eq!(config.temporaries().unwrap(), &[\"s3://example/\".to_owned()]);\n\n config.write().unwrap();\n\n config = Configuration::from_path(path).unwrap();\n\n assert_eq!(config.temporaries().unwrap(), &[\"s3://example/\".to_owned()]);\n\n config\n\n .remove_from_string_array(&key, \"s3://example/\")\n\n .unwrap();\n\n assert_eq!(config.temporaries().unwrap(), Vec::<String>::new());\n\n}\n", "file_path": "dbcrossbarlib/src/config.rs", "rank": 28, "score": 107196.79256820842 }, { "content": "#[test]\n\nfn data_type_serialization_examples() {\n\n // Our serialization format is an external format, so let's write some tests\n\n // to make sure we don't change it accidentally.\n\n let examples = &[\n\n (\n\n DataType::Array(Box::new(DataType::Text)),\n\n json!({\"array\":\"text\"}),\n\n ),\n\n (DataType::Bool, json!(\"bool\")),\n\n (DataType::Date, json!(\"date\")),\n\n (DataType::Decimal, json!(\"decimal\")),\n\n (DataType::Float32, json!(\"float32\")),\n\n (DataType::Float64, json!(\"float64\")),\n\n (DataType::Int16, json!(\"int16\")),\n\n (DataType::Int32, json!(\"int32\")),\n\n (DataType::Int64, json!(\"int64\")),\n\n (DataType::Json, json!(\"json\")),\n\n (\n\n 
DataType::Named(\"name\".to_owned()),\n\n json!({ \"named\": \"name\" }),\n", "file_path": "dbcrossbarlib/src/schema.rs", "rank": 29, "score": 106695.56371554207 }, { "content": "/// All known drivers.\n\npub fn all_drivers() -> &'static [Box<dyn LocatorDriver>] {\n\n &KNOWN_DRIVERS[..]\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/mod.rs", "rank": 30, "score": 105818.94314998391 }, { "content": "#[test]\n\n#[ignore]\n\nfn bigquery_roundtrips_structs() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"bigquery_roundtrips_structs\");\n\n let raw_src_path = testdir.src_path(\"fixtures/structs/struct.json\");\n\n let src = testdir.path(\"structs.csv\");\n\n let raw_data_type_path =\n\n testdir.src_path(\"fixtures/structs/struct-data-type.json\");\n\n let schema = testdir.path(\"structs-schema.json\");\n\n let bq_temp_ds = bq_temp_dataset();\n\n let gs_temp_dir = gs_test_dir_url(\"bigquery_roundtrips_structs\");\n\n let bq_table = bq_test_table(\"bigquery_roundtrips_structs\");\n\n\n\n // Use our example JSON to create a CSV file with two columns: One\n\n // containing our struct, and the other containing a single-element array\n\n // containing our struct.\n\n let raw_src = fs::read_to_string(&raw_src_path).unwrap();\n\n let src_data = format!(\n\n r#\"struct,structs\n\n\"{escaped}\",\"[{escaped}]\"\n\n\"#,\n\n escaped = raw_src.replace('\\n', \" \").replace('\"', \"\\\"\\\"\"),\n", "file_path": "dbcrossbar/tests/cli/cp/bigquery.rs", "rank": 31, "score": 104991.66355902412 }, { "content": "#[test]\n\n#[ignore]\n\nfn bigquery_record_columns() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"bigquery_record_columns\");\n\n let bq_temp_ds = bq_temp_dataset();\n\n let gs_temp_dir = gs_test_dir_url(\"bigquery_record_columns_to_json\");\n\n\n\n let dataset_name = bq_temp_dataset_name();\n\n let bare_dataset_name =\n\n &dataset_name[dataset_name.find(':').expect(\"no colon\") + 1..];\n\n let table_name = format!(\"record_cols_{}\", 
TemporaryStorage::random_tag());\n\n let locator = format!(\"bigquery:{}.{}\", dataset_name, table_name);\n\n\n\n // Create a BigQuery table containing record columns.\n\n let sql = format!(\n\n \"\n", "file_path": "dbcrossbar/tests/cli/cp/bigquery.rs", "rank": 32, "score": 104831.4954929763 }, { "content": "#[test]\n\nfn from_str_parses_schemas() {\n\n let examples = &[\n\n (\"postgres://user:pass@host/db#table\", \"table\"),\n\n (\"postgres://user:pass@host/db#public.table\", \"public.table\"),\n\n (\n\n \"postgres://user:pass@host/db#testme1.table\",\n\n \"testme1.table\",\n\n ),\n\n ];\n\n for &(url, table_name) in examples {\n\n assert_eq!(\n\n PostgresLocator::from_str(url).unwrap().table_name,\n\n table_name.parse::<PgName>().unwrap(),\n\n );\n\n }\n\n}\n\n\n\nimpl Locator for PostgresLocator {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n", "file_path": "dbcrossbarlib/src/drivers/postgres/mod.rs", "rank": 33, "score": 104577.23309315578 }, { "content": "#[test]\n\nfn crc32c_matches_gcloud() {\n\n // Check that `data` hashes to `expected`.\n\n let check = |data: &[u8], expected: u32| {\n\n let mut hasher = Hasher::new();\n\n hasher.update(data);\n\n assert_eq!(hasher.finish(), expected);\n\n };\n\n\n\n // These test cases are from https://tools.ietf.org/html/rfc3720#page-217\n\n // and https://github.com/google/crc32c/blob/master/src/crc32c_unittest.cc\n\n check(&[0u8; 32], 0x8a91_36aa);\n\n check(&[0xff; 32], 0x62a8_ab43);\n\n let mut buf = [0u8; 32];\n\n for i in 0u8..=31 {\n\n buf[usize::from(i)] = i;\n\n }\n\n check(&buf, 0x46dd_794e);\n\n for i in 0u8..=31 {\n\n buf[usize::from(i)] = 31 - i;\n\n }\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/crc32c_stream.rs", "rank": 34, "score": 103071.9111904492 }, { "content": "#[test]\n\nfn conv_old_dbcrossbar_schema_to_new() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"conv_old_dbcrossbar_schema_to_new\");\n\n\n\n static INPUT: &str = r#\"\n\n{\n\n \"name\": \"images\",\n\n \"columns\": [\n\n {\n\n 
\"name\": \"id\",\n\n \"is_nullable\": false,\n\n \"data_type\": \"uuid\"\n\n }\n\n ]\n\n}\n\n\"#;\n\n\n\n static EXPECTED: &str = r#\"\n\n{\n\n \"named_data_types\": [],\n\n \"tables\": [{\n", "file_path": "dbcrossbar/tests/cli/conv.rs", "rank": 35, "score": 102566.47106449977 }, { "content": "#[test]\n\nfn conv_pg_sql_to_bq_schema() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"conv_pg_sql_to_bq_schema\");\n\n let output = testdir\n\n .cmd()\n\n .args(&[\"schema\", \"conv\", \"postgres-sql:-\", \"bigquery-schema:-\"])\n\n .output_with_stdin(INPUT_SQL)\n\n .expect_success();\n\n assert!(output.stdout_str().contains(\"GEOGRAPHY\"));\n\n assert!(output.stdout_str().contains(\"REPEATED\"));\n\n}\n\n\n", "file_path": "dbcrossbar/tests/cli/conv.rs", "rank": 36, "score": 102566.47106449977 }, { "content": "#[test]\n\nfn conv_bq_schema_to_pg_sql() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"conv_bq_schema_to_pg_sql\");\n\n let input_json = testdir.src_path(\"fixtures/bigquery_schema.json\");\n\n let expected_sql = testdir.src_path(\"fixtures/bigquery_schema_converted.sql\");\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"schema\",\n\n \"conv\",\n\n &format!(\"bigquery-schema:{}\", input_json.display()),\n\n \"postgres-sql:output.sql\",\n\n ])\n\n .expect_success();\n\n let expected = fs::read_to_string(&expected_sql).unwrap();\n\n testdir.expect_file_contents(\"output.sql\", &expected);\n\n}\n\n\n", "file_path": "dbcrossbar/tests/cli/conv.rs", "rank": 37, "score": 102566.47106449977 }, { "content": "fn parse_locator(s: &str, enable_unstable: bool) -> Result<BoxLocator> {\n\n // Parse our locator into a URL-style scheme and the rest.\n\n lazy_static! 
{\n\n static ref SCHEME_RE: Regex =\n\n Regex::new(\"^[A-Za-z][-A-Za-z0-9+.]*:\").expect(\"invalid regex in source\");\n\n }\n\n let cap = SCHEME_RE\n\n .captures(s)\n\n .ok_or_else(|| format_err!(\"cannot parse locator: {:?}\", s))?;\n\n let scheme = &cap[0];\n\n\n\n // Select an appropriate locator type.\n\n let driver = find_driver(scheme, enable_unstable)?;\n\n driver.parse(s)\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/locator.rs", "rank": 38, "score": 101676.426444507 }, { "content": "#[test]\n\nfn csv_stream_name_handles_directory_inputs() {\n\n let expected = &[\n\n (\"dir/\", \"dir/file1.csv\", \"file1\"),\n\n (\"dir\", \"dir/file1.csv\", \"file1\"),\n\n (\"dir/\", \"dir/subdir/file2.csv\", \"subdir/file2\"),\n\n (\n\n \"s3://bucket/dir/\",\n\n \"s3://bucket/dir/subdir/file3.csv\",\n\n \"subdir/file3\",\n\n ),\n\n ];\n\n for &(base_path, file_path, stream_name) in expected {\n\n assert_eq!(csv_stream_name(base_path, file_path).unwrap(), stream_name);\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/csv_stream.rs", "rank": 39, "score": 100978.35338589332 }, { "content": "#[test]\n\nfn postgres_name_is_quoted_correctly() {\n\n assert_eq!(\n\n format!(\"{}\", PgName::from_str(\"example\").unwrap().quoted()),\n\n \"\\\"example\\\"\"\n\n );\n\n assert_eq!(\n\n format!(\"{}\", PgName::from_str(\"schema.example\").unwrap().quoted()),\n\n \"\\\"schema\\\".\\\"example\\\"\"\n\n );\n\n\n\n // Don't parse this one, because we haven't decided how to parse weird names\n\n // like this yet.\n\n let with_quote = PgName {\n\n schema: Some(\"testme1\".to_owned()),\n\n name: \"lat-\\\"lon\".to_owned(),\n\n };\n\n assert_eq!(\n\n format!(\"{}\", with_quote.quoted()),\n\n \"\\\"testme1\\\".\\\"lat-\\\"\\\"lon\\\"\"\n\n );\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/postgres_shared/mod.rs", "rank": 40, "score": 100978.35338589332 }, { "content": "#[test]\n\nfn csv_stream_name_handles_file_inputs() {\n\n let expected = &[\n\n (\"/path/to/file1.csv\", \"file1\"),\n\n 
(\"file2.csv\", \"file2\"),\n\n (\"s3://bucket/dir/file3.csv\", \"file3\"),\n\n (\"gs://bucket/dir/file4.csv\", \"file4\"),\n\n ];\n\n for &(file_path, stream_name) in expected {\n\n assert_eq!(csv_stream_name(file_path, file_path).unwrap(), stream_name);\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/csv_stream.rs", "rank": 41, "score": 100978.35338589332 }, { "content": "#[test]\n\nfn detects_duplicate_field_names() {\n\n let input = r#\"\n", "file_path": "dbcrossbarlib/src/drivers/dbcrossbar_ts/ast.rs", "rank": 42, "score": 100978.35338589332 }, { "content": "#[test]\n\n#[ignore]\n\nfn cp_pg_tricky_column_types() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"cp_pg_tricky_column_types\");\n\n let src = testdir.src_path(\"fixtures/more_pg_types.csv\");\n\n let schema = testdir.src_path(\"fixtures/more_pg_types.sql\");\n\n let pg_table = post_test_table_url(\"more_pg_types\");\n\n\n\n // Create a database table manually, forcing the use of the actual Postgres\n\n // types we want to test, and not the nearest `dbcrossbar` portable\n\n // equivalents.\n\n Command::new(\"psql\")\n\n .arg(postgres_test_url())\n\n .args(&[\"--command\", \"DROP TABLE IF EXISTS more_pg_types;\"])\n\n .expect_success();\n\n Command::new(\"psql\")\n\n .arg(postgres_test_url())\n\n .args(&[\n\n \"--command\",\n\n include_str!(\"../../../fixtures/more_pg_types.sql\"),\n\n ])\n\n .expect_success();\n", "file_path": "dbcrossbar/tests/cli/cp/postgres.rs", "rank": 43, "score": 100896.95771492927 }, { "content": "CREATE TABLE tricky_column_names (\n\n \"person__Delivery Zone 4.14\" text NOT NULL,\n\n \"person__$Region - 8/1\" text,\n\n \"person__Do you have any allergies?\" text,\n\n \"name\" text,\n\n \"Name\" text\n\n)\n", "file_path": "dbcrossbar/fixtures/tricky_column_names.sql", "rank": 44, "score": 100286.38119816073 }, { "content": "#[test]\n\nfn conv_pg_sql_to_dbcrossbar_schema_to_pg_sql() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"conv_pg_sql_to_pg_sql\");\n\n let output1 
= testdir\n\n .cmd()\n\n .args(&[\"schema\", \"conv\", \"postgres-sql:-\", \"dbcrossbar-schema:-\"])\n\n .output_with_stdin(EXAMPLE_SQL)\n\n .expect_success();\n\n let output2 = testdir\n\n .cmd()\n\n .args(&[\"schema\", \"conv\", \"dbcrossbar-schema:-\", \"postgres-sql:-\"])\n\n .output_with_stdin(output1.stdout_str())\n\n .expect_success();\n\n assert!(output2.stdout_str().contains(\"CREATE TABLE\"));\n\n\n\n // And make sure it round-trips.\n\n let output3 = testdir\n\n .cmd()\n\n .args(&[\"schema\", \"conv\", \"postgres-sql:-\", \"dbcrossbar-schema:-\"])\n\n .output_with_stdin(output2.stdout_str())\n\n .expect_success();\n\n assert_eq!(output3.stdout_str(), output1.stdout_str());\n\n}\n\n\n", "file_path": "dbcrossbar/tests/cli/conv.rs", "rank": 45, "score": 98836.46269062397 }, { "content": "#[test]\n\nfn parses_shopify_schema() -> Result<()> {\n\n let file_string = include_str!(\"../../../../dbcrossbar/fixtures/shopify.ts\");\n\n let source_file =\n\n SourceFile::parse(\"shopify.ts\".to_owned(), file_string.to_owned())?;\n\n for def in &[\"Order\"] {\n\n source_file.definition_to_table(def)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/dbcrossbar_ts/ast.rs", "rank": 46, "score": 97431.25304700284 }, { "content": "/// Make sure a table name is legal for PostgreSQL.\n\n///\n\n/// This will use an valid-looking table name if it can find one somewhere in\n\n/// the string, or it will return a default value.\n\nfn sanitize_table_name(table_name: &str) -> Result<String> {\n\n lazy_static! 
{\n\n static ref RE: Regex = Regex::new(\n\n r\"(?x)\n\n ([_a-zA-Z][_a-zA-Z0-9]*\\.)?\n\n ([_a-zA-Z][_a-zA-Z0-9]*)\n\n $\"\n\n )\n\n .expect(\"could not compile regex in source\");\n\n }\n\n if let Some(cap) = RE.captures(table_name) {\n\n Ok(cap[0].to_owned())\n\n } else {\n\n // Just use a generic table name.\n\n Ok(\"data\".to_owned())\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/postgres_sql/mod.rs", "rank": 47, "score": 96625.55159914926 }, { "content": "CREATE TYPE mood AS ENUM ('happy', 'sad', 'amused');\n\n\n", "file_path": "dbcrossbarlib/src/drivers/postgres_shared/schema/schema_sql_example.sql", "rank": 48, "score": 95206.92347205618 }, { "content": "CREATE TYPE color AS ENUM ('red', 'green', 'blue');\n\n\n", "file_path": "dbcrossbarlib/src/drivers/postgres_shared/schema/schema_sql_example.sql", "rank": 49, "score": 95206.92347205618 }, { "content": " }\n\n\n\n /// Quote this for use in SQL.\n\n pub(crate) fn quoted(&self) -> ColumnNameQuoted<'_> {\n\n ColumnNameQuoted(self)\n\n }\n\n\n\n /// Quote this for use in JavaScript.\n\n pub(crate) fn javascript_quoted(&self) -> ColumnNameJavaScriptQuoted<'_> {\n\n ColumnNameJavaScriptQuoted(self)\n\n }\n\n}\n\n\n\nimpl PartialEq for ColumnName {\n\n fn eq(&self, other: &Self) -> bool {\n\n // Compare only the lowercase versions.\n\n self.as_lowercase() == other.as_lowercase()\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 50, "score": 82147.16668290622 }, { "content": "impl fmt::Debug for ColumnName {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(self.as_str(), f)\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for ColumnName {\n\n type Error = Error;\n\n\n\n fn try_from(s: &str) -> Result<Self, Self::Error> {\n\n // Check for validity.\n\n let mut chars = s.chars();\n\n match chars.next() {\n\n Some(c) if c == '_' || c.is_ascii_alphabetic() => {}\n\n _ => {\n\n return Err(format_err!(\n\n \"BigQuery column name {:?} must 
start with an underscore or an ASCII letter\",\n\n s,\n\n ));\n\n }\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 51, "score": 82144.32520737239 }, { "content": "}\n\n\n\n/// A wrapper type used to display column names as quoted JavaScript\n\n/// identifiers.\n\n///\n\n/// TODO: Do we need to anything special with case-handling here? BigQuery\n\n/// ignores case, but JavaScript treats it as significant.\n\npub(crate) struct ColumnNameJavaScriptQuoted<'a>(&'a ColumnName);\n\n\n\nimpl<'a> fmt::Display for ColumnNameJavaScriptQuoted<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"\\\"{}\\\"\", self.0.as_str())\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 52, "score": 82137.61840009819 }, { "content": " fn deserialize<D>(deserializer: D) -> Result<ColumnName, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let s: &str = Deserialize::deserialize(deserializer)?;\n\n ColumnName::try_from(s).map_err(de::Error::custom)\n\n }\n\n}\n\n\n\n/// A wrapper type used to display column names in a quoted format.\n\n///\n\n/// We avoid defining `Display` directly on `ColumnName`, so that there's no way\n\n/// to display it without making a decision.\n\npub(crate) struct ColumnNameQuoted<'a>(&'a ColumnName);\n\n\n\nimpl<'a> fmt::Display for ColumnNameQuoted<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n // Always quote, just in case the column name is a keyword.\n\n write!(f, \"`{}`\", self.0.as_str())\n\n }\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 53, "score": 82136.87065935855 }, { "content": "impl Eq for ColumnName {}\n\n\n\nimpl PartialOrd for ColumnName {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Ord for ColumnName {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n 
self.as_lowercase().cmp(other.as_lowercase())\n\n }\n\n}\n\n\n\nimpl Hash for ColumnName {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.as_lowercase().hash(state);\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 54, "score": 82136.85679412168 }, { "content": "//! BigQuery column names.\n\n\n\nuse serde::{de, Deserialize, Deserializer, Serialize, Serializer};\n\nuse std::{\n\n cmp::Ordering,\n\n convert::TryFrom,\n\n fmt,\n\n hash::{Hash, Hasher},\n\n str::FromStr,\n\n};\n\n\n\nuse crate::common::*;\n\n\n\n/// A BigQuery column name.\n\n///\n\n/// This behaves like a string that preserves case, but which ignores it for\n\n/// comparisons. It may only contain valid BigQuery column names.\n\n///\n\n/// According to the official docs:\n\n///\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 55, "score": 82133.5702757225 }, { "content": "}\n\n\n\nimpl FromStr for ColumnName {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Self::try_from(s)\n\n }\n\n}\n\n\n\nimpl Serialize for ColumnName {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n self.as_str().serialize(serializer)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ColumnName {\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 56, "score": 82132.86779522905 }, { "content": "/// > A column name must contain only letters (a-z, A-Z), numbers (0-9), or\n\n/// > underscores (_), and it must start with a letter or underscore. The\n\n/// > maximum column name length is 128 characters. A column name cannot use any\n\n/// > of the following prefixes:\n\n/// >\n\n/// > - _TABLE_\n\n/// > - _FILE_\n\n/// > - _PARTITION\n\n/// >\n\n/// > Duplicate column names are not allowed even if the case differs. 
For\n\n/// > example, a column named Column1 is considered identical to a column named\n\n/// > column1.\n\n///\n\n/// [docs]: https://cloud.google.com/bigquery/docs/schemas#column_names\n\n#[derive(Clone)]\n\npub(crate) struct ColumnName {\n\n /// The original, mixed-case string, followed by an all-lowercase copy.\n\n ///\n\n /// Since we know that ASCII strings always have one character per byte, and\n\n /// that lowercasing a string doesn't change its length, we can assume that\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 57, "score": 82130.91126221747 }, { "content": " /// the dividing point is always exactly in the middle.\n\n data: String,\n\n}\n\n\n\nimpl ColumnName {\n\n /// The original string, including case information.\n\n pub(crate) fn as_str(&self) -> &str {\n\n // We store the original string in the first half.\n\n &self.data[..self.data.len() / 2]\n\n }\n\n\n\n /// Am all-lowecase version. Used for comparison.\n\n fn as_lowercase(&self) -> &str {\n\n // We store the lowercase string in the second half.\n\n &self.data[self.data.len() / 2..]\n\n }\n\n\n\n /// Convert this to a portable name.\n\n pub(crate) fn to_portable_name(&self) -> String {\n\n self.as_str().to_owned()\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 58, "score": 82130.58759559624 }, { "content": " }\n\n if !chars.all(|c| c == '_' || c.is_ascii_alphanumeric()) {\n\n return Err(format_err!(\"BigQuery column name {:?} must contain only underscores, ASCII letters, or ASCII digits\", s,));\n\n }\n\n\n\n // Build data.\n\n let mut data = String::with_capacity(s.len() * 2);\n\n data.push_str(s);\n\n data.extend(s.chars().map(|c| c.to_ascii_lowercase()));\n\n assert!(data.len() == 2 * s.len());\n\n Ok(ColumnName { data })\n\n }\n\n}\n\n\n\nimpl TryFrom<&String> for ColumnName {\n\n type Error = Error;\n\n\n\n fn try_from(s: &String) -> Result<Self, Self::Error> {\n\n Self::try_from(&s[..])\n\n }\n", "file_path": 
"dbcrossbarlib/src/drivers/bigquery_shared/column_name.rs", "rank": 59, "score": 82129.73442673334 }, { "content": "/// Convert a cell to PostgreSQL `BINARY` format.\n\nfn cell_to_binary(wtr: &mut BufferedWriter, col: &PgColumn, cell: &str) -> Result<()> {\n\n if cell.is_empty() && col.is_nullable {\n\n // We found an empty string in the CSV and this column is\n\n // nullable, so represent it as an SQL `NULL`. If the column\n\n // isn't nullable, then somebody else will have to figure out\n\n // if they can do anything with the empty string.\n\n wtr.write_i32::<NE>(-1)?;\n\n } else {\n\n match &col.data_type {\n\n PgDataType::Array {\n\n dimension_count,\n\n ty,\n\n } => {\n\n array_to_binary(wtr, *dimension_count, ty, cell)?;\n\n }\n\n PgDataType::Scalar(ty) => {\n\n scalar_to_binary(wtr, ty, cell)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/postgres/csv_to_binary/mod.rs", "rank": 60, "score": 73482.28611348322 }, { "content": "#[derive(Debug)]\n\nstruct CredentialsSources {\n\n sources: Vec<Box<dyn CredentialsSource>>,\n\n}\n\n\n\nimpl CredentialsSources {\n\n /// Create a new list of credentials sources that will be searched in order.\n\n fn new(sources: Vec<Box<dyn CredentialsSource>>) -> Self {\n\n Self { sources }\n\n }\n\n}\n\n\n\nimpl fmt::Display for CredentialsSources {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for s in &self.sources {\n\n write!(f, \"{}\", s)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 61, "score": 72949.97930883527 }, { "content": "#[derive(Debug)]\n\nstruct EnvMapping {\n\n key: &'static str,\n\n var: &'static str,\n\n optional: bool,\n\n}\n\n\n\nimpl EnvMapping {\n\n /// Fetch the value of `key` from `var`.\n\n fn required(key: &'static str, var: &'static str) -> Self {\n\n Self {\n\n key,\n\n var,\n\n optional: false,\n\n }\n\n }\n\n\n\n /// Fetch the value of `key` from `var`, if present.\n\n fn optional(key: &'static str, 
var: &'static str) -> Self {\n\n Self {\n\n key,\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 62, "score": 72949.97930883527 }, { "content": "#[derive(Debug)]\n\nstruct FileCredentialsSource {\n\n key: &'static str,\n\n path: PathBuf,\n\n}\n\n\n\nimpl FileCredentialsSource {\n\n /// Specify how to find credentials in a file. The contents of the file will\n\n /// be mapped to `key` in the credential.\n\n fn new(key: &'static str, path: PathBuf) -> Self {\n\n Self { key, path }\n\n }\n\n}\n\n\n\nimpl fmt::Display for FileCredentialsSource {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"- The file {}\", self.path.display())\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 63, "score": 71754.35196041028 }, { "content": "#[derive(Debug)]\n\nstruct EnvCredentialsSource {\n\n mapping: Vec<EnvMapping>,\n\n}\n\n\n\nimpl EnvCredentialsSource {\n\n /// Create a new `EnvCredentialsSource`.\n\n ///\n\n /// `mapping` should contain at least one element. 
The first element must\n\n /// not be `optional`.\n\n fn new(mapping: Vec<EnvMapping>) -> Self {\n\n // Check our preconditions with assertions, since all callers will be\n\n // hard-coded in the source.\n\n assert!(!mapping.is_empty());\n\n assert!(!mapping[0].optional);\n\n Self { mapping }\n\n }\n\n}\n\n\n\nimpl fmt::Display for EnvCredentialsSource {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "dbcrossbarlib/src/credentials.rs", "rank": 64, "score": 71754.35196041028 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct ErrorResponse {\n\n /// The actual error.\n\n error: GCloudError,\n\n}\n\n\n\n/// Information about a GCloud error.\n\n#[derive(Debug, Deserialize)]\n\n#[allow(dead_code)]\n\npub(crate) struct GCloudError {\n\n pub(crate) code: i32,\n\n pub(crate) message: String,\n\n pub(crate) errors: Vec<ErrorDetail>,\n\n}\n\n\n\nimpl fmt::Display for GCloudError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Google Cloud error: {} {}\", self.code, self.message)\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/client.rs", "rank": 65, "score": 70622.62571968905 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Value {\n\n /// The actual value. 
This is normally represented as a string.\n\n ///\n\n /// This might also be a nested `Row` object, but we don't handle that yet.\n\n #[serde(rename = \"v\")]\n\n value: serde_json::Value,\n\n}\n\n\n\nimpl Value {\n\n /// Convert this value into a JSON value.\n\n fn to_json_value(&self) -> Result<serde_json::Value> {\n\n Ok(self.value.clone())\n\n }\n\n}\n\n\n\n/// Run a query that should return a small number of records, and return them as\n\n/// a JSON string.\n\n#[instrument(level = \"trace\", skip(labels))]\n\nasync fn query_all_json(\n\n project: &str,\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/bigquery/queries.rs", "rank": 66, "score": 70622.62571968905 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Row {\n\n /// The fields in this row.\n\n #[serde(rename = \"f\")]\n\n fields: Vec<Value>,\n\n}\n\n\n\nimpl Row {\n\n /// Convert this row into a JSON object using names and other metadata from\n\n /// columns. We don't try to decode anything that `serde_json` can later\n\n /// decode for us.\n\n fn to_json_object(&self, columns: &[BqColumn]) -> Result<serde_json::Value> {\n\n // Check that we have the right number of columns.\n\n if columns.len() != self.fields.len() {\n\n return Err(format_err!(\n\n \"schema contained {} columns, but row contains {}\",\n\n columns.len(),\n\n self.fields.len(),\n\n ));\n\n }\n\n let mut obj = serde_json::Map::with_capacity(columns.len());\n\n for (col, value) in columns.iter().zip(self.fields.iter()) {\n\n obj.insert(col.name.to_portable_name(), value.to_json_value()?);\n\n }\n\n Ok(serde_json::Value::Object(obj))\n\n }\n\n}\n\n\n\n/// A value returned in query results.\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/bigquery/queries.rs", "rank": 67, "score": 70622.62571968905 }, { "content": "#[derive(Clone, Debug)]\n\nenum BigMlAction {\n\n /// Create a single `dataset/$ID` resource on BigML, containing all the data.\n\n CreateDataset,\n\n /// Create one or more `dataset/$ID` resources on BigML.\n\n 
CreateDatasets,\n\n /// Create a single `source/$ID` resource on BigML, containing all the data.\n\n CreateSource,\n\n /// Create one or more `source/$ID` resources on BigML.\n\n CreateSources,\n\n /// Read data from the specified dataset.\n\n ReadDataset(Id<Dataset>),\n\n /// This cannot be directly used as a source or destination, but it can be\n\n /// printed as output from our driver.\n\n OutputSource(Id<Source>),\n\n}\n\n\n\n/// (Internal.) Options for resource creation.\n\npub(self) struct CreateOptions {\n\n /// Should we concatenate our input CSVs into a single stream?\n\n pub(self) concat_csv_streams: bool,\n", "file_path": "dbcrossbarlib/src/drivers/bigml/mod.rs", "rank": 68, "score": 69959.62542508393 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\nstruct CallLimit {\n\n /// How much of our call limit have we used?\n\n used: u32,\n\n /// How much is remaining?\n\n limit: u32,\n\n}\n\n\n\nimpl CallLimit {\n\n /// Are we close enough to our call limit that we should chill out a bit?\n\n fn should_wait(self) -> bool {\n\n self.used.saturating_mul(2) >= self.limit\n\n }\n\n}\n\n\n\nimpl FromStr for CallLimit {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if let Some(split_pos) = s.find('/') {\n\n let used = s[..split_pos]\n", "file_path": "dbcrossbarlib/src/drivers/shopify/local_data.rs", "rank": 69, "score": 69555.49113142451 }, { "content": "#[derive(Debug)]\n\nstruct ShopifyResponse {\n\n /// How much of our API have we used?\n\n call_limit: CallLimit,\n\n\n\n /// The URL of the next page of data.\n\n next_page_url: Option<Url>,\n\n\n\n /// Individual data rows.\n\n rows: Vec<Value>,\n\n}\n\n\n\n/// A Shopify \"call limit\", specifying how much of our API quota we've used.\n", "file_path": "dbcrossbarlib/src/drivers/shopify/local_data.rs", "rank": 70, "score": 69549.98377354901 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct QueryResults {\n\n /// The 
schema of our query results.\n\n schema: TableSchema,\n\n\n\n /// Rows returned from the query.\n\n rows: Vec<Row>,\n\n\n\n /// Has this query completed?\n\n job_complete: bool,\n\n}\n\n\n\nimpl QueryResults {\n\n fn to_json_objects(&self) -> Result<Vec<serde_json::Value>> {\n\n let objects = self\n\n .rows\n\n .iter()\n\n .map(|row| row.to_json_object(&self.schema.fields))\n\n .collect::<Result<Vec<serde_json::Value>>>()?;\n\n trace!(\n\n \"rows as objects: {}\",\n\n serde_json::to_string(&objects).expect(\"should be able to serialize rows\"),\n\n );\n\n Ok(objects)\n\n }\n\n}\n\n\n\n/// A row returned in `QueryResults`.\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/bigquery/queries.rs", "rank": 71, "score": 69549.69404151296 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ListResponse {\n\n #[allow(dead_code)]\n\n kind: String,\n\n\n\n next_page_token: Option<String>,\n\n\n\n #[serde(default)]\n\n items: Vec<StorageObject>,\n\n}\n\n\n\n/// A local helper macro that works like `?`, except that it report errors\n\n/// by sending them to `sender` and returning `Ok(())`.\n\nmacro_rules! 
try_and_forward_errors {\n\n ($ctx:expr, $expression:expr, $sender:expr) => {\n\n match $expression {\n\n Ok(val) => val,\n\n Err(err) => {\n\n error!(\"error in gcloud worker: {}\", err);\n\n $sender.send(Err(err.into())).await.map_send_err()?;\n\n return Ok(());\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/storage/ls.rs", "rank": 72, "score": 69549.69404151296 }, { "content": "#[derive(Clone, Copy)]\n\nenum CreateTableType {\n\n /// Regular `CREATE TABLE`.\n\n Plain,\n\n /// `CREATE TABLE IF NOT EXISTS`.\n\n IfNotExists,\n\n /// `CREATE OR REPLACE TABLE`.\n\n OrReplace,\n\n}\n\n\n\nimpl fmt::Display for CreateTableType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n CreateTableType::Plain => write!(f, \"CREATE TABLE\"),\n\n CreateTableType::IfNotExists => write!(f, \"CREATE TABLE IF NOT EXISTS\"),\n\n CreateTableType::OrReplace => write!(f, \"CREATE OR REPLACE TABLE\"),\n\n }\n\n }\n\n}\n\n\n\n/// Extensions to `Column` (the portable version) to handle BigQuery-query\n", "file_path": "dbcrossbarlib/src/drivers/bigquery_shared/table.rs", "rank": 73, "score": 68934.4542531496 }, { "content": "#[derive(Debug)]\n\nstruct ChunkRanges {\n\n /// The size of chunk we want to return.\n\n chunk_size: u64,\n\n /// The total length of our file.\n\n len: u64,\n\n /// The place to start our next range.\n\n next_start: u64,\n\n}\n\n\n\nimpl Iterator for ChunkRanges {\n\n type Item = ops::Range<u64>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.next_start < self.len {\n\n let end = min(self.next_start + self.chunk_size, self.len);\n\n let range = self.next_start..end;\n\n self.next_start = end;\n\n Some(range)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/storage/download_file.rs", "rank": 74, "score": 68531.75285889223 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct DownloadQuery {\n\n /// What format should we return?\n\n alt: 
Alt,\n\n\n\n /// What object generation do we expect to download?\n\n if_generation_match: i64,\n\n}\n\n\n\n/// Download the file at the specified URL as a stream.\n\n#[instrument(level = \"trace\", skip(item), fields(item = %item.to_url_string()))]\n\npub(crate) async fn download_file(\n\n item: &StorageObject,\n\n) -> Result<BoxStream<BytesMut>> {\n\n let file_url = item.to_url_string().parse::<Url>()?;\n\n debug!(\"streaming from {}\", file_url);\n\n let (bucket, object) = parse_gs_url(&file_url)?;\n\n\n\n // Build our URL & common headers.\n\n let url = format!(\n\n \"https://storage.googleapis.com/storage/v1/b/{}/o/{}\",\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/storage/download_file.rs", "rank": 75, "score": 68531.46312685618 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct QueryResultsQuery {\n\n /// Geographic location. Mandatory outside of US and Europe.\n\n location: String,\n\n}\n\n\n\n/// Results of a query.\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/bigquery/queries.rs", "rank": 76, "score": 68531.46312685618 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct UploadQuery {\n\n /// The type of the upload we're performing.\n\n upload_type: &'static str,\n\n\n\n /// Only accept the upload if the existing object has the specified\n\n /// generation number. 
Use 0 to specify a non-existant object.\n\n if_generation_match: i64,\n\n\n\n /// The name of the object we're creating.\n\n name: String,\n\n}\n\n\n\n/// Upload `data` as a file at `url`.\n\n///\n\n/// Docs: https://cloud.google.com/storage/docs/json_api/v1/objects/insert\n\n///\n\n/// TODO: Support https://cloud.google.com/storage/docs/performing-resumable-uploads.\n\n#[instrument(level = \"trace\", skip(ctx, data))]\n\npub(crate) async fn upload_file<'a>(\n\n ctx: &'a Context,\n", "file_path": "dbcrossbarlib/src/clouds/gcloud/storage/upload_file.rs", "rank": 77, "score": 68531.46312685618 }, { "content": "#[test]\n\nfn parse_and_display() {\n\n let examples = [\n\n (\"error\", IfExists::Error),\n\n (\"append\", IfExists::Append),\n\n (\"overwrite\", IfExists::Overwrite),\n\n (\"upsert-on:id\", IfExists::Upsert(vec![\"id\".to_owned()])),\n\n (\n\n \"upsert-on:first,last\",\n\n IfExists::Upsert(vec![\"first\".to_owned(), \"last\".to_owned()]),\n\n ),\n\n ];\n\n for (serialized, value) in &examples {\n\n assert_eq!(&serialized.parse::<IfExists>().unwrap(), value);\n\n assert_eq!(serialized, &value.to_string());\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/if_exists.rs", "rank": 78, "score": 68237.27198826618 }, { "content": "#[test]\n\nfn version_flag() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"version_flag\");\n\n let output = testdir.cmd().arg(\"--version\").expect_success();\n\n assert!(output.stdout_str().contains(env!(\"CARGO_PKG_VERSION\")));\n\n}\n", "file_path": "dbcrossbar/tests/cli/about.rs", "rank": 79, "score": 68237.27198826618 }, { "content": "#[test]\n\nfn help_flag() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"help_flag\");\n\n let output = testdir.cmd().arg(\"--help\").expect_success();\n\n assert!(output.stdout_str().contains(\"dbcrossbar\"));\n\n}\n\n\n", "file_path": "dbcrossbar/tests/cli/about.rs", "rank": 80, "score": 68237.27198826618 }, { "content": "#[test]\n\nfn parse_geometry() {\n\n use geo_types::Point;\n\n use 
serde_json::json;\n\n\n\n let geojson_value = json!({\n\n \"type\": \"Point\",\n\n \"coordinates\": [-71, 42],\n\n });\n\n let geojson = serde_json::to_string(&geojson_value).unwrap();\n\n let geometry = Geometry::<f64>::from_csv_cell(&geojson).unwrap();\n\n let expected = Geometry::Point(Point::new(-71.0, 42.0));\n\n assert_eq!(geometry, expected);\n\n}\n\n\n\nimpl FromCsvCell for i16 {\n\n fn from_csv_cell(cell: &str) -> Result<Self> {\n\n cell.parse::<i16>()\n\n .with_context(|| format!(\"cannot parse {:?} as i16\", cell))\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/from_csv_cell.rs", "rank": 81, "score": 67059.46629019568 }, { "content": "#[test]\n\nfn must_have_upsert_keys() {\n\n assert!(\"upsert-on:\".parse::<IfExists>().is_err());\n\n}\n", "file_path": "dbcrossbarlib/src/if_exists.rs", "rank": 82, "score": 67059.46629019568 }, { "content": "#[test]\n\n#[ignore]\n\nfn count_bigquery() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"count_bigquery\");\n\n let src = testdir.src_path(\"fixtures/posts.csv\");\n\n let schema = testdir.src_path(\"fixtures/posts.sql\");\n\n let gs_temp_dir = gs_test_dir_url(\"count_bigquery\");\n\n let bq_temp_ds = bq_temp_dataset();\n\n let bq_table = bq_test_table(\"count_bigquery\");\n\n\n\n // CSV to BigQuery.\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"cp\",\n\n \"--if-exists=overwrite\",\n\n &format!(\"--temporary={}\", gs_temp_dir),\n\n &format!(\"--temporary={}\", bq_temp_ds),\n\n &format!(\"--schema=postgres-sql:{}\", schema.display()),\n\n &format!(\"csv:{}\", src.display()),\n\n &bq_table,\n\n ])\n", "file_path": "dbcrossbar/tests/cli/count.rs", "rank": 83, "score": 67059.46629019568 }, { "content": "/// Remove the CSV header from a CSV stream, passing everything else through\n\n/// untouched.\n\nfn strip_csv_header(\n\n ctx: Context,\n\n mut stream: BoxStream<BytesMut>,\n\n) -> Result<BoxStream<BytesMut>> {\n\n // Create an asynchronous background worker to do the actual work.\n\n let (mut sender, receiver) = 
bytes_channel(1);\n\n let worker = async move {\n\n // Accumulate bytes in this buffer until we see a full CSV header.\n\n let mut buffer: Option<BytesMut> = None;\n\n\n\n // Look for a full CSV header.\n\n while let Some(result) = stream.next().await {\n\n match result {\n\n Err(err) => {\n\n error!(\"error reading stream: {}\", err);\n\n return send_err(sender, err).await;\n\n }\n\n Ok(bytes) => {\n\n trace!(\"received {} bytes\", bytes.len());\n\n let mut new_buffer = if let Some(mut buffer) = buffer.take() {\n", "file_path": "dbcrossbarlib/src/concat.rs", "rank": 84, "score": 67059.46629019568 }, { "content": "#[test]\n\n#[ignore]\n\nfn count_postgres() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"count_postgres\");\n\n let src = testdir.src_path(\"fixtures/posts.csv\");\n\n let schema = testdir.src_path(\"fixtures/posts.sql\");\n\n let pg_table = post_test_table_url(\"count_postgres\");\n\n\n\n // CSV to PostgreSQL.\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"cp\",\n\n \"--if-exists=overwrite\",\n\n &format!(\"--schema=postgres-sql:{}\", schema.display()),\n\n &format!(\"csv:{}\", src.display()),\n\n &pg_table,\n\n ])\n\n .tee_output()\n\n .expect_success();\n\n\n\n // Count PostgreSQL.\n\n let output = testdir\n\n .cmd()\n\n .args(&[\"count\", &pg_table])\n\n .tee_output()\n\n .expect_success();\n\n\n\n assert_eq!(output.stdout_str().trim(), \"2\");\n\n}\n", "file_path": "dbcrossbar/tests/cli/count.rs", "rank": 85, "score": 67059.46629019568 }, { "content": "/// Insert `value` into `json` at `path`.\n\n///\n\n/// `path` must never be empty. 
If `json` is `Value::Null`, it will be replaced\n\n/// with either a JSON object or a JSON array, depending on the type of the next\n\n/// component in `path`.\n\nfn insert_into_json(\n\n json: &mut Value,\n\n file_info: &Arc<FileInfo>,\n\n path: &[Component],\n\n value: Value,\n\n) -> Result<(), ParseError> {\n\n // We should never be called with an empty path.\n\n assert!(!path.is_empty());\n\n\n\n // Helper function that builds a useful error.\n\n let conflict_err =\n\n |pos: &Range<usize>, message: &'static str, existing: &Value| {\n\n let existing = serde_json::to_string(existing).unwrap();\n\n ParseError::new(\n\n file_info.to_owned(),\n\n vec![Annotation::primary(pos.to_owned(), \"conflict here\")],\n\n format!(\"{}, but earlier arguments specified {}\", message, existing),\n\n )\n\n };\n\n\n", "file_path": "dbcrossbarlib/src/driver_args.rs", "rank": 86, "score": 67059.46629019568 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\nstruct BigMlDestinationArguments {\n\n /// The name of the source or dataset to create.\n\n name: Option<String>,\n\n\n\n /// The default optype to use for text fields.\n\n optype_for_text: Option<Optype>,\n\n\n\n /// Tags to apply to the resources we create.\n\n #[serde(default)]\n\n tags: Vec<String>,\n\n}\n\n\n\n/// Implementation of `write_local_data`, but as a real `async` function.\n\n#[instrument(\n\n level = \"debug\",\n\n name = \"bigml::write_local_data\",\n\n skip_all,\n\n fields(dest = %dest)\n\n)]\n\npub(crate) async fn write_local_data_helper(\n", "file_path": "dbcrossbarlib/src/drivers/bigml/write_local_data.rs", "rank": 87, "score": 66642.56802639802 }, { "content": "#[test]\n\n#[ignore]\n\nfn cp_from_bigquery_with_where() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"cp_from_bigquery_with_where\");\n\n let src = testdir.src_path(\"fixtures/posts.csv\");\n\n let filtered = testdir.src_path(\"fixtures/posts_where_author_id_1.csv\");\n\n let schema = 
testdir.src_path(\"fixtures/posts.sql\");\n\n let gs_temp_dir = gs_test_dir_url(\"cp_from_bigquery_with_where\");\n\n let bq_temp_ds = bq_temp_dataset();\n\n let bq_table = bq_test_table(\"cp_from_bigquery_with_where\");\n\n\n\n // CSV to BigQuery.\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"cp\",\n\n \"--if-exists=overwrite\",\n\n &format!(\"--temporary={}\", gs_temp_dir),\n\n &format!(\"--temporary={}\", bq_temp_ds),\n\n &format!(\"--schema=postgres-sql:{}\", schema.display()),\n\n &format!(\"csv:{}\", src.display()),\n\n &bq_table,\n", "file_path": "dbcrossbar/tests/cli/cp/bigquery.rs", "rank": 88, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn do_not_display_password() {\n\n let l = \"redshift://user:pass@host/db#table\"\n\n .parse::<RedshiftLocator>()\n\n .expect(\"could not parse locator\");\n\n assert_eq!(format!(\"{}\", l), \"redshift://user:XXXXXX@host/db#table\");\n\n}\n\n\n\nimpl FromStr for RedshiftLocator {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n if !s.starts_with(\"redshift:\") {\n\n // Don't print the unparsed locator in the error because that would\n\n // leak the password.\n\n return Err(format_err!(\"Redshift locator must begin with redshift://\"));\n\n }\n\n let postgres_locator = s.replacen(\"redshift:\", \"postgres:\", 1).parse()?;\n\n Ok(RedshiftLocator { postgres_locator })\n\n }\n\n}\n", "file_path": "dbcrossbarlib/src/drivers/redshift/mod.rs", "rank": 89, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn to_https_url() {\n\n let loc = ShopifyLocator::from_str(\n\n \"shopify://example.myshopify.com/admin/api/2020-04/orders.json\",\n\n )\n\n .unwrap();\n\n assert_eq!(\n\n loc.to_https_url().unwrap().as_str(),\n\n \"https://example.myshopify.com/admin/api/2020-04/orders.json\",\n\n );\n\n}\n\n\n\nimpl fmt::Display for ShopifyLocator {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.url.fmt(f)\n\n }\n\n}\n\n\n\nimpl FromStr for ShopifyLocator {\n\n type Err = 
Error;\n\n\n", "file_path": "dbcrossbarlib/src/drivers/shopify/mod.rs", "rank": 90, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn locator_from_str_to_string_roundtrip() {\n\n let locators = vec![\n\n \"bigquery:my_project:my_dataset.my_table\",\n\n \"bigquery-schema:dir/my_table.json\",\n\n \"bigml:dataset\",\n\n \"bigml:datasets\",\n\n \"bigml:dataset/abc123\",\n\n \"bigml:source\",\n\n \"bigml:sources\",\n\n \"csv:file.csv\",\n\n \"csv:dir/\",\n\n \"dbcrossbar-schema:file.json\",\n\n \"dbcrossbar-ts:file %231 20%25.ts#Type\",\n\n \"gs://example-bucket/tmp/\",\n\n \"postgres://localhost:5432/db#my_table\",\n\n \"postgres-sql:dir/my_table.sql\",\n\n \"s3://example/my-dir/\",\n\n \"shopify://example.myshopify.com/admin/api/2020-04/orders.json\",\n\n ];\n\n for locator in locators.into_iter() {\n", "file_path": "dbcrossbarlib/src/locator.rs", "rank": 91, "score": 65944.66899432463 }, { "content": "#[test]\n\n#[ignore]\n\nfn redshift_upsert() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"redshift_upsert\");\n\n let srcs = &[\n\n testdir.src_path(\"fixtures/redshift_upsert/upsert_1.csv\"),\n\n testdir.src_path(\"fixtures/redshift_upsert/upsert_2.csv\"),\n\n ];\n\n let expected = testdir.src_path(\"fixtures/redshift_upsert/upsert_result.csv\");\n\n let schema = testdir.src_path(\"fixtures/redshift_upsert/upsert.sql\");\n\n let s3_dir = s3_test_dir_url(\"redshift_upsert\");\n\n let redshift_table = match redshift_test_table_url(\"public.redshift_upsert\") {\n\n Some(redshift_table) => redshift_table,\n\n None => {\n\n // We allow this test to be disabled by default even when --ignored\n\n // is passed, because Redshift is hard to set up, and it costs a\n\n // minimum of ~$180/month to run.\n\n eprintln!(\"SKIPPING REDSHIFT TEST - PLEASE SET `REDSHIFT_TEST_URL`!\");\n\n return;\n\n }\n\n };\n\n let iam_role =\n", "file_path": "dbcrossbar/tests/cli/cp/redshift.rs", "rank": 92, "score": 65944.66899432463 }, { "content": "#[test]\n\n#[ignore]\n\nfn 
postgres_upsert() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"postgres_upsert\");\n\n let srcs = &[\n\n testdir.src_path(\"fixtures/upsert/upsert_1.csv\"),\n\n testdir.src_path(\"fixtures/upsert/upsert_2.csv\"),\n\n ];\n\n let expected = testdir.src_path(\"fixtures/upsert/upsert_result.csv\");\n\n let schema = testdir.src_path(\"fixtures/upsert/upsert.sql\");\n\n let pg_table = post_test_table_url(\"postgres_upsert\");\n\n\n\n // CSVes to Postgres.\n\n let mut first = true;\n\n for src in srcs {\n\n let if_exists = if first {\n\n first = false;\n\n \"--if-exists=overwrite\"\n\n } else {\n\n // Make sure we have a unique index on key1,key2 first.\n\n Command::new(\"psql\")\n\n .arg(postgres_test_url())\n", "file_path": "dbcrossbar/tests/cli/cp/postgres.rs", "rank": 93, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn conv_help_flag() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"conv_help_flag\");\n\n let output = testdir\n\n .cmd()\n\n .args(&[\"schema\", \"conv\", \"--help\"])\n\n .expect_success();\n\n assert!(output.stdout_str().contains(\"EXAMPLE LOCATORS:\"));\n\n}\n\n\n", "file_path": "dbcrossbar/tests/cli/conv.rs", "rank": 94, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn do_not_display_password() {\n\n let l = \"postgres://user:pass@host/db#table\"\n\n .parse::<PostgresLocator>()\n\n .expect(\"could not parse locator\");\n\n assert_eq!(format!(\"{}\", l), \"postgres://user:XXXXXX@host/db#table\");\n\n}\n\n\n\nimpl FromStr for PostgresLocator {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n let mut url: Url = s.parse::<Url>().context(\"cannot parse Postgres URL\")?;\n\n if url.scheme() != &Self::scheme()[..Self::scheme().len() - 1] {\n\n Err(format_err!(\"expected URL scheme postgres: {:?}\", s))\n\n } else {\n\n // Extract table name from URL.\n\n let table_name = url\n\n .fragment()\n\n .ok_or_else(|| {\n\n format_err!(\"{} needs to be followed by #table_name\", url)\n\n })?\n\n 
.parse::<PgName>()?;\n\n url.set_fragment(None);\n\n let url = UrlWithHiddenPassword::new(url);\n\n Ok(PostgresLocator { url, table_name })\n\n }\n\n }\n\n}\n\n\n", "file_path": "dbcrossbarlib/src/drivers/postgres/mod.rs", "rank": 95, "score": 65944.66899432463 }, { "content": "#[test]\n\n#[ignore]\n\nfn cp_from_postgres_with_where() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"cp_from_postgres_with_where\");\n\n let src = testdir.src_path(\"fixtures/posts.csv\");\n\n let filtered = testdir.src_path(\"fixtures/posts_where_author_id_1.csv\");\n\n let schema = testdir.src_path(\"fixtures/posts.sql\");\n\n let pg_table = post_test_table_url(\"cp_from_postgres_with_where\");\n\n\n\n // CSV to Postgres.\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"cp\",\n\n \"--if-exists=overwrite\",\n\n &format!(\"--schema=postgres-sql:{}\", schema.display()),\n\n &format!(\"csv:{}\", src.display()),\n\n &pg_table,\n\n ])\n\n .tee_output()\n\n .expect_success();\n\n\n", "file_path": "dbcrossbar/tests/cli/cp/postgres.rs", "rank": 96, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn parse_utc_timestamp() {\n\n use chrono::TimeZone;\n\n let examples = &[\n\n (\n\n \"1969-07-20 20:17:39+00\",\n\n Utc.ymd(1969, 7, 20).and_hms(20, 17, 39),\n\n ),\n\n (\n\n \"1969-07-20 19:17:39.0-0100\",\n\n Utc.ymd(1969, 7, 20).and_hms(20, 17, 39),\n\n ),\n\n (\n\n \"1969-07-20 21:17:39.0+01:00\",\n\n Utc.ymd(1969, 7, 20).and_hms(20, 17, 39),\n\n ),\n\n (\n\n \"1969-07-20T21:17:39.0+01:00\",\n\n Utc.ymd(1969, 7, 20).and_hms(20, 17, 39),\n\n ),\n\n ];\n", "file_path": "dbcrossbarlib/src/from_csv_cell.rs", "rank": 97, "score": 65944.66899432463 }, { "content": "#[test]\n\n#[ignore]\n\nfn bigquery_upsert() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"bigquery_upsert\");\n\n let srcs = &[\n\n testdir.src_path(\"fixtures/upsert/upsert_1.csv\"),\n\n testdir.src_path(\"fixtures/upsert/upsert_2.csv\"),\n\n ];\n\n let expected = testdir.src_path(\"fixtures/upsert/upsert_result.csv\");\n\n let 
schema = testdir.src_path(\"fixtures/upsert/upsert.sql\");\n\n let bq_temp_ds = bq_temp_dataset();\n\n let gs_temp_dir = gs_test_dir_url(\"bigquery_upsert\");\n\n let bq_table = bq_test_table(\"bigquery_upsert\");\n\n\n\n // CSVes to BigQuery.\n\n let mut first = true;\n\n for src in srcs {\n\n let if_exists = if first {\n\n first = false;\n\n \"--if-exists=overwrite\"\n\n } else {\n\n \"--if-exists=upsert-on:key1,key2\"\n", "file_path": "dbcrossbar/tests/cli/cp/bigquery.rs", "rank": 98, "score": 65944.66899432463 }, { "content": "#[test]\n\nfn conv_ts_to_portable() {\n\n let testdir = TestDir::new(\"dbcrossbar\", \"conv_ts_to_portable\");\n\n let input_ts = testdir.src_path(\"fixtures/dbcrossbar_ts/shapes.ts\");\n\n let output_json = testdir.path(\"output.json\");\n\n let expected_json = testdir.src_path(\"fixtures/dbcrossbar_ts/shapes.json\");\n\n testdir\n\n .cmd()\n\n .args(&[\n\n \"--enable-unstable\",\n\n \"schema\",\n\n \"conv\",\n\n &format!(\"dbcrossbar-ts:{}#Shape\", input_ts.display()),\n\n &format!(\"dbcrossbar-schema:{}\", output_json.display()),\n\n ])\n\n .expect_success();\n\n let output = fs::read_to_string(&output_json).unwrap();\n\n let expected = fs::read_to_string(&expected_json).unwrap();\n\n assert_eq!(\n\n serde_json::from_str::<serde_json::Value>(&output).unwrap(),\n\n serde_json::from_str::<serde_json::Value>(&expected).unwrap(),\n\n );\n\n}\n\n\n", "file_path": "dbcrossbar/tests/cli/conv.rs", "rank": 99, "score": 65944.66899432463 } ]
Rust
src/apu/dmc.rs
zeta0134/rusticnes-core
de44cda41670c8902e96f9f5f7b624b6cf1d8ac1
use mmc::mapper::Mapper; use super::audio_channel::AudioChannelState; use super::ring_buffer::RingBuffer; pub struct DmcState { pub name: String, pub chip: String, pub debug_disable: bool, pub debug_buffer: Vec<i16>, pub output_buffer: RingBuffer, pub looping: bool, pub period_initial: u16, pub period_current: u16, pub output_level: u8, pub starting_address: u16, pub sample_length: u16, pub current_address: u16, pub sample_buffer: u8, pub shift_register: u8, pub sample_buffer_empty: bool, pub bits_remaining: u8, pub bytes_remaining: u16, pub silence_flag: bool, pub interrupt_enabled: bool, pub interrupt_flag: bool, pub rdy_line: bool, pub rdy_delay: u8, } impl DmcState { pub fn new(channel_name: &str, chip_name: &str) -> DmcState { return DmcState { name: String::from(channel_name), chip: String::from(chip_name), debug_disable: false, debug_buffer: vec!(0i16; 4096), output_buffer: RingBuffer::new(32768), looping: false, period_initial: 428, period_current: 0, output_level: 0, starting_address: 0, sample_length: 0, current_address: 0, sample_buffer: 0, shift_register: 0, sample_buffer_empty: true, bits_remaining: 8, bytes_remaining: 0, silence_flag: false, interrupt_enabled: true, interrupt_flag: false, rdy_line: false, rdy_delay: 0, } } pub fn debug_status(&self) -> String { return format!("Rate: {:3} - Divisor: {:3} - Start: {:04X} - Current: {:04X} - Length: {:4} - R.Bytes: {:4} - R.Bits: {:1}", self.period_initial, self.period_current, self.starting_address, self.current_address, self.sample_length, self.bytes_remaining, self.bits_remaining); } pub fn read_next_sample(&mut self, mapper: &mut dyn Mapper) { match mapper.read_cpu(0x8000 | (self.current_address & 0x7FFF)) { Some(byte) => self.sample_buffer = byte, None => self.sample_buffer = 0, } self.current_address = self.current_address.wrapping_add(1); self.bytes_remaining -= 1; if self.bytes_remaining == 0 { if self.looping { self.current_address = self.starting_address; self.bytes_remaining = 
self.sample_length; } else { if self.interrupt_enabled { self.interrupt_flag = true; } } } self.sample_buffer_empty = false; self.rdy_line = false; self.rdy_delay = 0; } pub fn begin_output_cycle(&mut self) { self.bits_remaining = 8; if self.sample_buffer_empty { self.silence_flag = true; } else { self.silence_flag = false; self.shift_register = self.sample_buffer; self.sample_buffer_empty = true; } } pub fn update_output_unit(&mut self) { if !(self.silence_flag) { let mut target_output = self.output_level; if (self.shift_register & 0b1) == 0 { if self.output_level >= 2 { target_output -= 2; } } else { if self.output_level <= 125 { target_output += 2; } } self.output_level = target_output; } self.shift_register = self.shift_register >> 1; self.bits_remaining -= 1; if self.bits_remaining == 0 { self.begin_output_cycle(); } } pub fn clock(&mut self, mapper: &mut dyn Mapper) { if self.period_current == 0 { self.period_current = self.period_initial - 1; self.update_output_unit(); } else { self.period_current -= 1; } if self.sample_buffer_empty && self.bytes_remaining > 0 { self.rdy_line = true; self.rdy_delay += 1; if self.rdy_delay > 2 { self.read_next_sample(mapper); } } else { self.rdy_line = false; self.rdy_delay = 0; } } pub fn output(&self) -> i16 { return self.output_level as i16; } } impl AudioChannelState for DmcState { fn name(&self) -> String { return self.name.clone(); } fn chip(&self) -> String { return self.chip.clone(); } fn sample_buffer(&self) -> &RingBuffer { return &self.output_buffer; } fn record_current_output(&mut self) { self.output_buffer.push(self.output()); } fn min_sample(&self) -> i16 { return 0; } fn max_sample(&self) -> i16 { return 127; } fn muted(&self) -> bool { return self.debug_disable; } fn mute(&mut self) { self.debug_disable = true; } fn unmute(&mut self) { self.debug_disable = false; } fn playing(&self) -> bool { return true; } fn amplitude(&self) -> f64 { let buffer = self.output_buffer.buffer(); let mut index = 
(self.output_buffer.index() - 256) % buffer.len(); let mut max = buffer[index]; let mut min = buffer[index]; for _i in 0 .. 256 { if buffer[index] > max {max = buffer[index];} if buffer[index] < min {min = buffer[index];} index += 1; index = index % buffer.len(); } return (max - min) as f64 / 64.0; } }
use mmc::mapper::Mapper; use super::audio_channel::AudioChannelState; use super::ring_buffer::RingBuffer; pub struct DmcState { pub name: String, pub chip: String, pub debug_disable: bool, pub debug_buffer: Vec<i16>, pub output_buffer: RingBuffer, pub looping: bool, pub period_initial: u16, pub period_current: u16, pub output_level: u8, pub starting_address: u16, pub sample_length: u16, pub current_address: u16, pub sample_buffer: u8, pub shift_register: u8, pub sample_buffer_empty: bool, pub bits_remaining: u8, pub bytes_remaining: u16, pub silence_flag: bool, pub interrupt_enabled: bool, pub interrupt_flag: bool, pub rdy_line: bool, pub rdy_delay: u8, } impl DmcState { pub fn new(channel_name: &str, chip_name: &str) -> DmcState { return DmcState { name: String::from(channel_name), chip: String::from(chip_name), debug_disable: false, debug_buffer: vec!(0i16; 4096), output_buffer: RingBuffer::new(32768), looping: false, period_initial: 428, period_current: 0, output_level: 0, starting_address: 0, sample_length: 0, current_address: 0, sample_buffer: 0, shift_register: 0, sample_buffer_empty: true, bits_remaining: 8, bytes_remaining: 0, silence_flag: false, interrupt_enabled: true, interrupt_flag: false, rdy_line: false, rdy_delay: 0, } } pub fn debug_status(&self) -> String { return format!("Rate: {:3} - Divisor: {:3} - Start: {:04X} - Current: {:04X} - Length: {:4} - R.Bytes: {:4} - R.Bits: {:1}", self.period_initial, self.period_current, self.starting_address, self.current_address, self.sample_length, self.bytes_remaining, self.bits_remaining); } pub fn read_next_sample(&mut self, mapper: &mut dyn Mapper) { match mapper.read_cpu(0x8000 | (self.current_address & 0x7FFF)) { Some(byte) => self.sample_buffer = byte, None => self.sample_buffer = 0, } self.current_address = self.current_address.wrapping_add(1); self.bytes_remaining -= 1; if self.bytes_remaining == 0 { if self.looping { self.current_address = self.starting_address; self.bytes_remaining = 
self.sample_length; } else { if self.interrupt_enabled { self.interrupt_flag = true; } } } self.sample_buffer_empty = false; self.rdy_line = false; self.rdy_delay = 0; } pub fn begin_output_cycle(&mut self) { self.bits_remaining = 8; if self.sample_buffer_empty { self.silence_flag = true; } else { self.silence_flag = false; self.shift_register = self.sample_buffer; self.sample_buffer_empty = true; } }
pub fn clock(&mut self, mapper: &mut dyn Mapper) { if self.period_current == 0 { self.period_current = self.period_initial - 1; self.update_output_unit(); } else { self.period_current -= 1; } if self.sample_buffer_empty && self.bytes_remaining > 0 { self.rdy_line = true; self.rdy_delay += 1; if self.rdy_delay > 2 { self.read_next_sample(mapper); } } else { self.rdy_line = false; self.rdy_delay = 0; } } pub fn output(&self) -> i16 { return self.output_level as i16; } } impl AudioChannelState for DmcState { fn name(&self) -> String { return self.name.clone(); } fn chip(&self) -> String { return self.chip.clone(); } fn sample_buffer(&self) -> &RingBuffer { return &self.output_buffer; } fn record_current_output(&mut self) { self.output_buffer.push(self.output()); } fn min_sample(&self) -> i16 { return 0; } fn max_sample(&self) -> i16 { return 127; } fn muted(&self) -> bool { return self.debug_disable; } fn mute(&mut self) { self.debug_disable = true; } fn unmute(&mut self) { self.debug_disable = false; } fn playing(&self) -> bool { return true; } fn amplitude(&self) -> f64 { let buffer = self.output_buffer.buffer(); let mut index = (self.output_buffer.index() - 256) % buffer.len(); let mut max = buffer[index]; let mut min = buffer[index]; for _i in 0 .. 256 { if buffer[index] > max {max = buffer[index];} if buffer[index] < min {min = buffer[index];} index += 1; index = index % buffer.len(); } return (max - min) as f64 / 64.0; } }
pub fn update_output_unit(&mut self) { if !(self.silence_flag) { let mut target_output = self.output_level; if (self.shift_register & 0b1) == 0 { if self.output_level >= 2 { target_output -= 2; } } else { if self.output_level <= 125 { target_output += 2; } } self.output_level = target_output; } self.shift_register = self.shift_register >> 1; self.bits_remaining -= 1; if self.bits_remaining == 0 { self.begin_output_cycle(); } }
function_block-full_function
[ { "content": "pub fn mapper_from_file(file_data: &[u8]) -> Result<Box<dyn Mapper>, String> {\n\n let mut file_reader = file_data;\n\n return mapper_from_reader(&mut file_reader);\n\n}", "file_path": "src/cartridge.rs", "rank": 0, "score": 278454.8317243863 }, { "content": "pub fn mapper_from_reader(file_reader: &mut dyn Read) -> Result<Box<dyn Mapper>, String> {\n\n let mut entire_file = Vec::new();\n\n match file_reader.read_to_end(&mut entire_file) {\n\n Ok(_) => {/* proceed normally */},\n\n Err(e) => {\n\n return Err(format!(\"Failed to read any data at all, giving up.{}\\n\", e));\n\n }\n\n }\n\n\n\n let mut errors = String::new();\n\n match INesCartridge::from_reader(&mut entire_file.as_slice()) {\n\n Ok(ines) => {return mapper_from_ines(ines);},\n\n Err(e) => {errors += format!(\"ines: {}\\n\", e).as_str()}\n\n }\n\n\n\n match NsfFile::from_reader(&mut entire_file.as_slice()) {\n\n Ok(nsf) => {return Ok(Box::new(NsfMapper::from_nsf(nsf)?));},\n\n Err(e) => {errors += format!(\"nsf: {}\\n\", e).as_str()}\n\n }\n\n\n\n return Err(format!(\"Unable to open file as any known type, giving up.\\n{}\", errors));\n\n}\n\n\n", "file_path": "src/cartridge.rs", "rank": 1, "score": 276806.69810753304 }, { "content": "pub fn assemble(opcodes: Vec<Opcode>, starting_address: u16) -> Result<Vec<u8>, String> {\n\n let mut bytes: Vec<u8> = Vec::new();\n\n let flattened_opcodes = flatten(opcodes);\n\n let translated_opcodes = resolve_labels(flattened_opcodes, starting_address)?;\n\n for opcode in translated_opcodes {\n\n bytes.extend(opcode_bytes(opcode)?);\n\n }\n\n return Ok(bytes);\n\n}", "file_path": "src/asm.rs", "rank": 2, "score": 251721.29989628834 }, { "content": "pub fn read_byte(nes: &mut NesState, address: u16) -> u8 {\n\n let mapped_byte = nes.mapper.read_cpu(address).unwrap_or(nes.memory.open_bus);\n\n\n\n // This is a live read, handle any side effects\n\n match address {\n\n 0x2000 ..= 0x3FFF => {\n\n let ppu_reg = address & 0x7;\n\n match ppu_reg {\n\n // 
PPUSTATUS\n\n 2 => {\n\n nes.ppu.write_toggle = false;\n\n nes.ppu.latch = (nes.ppu.status & 0xE0) + (nes.ppu.latch & 0x1F);\n\n nes.ppu.status = nes.ppu.status & 0x7F; // Clear VBlank bit\n\n nes.event_tracker.snoop_cpu_read(nes.registers.pc, address, nes.ppu.latch);\n\n return nes.ppu.latch;\n\n },\n\n // OAMDATA\n\n 4 => {\n\n nes.ppu.latch = nes.ppu.oam[nes.ppu.oam_addr as usize];\n\n nes.event_tracker.snoop_cpu_read(nes.registers.pc, address, nes.ppu.latch);\n", "file_path": "src/memory.rs", "rank": 3, "score": 251202.3319544979 }, { "content": "pub fn write_byte(nes: &mut NesState, address: u16, data: u8) {\n\n // Track every byte written, unconditionally\n\n // (filtering is done inside the tracker)\n\n nes.event_tracker.snoop_cpu_write(nes.registers.pc, address, data);\n\n\n\n // The mapper *always* sees the write. Even to RAM, and even to internal registers.\n\n // Most mappers ignore writes to addresses below 0x6000. Some (notably MMC5) do not.\n\n nes.mapper.write_cpu(address, data);\n\n match address {\n\n 0x0000 ..= 0x1FFF => nes.memory.iram_raw[(address & 0x7FF) as usize] = data,\n\n 0x2000 ..= 0x3FFF => {\n\n // PPU\n\n let ppu_reg = address & 0x7;\n\n nes.ppu.latch = data;\n\n match ppu_reg {\n\n // PPUCTRL\n\n 0 => {\n\n nes.ppu.control = data;\n\n // Shift the nametable select bits into the temporary vram address\n\n // yyy_nn_YYYYY_XXXXX\n", "file_path": "src/memory.rs", "rank": 4, "score": 241392.6423833872 }, { "content": "pub fn addressing_bytes(addressing_mode: &str) -> u8 {\n\n\treturn match addressing_mode {\n\n\t\t\"#i\" | \"d\" | \"(d, x)\" | \"(d), y\" | \"d, x\" => 1,\n\n\t\t\"a\" | \"a, x\" | \"a, y\" | \"(a)\" => 2,\n\n\t\t_ => 0\n\n\t}\n\n}\n\n\n", "file_path": "src/opcode_info.rs", "rank": 5, "score": 230484.70314528106 }, { "content": "pub fn disassemble_instruction(opcode: u8, _: u8, _: u8) -> (String, u8) {\n\n let logic_block = opcode & 0b0000_0011;\n\n let addressing_mode_index = (opcode & 0b0001_1100) >> 2;\n\n let 
opcode_index = (opcode & 0b1110_0000) >> 5;\n\n\n\n let (opcode_name, addressing_mode) = match logic_block {\n\n 0b00 => control_block(opcode),\n\n 0b01 => alu_block(addressing_mode_index, opcode_index),\n\n 0b10 => rmw_block(opcode, addressing_mode_index, opcode_index),\n\n _ => (\"???\", \"\")\n\n };\n\n\n\n let instruction = format!(\"{} {}\", opcode_name, addressing_mode);\n\n let data_bytes = addressing_bytes(addressing_mode);\n\n return (instruction, data_bytes);\n\n}", "file_path": "src/opcode_info.rs", "rank": 6, "score": 211526.79362910293 }, { "content": "pub fn opcode_bytes(opcode: Opcode) -> Result<Vec<u8>, String> {\n\n match opcode {\n\n Opcode::Asl(AddressingMode::Accumulator) => {Ok(vec![0x0A])},\n\n Opcode::Asl(AddressingMode::ZeroPage(byte)) => {Ok(vec![0x06, byte])},\n\n Opcode::Asl(AddressingMode::ZeroPageX(byte)) => {Ok(vec![0x16, byte])},\n\n Opcode::Asl(AddressingMode::Absolute(address)) => {Ok(vec![0x0E, low(address), high(address)])},\n\n Opcode::Asl(AddressingMode::AbsoluteX(address)) => {Ok(vec![0x1E, low(address), high(address)])},\n\n\n\n Opcode::Bit(AddressingMode::ZeroPage(byte)) => {Ok(vec![0x24, byte])},\n\n Opcode::Bit(AddressingMode::Absolute(address)) => {Ok(vec![0x2C, low(address), high(address)])},\n\n Opcode::Brk => {Ok(vec![0x00])},\n\n Opcode::Bcc(AddressingMode::Relative(offset)) => {Ok(vec![0x90, offset as u8])},\n\n Opcode::Bcs(AddressingMode::Relative(offset)) => {Ok(vec![0xB0, offset as u8])},\n\n Opcode::Beq(AddressingMode::Relative(offset)) => {Ok(vec![0xF0, offset as u8])},\n\n Opcode::Bmi(AddressingMode::Relative(offset)) => {Ok(vec![0x30, offset as u8])},\n\n Opcode::Bne(AddressingMode::Relative(offset)) => {Ok(vec![0xD0, offset as u8])},\n\n Opcode::Bpl(AddressingMode::Relative(offset)) => {Ok(vec![0x10, offset as u8])},\n\n Opcode::Clc => {Ok(vec![0x18])},\n\n Opcode::Cli => {Ok(vec![0x58])},\n\n\n", "file_path": "src/asm.rs", "rank": 7, "score": 211464.54269087603 }, { "content": "pub fn debug_read_byte(nes: 
&NesState, address: u16) -> u8 {\n\n // Handle a few special cases for debug reads\n\n match address {\n\n 0x2000 ..= 0x3FFF => {\n\n let ppu_reg = address & 0x7;\n\n match ppu_reg {\n\n 7 => {\n\n let ppu_addr = nes.ppu.current_vram_address;\n\n // Note: does not simulate the data / palette fetch quirk.\n\n return nes.ppu.debug_read_byte(& *nes.mapper, ppu_addr);\n\n },\n\n _ => {}\n\n }\n\n },\n\n 0x4015 => {\n\n return nes.apu.debug_read_register(address);\n\n },\n\n _ => {}\n\n }\n\n\n\n let mapped_byte = nes.mapper.debug_read_cpu(address).unwrap_or(nes.memory.open_bus);\n\n return _read_byte(nes, address, mapped_byte);\n\n}\n\n\n", "file_path": "src/memory.rs", "rank": 8, "score": 210727.39041831938 }, { "content": "fn relative_offset(known_labels: &HashMap<String, u16>, label: &String, current_address: u16) -> Result<i8, String> {\n\n match known_labels.get(label) {\n\n Some(label_address) => {\n\n //let current_offset = assemble(translated_opcodes.clone())?.len();\n\n let relative_offset = (*label_address as i32) - (current_address as i32) - 2;\n\n println!(\"Will emit branch to label {} with relative offset {}\", label, relative_offset);\n\n if relative_offset > 127 || relative_offset < -128 {\n\n return Err(format!(\"Branch to label {} is out of range ({})\", label, relative_offset))\n\n }\n\n return Ok(relative_offset as i8);\n\n\n\n },\n\n None => return Err(format!(\"Label not found: {}\", label))\n\n }\n\n}\n\n\n", "file_path": "src/asm.rs", "rank": 9, "score": 207280.57798015635 }, { "content": "pub fn resolve_labels(opcodes: Vec<Opcode>, starting_address: u16) -> Result<Vec<Opcode>, String> {\n\n let mut known_labels: HashMap<String, u16> = HashMap::new();\n\n let mut total_bytes: u16 = 0;\n\n for opcode in &opcodes {\n\n match opcode {\n\n Opcode::Label(label) => {\n\n known_labels.insert(label.to_string(), total_bytes);\n\n println!(\"Registering label {} with offset {}\", label, total_bytes);\n\n },\n\n // These opcodes will fail to resolve in 
opcode_bytes, so we instead catch them here\n\n // and advance the total_bytes manually; we'll replace these in a later step\n\n Opcode::Bcc(AddressingMode::RelativeLabel(_)) => {total_bytes += 2},\n\n Opcode::Bcs(AddressingMode::RelativeLabel(_)) => {total_bytes += 2},\n\n Opcode::Beq(AddressingMode::RelativeLabel(_)) => {total_bytes += 2},\n\n Opcode::Bmi(AddressingMode::RelativeLabel(_)) => {total_bytes += 2},\n\n Opcode::Bne(AddressingMode::RelativeLabel(_)) => {total_bytes += 2},\n\n Opcode::Bpl(AddressingMode::RelativeLabel(_)) => {total_bytes += 2},\n\n Opcode::Jmp(AddressingMode::AbsoluteLabel(_)) => {total_bytes += 3},\n\n Opcode::Jsr(AddressingMode::AbsoluteLabel(_)) => {total_bytes += 3},\n\n\n", "file_path": "src/asm.rs", "rank": 10, "score": 205261.88052720227 }, { "content": "// Logical shift right\n\npub fn lsr(registers: &mut Registers, data: u8) -> u8 {\n\n registers.flags.carry = data & 0x1 != 0;\n\n let result: u8 = data >> 1;\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = false;\n\n return result;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 11, "score": 203936.4685393261 }, { "content": "// Rotate Right\n\npub fn ror(registers: &mut Registers, data: u8) -> u8 {\n\n let old_carry = registers.flags.carry;\n\n registers.flags.carry = (data & 0x01) != 0;\n\n let result = (data >> 1) | ((old_carry as u8) << 7);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = (result & 0x80) != 0;\n\n return result;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 12, "score": 203936.4685393261 }, { "content": "// Increment Memory\n\npub fn inc(registers: &mut Registers, data: u8) -> u8 {\n\n let result: u8 = data.wrapping_add(1);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = result & 0x80 != 0;\n\n return result;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 13, "score": 203936.4685393261 }, { "content": "pub fn nop_modify(_: &mut Registers, data: u8) -> u8 {\n\n return 
data;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 14, "score": 203936.4685393261 }, { "content": "// Arithmetic Shift Left\n\npub fn asl(registers: &mut Registers, data: u8) -> u8 {\n\n registers.flags.carry = data & 0x80 != 0;\n\n let result = (data & 0x7F) << 1;\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = result & 0x80 != 0;\n\n return result;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 15, "score": 203936.4685393261 }, { "content": "// Decrement Memory\n\npub fn dec(registers: &mut Registers, data: u8) -> u8 {\n\n let result: u8 = data.wrapping_sub(1);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = result & 0x80 != 0;\n\n return result;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 16, "score": 203936.4685393261 }, { "content": "// Rotate left\n\npub fn rol(registers: &mut Registers, data: u8) -> u8 {\n\n let old_carry = registers.flags.carry;\n\n registers.flags.carry = (data & 0x80) != 0;\n\n let result = (data << 1) | (old_carry as u8);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = (result & 0x80) != 0;\n\n return result;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 17, "score": 203936.4685393261 }, { "content": "pub fn control_block(opcode: u8) -> (&'static str, &'static str) {\n\n // Everything is pretty irregular, so we'll just match the whole opcode\n\n return match opcode {\n\n \t0x10 => (\"BPL\", \"\"),\n\n \t0x30 => (\"BMI\", \"\"),\n\n \t0x50 => (\"BVC\", \"\"),\n\n \t0x70 => (\"BVS\", \"\"),\n\n \t0x90 => (\"BCC\", \"\"),\n\n \t0xB0 => (\"BCS\", \"\"),\n\n \t0xD0 => (\"BNE\", \"\"),\n\n \t0xF0 => (\"BEQ\", \"\"),\n\n\n\n 0x00 => (\"BRK\", \"\"),\n\n 0x80 => (\"NOP\", \"#i\"),\n\n\n\n // Opcodes with similar addressing modes\n\n 0xA0 => (\"LDY\", \"#i\"),\n\n 0xC0 => (\"CPY\", \"#i\"),\n\n 0xE0 => (\"CPX\", \"#i\"),\n\n 0x24 => (\"BIT\", \"d\"),\n", "file_path": "src/opcode_info.rs", "rank": 18, "score": 201256.03260227828 }, { "content": "pub fn 
nop_write(_: &mut Registers) -> u8 {\n\n return 0; // Meant to be discarded\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 19, "score": 201227.84258387794 }, { "content": "// Store X\n\npub fn stx(registers: &mut Registers) -> u8 {\n\n return registers.x\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 20, "score": 201227.8425838779 }, { "content": "// Store Y\n\npub fn sty(registers: &mut Registers) -> u8 {\n\n return registers.y\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 21, "score": 201227.84258387794 }, { "content": "// Store Accumulator\n\npub fn sta(registers: &mut Registers) -> u8 {\n\n return registers.a\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 22, "score": 201227.8425838779 }, { "content": "// Shift left and inclusive OR A\n\npub fn slo(registers: &mut Registers, data: u8) -> u8 {\n\n let result = opcodes::asl(registers, data);\n\n opcodes::ora(registers, result);\n\n return result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 23, "score": 200035.916252267 }, { "content": "// Increment and subtract w/ carry\n\npub fn isc(registers: &mut Registers, data: u8) -> u8 {\n\n let result = opcodes::inc(registers, data);\n\n opcodes::sbc(registers, result);\n\n return result;\n\n}\n\n\n\n// Many of the following opcodes are unstable, and therefore not part of official tests.\n\n// Hardware results may depend on the alignment of the planets, and whether or not the code \n\n// is being run on an odd-numbered thursday in a month that ends with R.\n\n\n\n// The following opcodes perform an &H, which requires the address byte to be available at a certain\n\n// point. 
These are weird enough to break the opcode structure, and so get custom functions.\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 24, "score": 200035.916252267 }, { "content": "// Rotate right, then ADC result with A\n\npub fn rra(registers: &mut Registers, data: u8) -> u8 {\n\n let result = opcodes::ror(registers, data);\n\n opcodes::adc(registers, result);\n\n return result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 25, "score": 200035.916252267 }, { "content": "// Decrement and compare\n\npub fn dcp(registers: &mut Registers, data: u8) -> u8 {\n\n let result = opcodes::dec(registers, data);\n\n opcodes::cmp(registers, result);\n\n return result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 26, "score": 200035.916252267 }, { "content": "// Rotate left, then AND A\n\npub fn rla(registers: &mut Registers, data: u8) -> u8 {\n\n let result = opcodes::rol(registers, data);\n\n opcodes::and(registers, result);\n\n return result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 27, "score": 200035.916252267 }, { "content": "// Shift right, then Exclisive OR A\n\npub fn sre(registers: &mut Registers, data: u8) -> u8 {\n\n let result = opcodes::lsr(registers, data);\n\n opcodes::eor(registers, result);\n\n return result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 28, "score": 200035.916252267 }, { "content": "fn mapper_from_ines(ines: INesCartridge) -> Result<Box<dyn Mapper>, String> {\n\n let mapper_number = ines.header.mapper_number();\n\n\n\n let mapper: Box<dyn Mapper> = match mapper_number {\n\n 0 => Box::new(Nrom::from_ines(ines)?),\n\n 1 => Box::new(Mmc1::from_ines(ines)?),\n\n 2 => Box::new(UxRom::from_ines(ines)?),\n\n 3 => Box::new(CnRom::from_ines(ines)?),\n\n 4 => Box::new(Mmc3::from_ines(ines)?),\n\n 5 => Box::new(Mmc5::from_ines(ines)?),\n\n 7 => Box::new(AxRom::from_ines(ines)?),\n\n 9 => Box::new(PxRom::from_ines(ines)?),\n\n 19 => Box::new(Namco163::from_ines(ines)?),\n\n 
24 => Box::new(Vrc6::from_ines(ines)?),\n\n 26 => Box::new(Vrc6::from_ines(ines)?),\n\n 31 => Box::new(INes31::from_ines(ines)?),\n\n 34 => Box::new(BnRom::from_ines(ines)?),\n\n 66 => Box::new(GxRom::from_ines(ines)?),\n\n 69 => Box::new(Fme7::from_ines(ines)?),\n\n _ => {\n\n return Err(format!(\"Unsupported iNES mapper: {}\", ines.header.mapper_number()));\n\n }\n\n };\n\n\n\n println!(\"Successfully loaded mapper: {}\", mapper_number);\n\n\n\n return Ok(mapper);\n\n}\n\n\n", "file_path": "src/cartridge.rs", "rank": 29, "score": 198983.07408327935 }, { "content": "pub fn rmw_block(opcode: u8, addressing_mode_index: u8, opcode_index: u8) -> (&'static str, &'static str) {\n\n // First, handle some block 10 opcodes that break the mold\n\n return match opcode {\n\n // Assorted NOPs\n\n 0x82 | 0xC2 | 0xE2 => (\"NOP\", \"#i\"),\n\n 0x1A | 0x3A | 0x5A | 0x7A | 0xDA | 0xEA | 0xFA => (\"NOP\", \"\"),\n\n // Certain opcodes may be vital to your success. THESE opcodes are not.\n\n 0x02 | 0x22 | 0x42 | 0x62 | 0x12 | 0x32 | 0x52 | 0x72 | 0x92 | 0xB2 | 0xD2 | 0xF2 => (\"STP\", \"\"),\n\n 0xA2 => (\"LDX\", \"#i\"),\n\n 0x8A => (\"TXA\", \"\"),\n\n 0xAA => (\"TAX\", \"\"),\n\n 0xCA => (\"DEX\", \"\"),\n\n 0x9A => (\"TXS\", \"\"),\n\n 0xBA => (\"TSX\", \"\"),\n\n 0x96 => (\"STX\", \"d, y\"),\n\n 0xB6 => (\"LDX\", \"d, y\"),\n\n 0xBE => (\"LDX\", \"a, y\"),\n\n _ => {\n\n let addressing_mode = match addressing_mode_index {\n\n // Zero Page Mode\n", "file_path": "src/opcode_info.rs", "rank": 30, "score": 198414.49890180258 }, { "content": "pub fn alu_block(addressing_mode_index: u8, opcode_index: u8) -> (&'static str, &'static str) {\n\n let addressing_mode = match addressing_mode_index {\n\n // Zero Page Mode\n\n 0b000 => \"(d, x)\",\n\n 0b001 => \"d\",\n\n 0b010 => \"#i\",\n\n 0b011 => \"a\",\n\n 0b100 => \"(d), y\",\n\n 0b101 => \"d, x\",\n\n 0b110 => \"a, y\",\n\n 0b111 => \"a, x\",\n\n\n\n // Not implemented yet\n\n _ => \"???\",\n\n };\n\n\n\n let opcode_name = match 
opcode_index {\n\n 0b000 => \"ORA\",\n\n 0b001 => \"AND\",\n\n 0b010 => \"EOR\",\n", "file_path": "src/opcode_info.rs", "rank": 31, "score": 198241.10009976607 }, { "content": "pub fn mirroring_mode_name(mode: Mirroring) -> &'static str {\n\n match mode {\n\n Mirroring::Horizontal => \"Horizontal\",\n\n Mirroring::Vertical => \"Vertical\",\n\n Mirroring::OneScreenLower => \"OneScreenLower\",\n\n Mirroring::OneScreenUpper => \"OneScreenUpper\",\n\n Mirroring::FourScreen => \"FourScreen\"\n\n }\n\n}\n\n\n", "file_path": "src/mmc/mapper.rs", "rank": 32, "score": 197100.31898357705 }, { "content": "pub fn and(registers: &mut Registers, data: u8) {\n\n registers.a = registers.a & data;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 33, "score": 196802.0808938087 }, { "content": "// NOP - Read and Write variants\n\npub fn nop_read(_: &mut Registers, _: u8) {\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 34, "score": 196802.0808938087 }, { "content": "pub fn sax(registers: &mut Registers) -> u8 {\n\n let result = registers.a & registers.x;\n\n return result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 35, "score": 196682.45905876037 }, { "content": "pub fn pop(nes: &mut NesState) -> u8 {\n\n nes.registers.s = nes.registers.s.wrapping_add(1);\n\n let address = (nes.registers.s as u16) + 0x0100;\n\n return read_byte(nes, address);\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 36, "score": 196682.45905876037 }, { "content": "// Logical inclusive OR\n\npub fn ora(registers: &mut Registers, data: u8) {\n\n registers.a = registers.a | data;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 37, "score": 192256.69736869118 }, { "content": "// Load Y\n\npub fn ldy(registers: &mut Registers, data: u8) {\n\n registers.y = data;\n\n 
registers.flags.zero = registers.y == 0;\n\n registers.flags.negative = registers.y & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 38, "score": 192256.69736869118 }, { "content": "// Compare Accumulator\n\npub fn cmp(registers: &mut Registers, data: u8) {\n\n registers.flags.carry = registers.a >= data;\n\n let result: u8 = registers.a.wrapping_sub(data);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = result & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 39, "score": 192256.69736869118 }, { "content": "// Add with Carry\n\npub fn adc(registers: &mut Registers, data: u8) {\n\n let result: u16 = registers.a as u16 + data as u16 + registers.flags.carry as u16;\n\n registers.flags.overflow = overflow(registers.a as i8, data as i8, registers.flags.carry as i8);\n\n registers.flags.carry = result > 0xFF;\n\n registers.a = (result & 0xFF) as u8;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 40, "score": 192256.69736869118 }, { "content": "// Load X\n\npub fn ldx(registers: &mut Registers, data: u8) {\n\n registers.x = data;\n\n registers.flags.zero = registers.x == 0;\n\n registers.flags.negative = registers.x & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 41, "score": 192256.69736869118 }, { "content": "// Compare X\n\npub fn cpx(registers: &mut Registers, data: u8) {\n\n registers.flags.carry = registers.x >= data;\n\n let result: u8 = registers.x.wrapping_sub(data);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = result & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 42, "score": 192256.69736869118 }, { "content": "// Exclusive OR\n\npub fn eor(registers: &mut Registers, data: u8) {\n\n registers.a = registers.a ^ data;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": 
"src/opcodes.rs", "rank": 43, "score": 192256.69736869118 }, { "content": "// Subtract with Carry\n\npub fn sbc(registers: &mut Registers, data: u8) {\n\n // Preload the carry into bit 8\n\n let inverted_data = data ^ 0xFF;\n\n adc(registers, inverted_data);\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 44, "score": 192256.69736869118 }, { "content": "// Load Accumulator\n\npub fn lda(registers: &mut Registers, data: u8) {\n\n registers.a = data;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 45, "score": 192256.69736869118 }, { "content": "// Bit Test\n\npub fn bit(registers: &mut Registers, data: u8) {\n\n let result: u8 = registers.a & data;\n\n registers.flags.zero = result == 0;\n\n registers.flags.overflow = data & 0x40 != 0;\n\n registers.flags.negative = data & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 46, "score": 192256.69736869118 }, { "content": "// Compare Y\n\npub fn cpy(registers: &mut Registers, data: u8) {\n\n registers.flags.carry = registers.y >= data;\n\n let result: u8 = registers.y.wrapping_sub(data);\n\n registers.flags.zero = result == 0;\n\n registers.flags.negative = result & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 47, "score": 192256.69736869118 }, { "content": "fn _read_byte(nes: &NesState, address: u16, mapped_byte: u8) -> u8 {\n\n match address {\n\n 0x0000 ..= 0x1FFF => {\n\n return nes.memory.iram_raw[(address & 0x7FF) as usize];\n\n },\n\n 0x2000 ..= 0x3FFF => {\n\n // PPU\n\n let ppu_reg = address & 0x7;\n\n match ppu_reg {\n\n // PPUCTRL, PPUMASK, OAMADDR | PPUSCROLL | PPUADDR (Write Only)\n\n 0 | 1 | 3 | 5 | 6 => {\n\n return nes.ppu.latch;\n\n },\n\n // PPUSTATUS\n\n 2 => {\n\n return (nes.ppu.status & 0xE0) + (nes.ppu.latch & 0x1F);\n\n },\n\n // OAMDATA\n\n 4 => {\n\n return nes.ppu.oam[nes.ppu.oam_addr as usize];\n", "file_path": "src/memory.rs", "rank": 48, "score": 
190790.01451791136 }, { "content": "// \"Magic\" value is a complete guess. I don't know if the NES's decimal unit actually\n\n// exists and is stubbed out; I'm assuming here that it is NOT, so setting magic to\n\n// 0x00. The true effect of this instruction varies *by console* and the instruction\n\n// should not be used for any purpose.\n\n// http://visual6502.org/wiki/index.php?title=6502_Opcode_8B_%28XAA,_ANE%29\n\npub fn xaa(registers: &mut Registers, data: u8) {\n\n // A = (A | magic) & X & imm\n\n let magic = 0x00;\n\n registers.a = (registers.a | magic) & registers.x & data;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 49, "score": 188053.6452631011 }, { "content": "// AND with #imm, then LSR\n\npub fn alr(registers: &mut Registers, data: u8) {\n\n opcodes::and(registers, data);\n\n let result = registers.a;\n\n registers.a = opcodes::lsr(registers, result);\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 50, "score": 188047.62426736008 }, { "content": "// Increment and subtract w/ carry\n\npub fn las(registers: &mut Registers, data: u8) {\n\n let result = registers.s & data;\n\n registers.a = result;\n\n registers.x = result;\n\n registers.s = result;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 51, "score": 188047.62426736008 }, { "content": "pub fn lax(registers: &mut Registers, data: u8) {\n\n opcodes::lda(registers, data);\n\n registers.x = data;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 52, "score": 188047.62426736008 }, { "content": "// AND with #imm, then ROR\n\npub fn arr(registers: &mut Registers, data: u8) {\n\n opcodes::and(registers, data);\n\n let result = registers.a;\n\n registers.a = opcodes::ror(registers, result);\n\n // Carry and Overflow are set weirdly:\n\n registers.flags.carry = (registers.a & 0b0100_0000) != 0;\n\n registers.flags.overflow = (((registers.a & 
0b0100_0000) >> 6) ^ ((registers.a & 0b0010_0000) >> 5)) != 0;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 53, "score": 188047.62426736008 }, { "content": "// Memory Utilities\n\npub fn push(nes: &mut NesState, data: u8) {\n\n let address = (nes.registers.s as u16) + 0x0100;\n\n write_byte(nes, address, data);\n\n nes.registers.s = nes.registers.s.wrapping_sub(1);\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 54, "score": 188047.62426736008 }, { "content": "// AND with carry\n\npub fn anc(registers: &mut Registers, data: u8) {\n\n opcodes::and(registers, data);\n\n registers.flags.carry = (registers.a & 0b1000_0000) != 0;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 55, "score": 188047.62426736008 }, { "content": "pub fn axs(registers: &mut Registers, data: u8) {\n\n let initial = registers.a & registers.x;\n\n // CMP with #imm, but store value in x:\n\n registers.flags.carry = initial >= data;\n\n registers.x = initial.wrapping_sub(data);\n\n registers.flags.zero = registers.x == 0;\n\n registers.flags.negative = registers.x & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/unofficial_opcodes.rs", "rank": 56, "score": 188047.62426736008 }, { "content": "fn label_address(known_labels: &HashMap<String, u16>, label: &String) -> Result<u16, String> {\n\n match known_labels.get(label) {\n\n Some(address) => Ok(*address),\n\n None => Err(format!(\"Label not found: {}\", label))\n\n }\n\n}\n\n\n", "file_path": "src/asm.rs", "rank": 57, "score": 177813.6833214624 }, { "content": "pub fn rmw_block(nes: &mut NesState, addressing_mode_index: u8, opcode_index: u8) {\n\n // First, handle some block 10 opcodes that break the mold\n\n match nes.cpu.opcode {\n\n // Assorted NOPs\n\n 0x82 | 0xC2 | 0xE2 => (addressing::IMMEDIATE.read) (nes, opcodes::nop_read),\n\n 0x1A | 0x3A | 0x5A | 0x7A | 0xDA | 0xEA | 0xFA => addressing::implied(nes, opcodes::nop),\n\n // Certain opcodes may be vital to your success. 
THESE opcodes are not.\n\n 0x02 | 0x22 | 0x42 | 0x62 | 0x12 | 0x32 | 0x52 | 0x72 | 0x92 | 0xB2 | 0xD2 | 0xF2 => {\n\n halt_cpu(nes);\n\n },\n\n 0xA2 => {(addressing::IMMEDIATE.read)(nes, opcodes::ldx)},\n\n 0x8A => addressing::implied(nes, opcodes::txa),\n\n 0xAA => addressing::implied(nes, opcodes::tax),\n\n 0xCA => addressing::implied(nes, opcodes::dex),\n\n 0x9A => addressing::implied(nes, opcodes::txs),\n\n 0xBA => addressing::implied(nes, opcodes::tsx),\n\n 0x96 => {(addressing::ZERO_PAGE_INDEXED_Y.write)(nes, opcodes::stx)},\n\n 0xB6 => {(addressing::ZERO_PAGE_INDEXED_Y.read)(nes, opcodes::ldx)},\n\n 0xBE => {(addressing::ABSOLUTE_INDEXED_Y.read)(nes, opcodes::ldx)},\n\n 0x9E => unofficial_opcodes::shx(nes),\n", "file_path": "src/cycle_cpu.rs", "rank": 58, "score": 177536.0639557165 }, { "content": "pub fn alu_block(nes: &mut NesState, addressing_mode_index: u8, opcode_index: u8) {\n\n let addressing_mode = match addressing_mode_index {\n\n // Zero Page Mode\n\n 0b000 => &addressing::INDEXED_INDIRECT_X,\n\n 0b001 => &addressing::ZERO_PAGE,\n\n 0b010 => &addressing::IMMEDIATE,\n\n 0b011 => &addressing::ABSOLUTE,\n\n 0b100 => &addressing::INDIRECT_INDEXED_Y,\n\n 0b101 => &addressing::ZERO_PAGE_INDEXED_X,\n\n 0b110 => &addressing::ABSOLUTE_INDEXED_Y,\n\n 0b111 => &addressing::ABSOLUTE_INDEXED_X,\n\n\n\n // Not implemented yet\n\n _ => &addressing::UNIMPLEMENTED,\n\n };\n\n\n\n match opcode_index {\n\n 0b000 => {(addressing_mode.read)(nes, opcodes::ora)},\n\n 0b001 => {(addressing_mode.read)(nes, opcodes::and)},\n\n 0b010 => {(addressing_mode.read)(nes, opcodes::eor)},\n\n 0b011 => {(addressing_mode.read)(nes, opcodes::adc)},\n\n 0b100 => {(addressing_mode.write)(nes, opcodes::sta)},\n\n 0b101 => {(addressing_mode.read)(nes, opcodes::lda)},\n\n 0b110 => {(addressing_mode.read)(nes, opcodes::cmp)},\n\n 0b111 => {(addressing_mode.read)(nes, opcodes::sbc)},\n\n _ => ()\n\n };\n\n}\n\n\n", "file_path": "src/cycle_cpu.rs", "rank": 59, "score": 177536.0639557165 }, { 
"content": "pub fn unofficial_block(nes: &mut NesState, addressing_mode_index: u8, opcode_index: u8) {\n\n // unofficial opcodes are surprisingly regular, but the following instructions break the\n\n // mold, mostly from the +0B block:\n\n match nes.cpu.opcode {\n\n 0x0B | 0x2B => {(addressing::IMMEDIATE.read)(nes, unofficial_opcodes::anc)},\n\n 0x4B => {(addressing::IMMEDIATE.read)(nes, unofficial_opcodes::alr)},\n\n 0x6B => {(addressing::IMMEDIATE.read)(nes, unofficial_opcodes::arr)},\n\n 0x8B => {(addressing::IMMEDIATE.read)(nes, unofficial_opcodes::xaa)},\n\n 0x93 => unofficial_opcodes::ahx_indirect_indexed_y(nes),\n\n 0x9B => unofficial_opcodes::tas(nes),\n\n 0x97 => {(addressing::ZERO_PAGE_INDEXED_Y.write)(nes, unofficial_opcodes::sax)},\n\n 0x9F => unofficial_opcodes::ahx_absolute_indexed_y(nes),\n\n 0xB7 => {(addressing::ZERO_PAGE_INDEXED_Y.read)(nes, unofficial_opcodes::lax)},\n\n 0xBB => {(addressing::ABSOLUTE_INDEXED_Y.read)(nes, unofficial_opcodes::las)},\n\n 0xBF => {(addressing::ABSOLUTE_INDEXED_Y.read)(nes, unofficial_opcodes::lax)},\n\n 0xCB => {(addressing::IMMEDIATE.read)(nes, unofficial_opcodes::axs)},\n\n 0xEB => {(addressing::IMMEDIATE.read)(nes, opcodes::sbc)},\n\n _ => {\n\n // The remaining opcodes all use the same addressing mode as the ALU block, and the wame\n\n // read / write / modify type as the corresponding RMW block. 
Opcodes are mostly a combination\n", "file_path": "src/cycle_cpu.rs", "rank": 60, "score": 177536.0639557165 }, { "content": "pub fn add_to_zero_page_address(nes: &mut NesState, offset: u8) {\n\n let effective_address = nes.cpu.data1 as u16;\n\n // Dummy read from original address, discarded\n\n let _ = read_byte(nes, effective_address);\n\n nes.cpu.data1 = nes.cpu.data1.wrapping_add(offset);\n\n}\n\n\n", "file_path": "src/addressing.rs", "rank": 61, "score": 177102.30291324542 }, { "content": "fn high(word: u16) -> u8 {\n\n return ((word & 0xFF00) >> 8) as u8;\n\n}\n\n\n", "file_path": "src/asm.rs", "rank": 62, "score": 172167.31445783336 }, { "content": "// Utilities to help compact the opcode decoding block\n\nfn low(word: u16) -> u8 {\n\n return (word & 0x00FF) as u8;\n\n}\n\n\n", "file_path": "src/asm.rs", "rank": 63, "score": 172167.31445783336 }, { "content": "pub fn vertical_mirroring(read_address: u16) -> u16 {\n\n let nt_base = read_address & 0xFFF;\n\n let nt_address = read_address & 0x3FF;\n\n match nt_base {\n\n // Nametable 0 (top-left)\n\n 0x000 ..= 0x3FF => nt_address + NT_OFFSET.0,\n\n 0x400 ..= 0x7FF => nt_address + NT_OFFSET.1,\n\n 0x800 ..= 0xBFF => nt_address + NT_OFFSET.0,\n\n 0xC00 ..= 0xFFF => nt_address + NT_OFFSET.1,\n\n _ => return 0, // wat\n\n }\n\n}\n\n\n", "file_path": "src/mmc/mirroring.rs", "rank": 64, "score": 169135.35444544372 }, { "content": "pub fn four_banks(read_address: u16) -> u16 {\n\n let nt_base = read_address & 0xFFF;\n\n let nt_address = read_address & 0x3FF;\n\n match nt_base {\n\n // Nametable 0 (top-left)\n\n 0x000 ..= 0x3FF => nt_address + NT_OFFSET.0,\n\n 0x400 ..= 0x7FF => nt_address + NT_OFFSET.1,\n\n 0x800 ..= 0xBFF => nt_address + NT_OFFSET.2,\n\n 0xC00 ..= 0xFFF => nt_address + NT_OFFSET.3,\n\n _ => return 0, // wat\n\n }\n\n}\n", "file_path": "src/mmc/mirroring.rs", "rank": 65, "score": 169135.35444544372 }, { "content": "pub fn horizontal_mirroring(read_address: u16) -> u16 {\n\n let nt_base = read_address 
& 0xFFF;\n\n let nt_address = read_address & 0x3FF;\n\n match nt_base {\n\n // Nametable 0 (top-left)\n\n 0x000 ..= 0x3FF => nt_address + NT_OFFSET.0,\n\n 0x400 ..= 0x7FF => nt_address + NT_OFFSET.0,\n\n 0x800 ..= 0xBFF => nt_address + NT_OFFSET.1,\n\n 0xC00 ..= 0xFFF => nt_address + NT_OFFSET.1,\n\n _ => return 0, // wat\n\n }\n\n}\n\n\n", "file_path": "src/mmc/mirroring.rs", "rank": 66, "score": 169135.35444544372 }, { "content": "type ModifyOpcode = fn(&mut Registers, u8) -> u8;\n\n\n\npub struct AddressingMode {\n\n pub read: fn(&mut NesState, ReadOpcode),\n\n pub write: fn(&mut NesState, WriteOpcode),\n\n pub modify: fn(&mut NesState, ModifyOpcode),\n\n}\n\n\n", "file_path": "src/addressing.rs", "rank": 67, "score": 168015.46882991336 }, { "content": "pub fn one_screen_lower(read_address: u16) -> u16 {\n\n let nt_base = read_address & 0xFFF;\n\n let nt_address = read_address & 0x3FF;\n\n match nt_base {\n\n // Nametable 0 (top-left)\n\n 0x000 ..= 0x3FF => nt_address + NT_OFFSET.0,\n\n 0x400 ..= 0x7FF => nt_address + NT_OFFSET.0,\n\n 0x800 ..= 0xBFF => nt_address + NT_OFFSET.0,\n\n 0xC00 ..= 0xFFF => nt_address + NT_OFFSET.0,\n\n _ => return 0, // wat\n\n }\n\n}\n\n\n", "file_path": "src/mmc/mirroring.rs", "rank": 68, "score": 166203.10594416698 }, { "content": "pub fn one_screen_upper(read_address: u16) -> u16 {\n\n let nt_base = read_address & 0xFFF;\n\n let nt_address = read_address & 0x3FF;\n\n match nt_base {\n\n // Nametable 0 (top-left)\n\n 0x000 ..= 0x3FF => nt_address + NT_OFFSET.1,\n\n 0x400 ..= 0x7FF => nt_address + NT_OFFSET.1,\n\n 0x800 ..= 0xBFF => nt_address + NT_OFFSET.1,\n\n 0xC00 ..= 0xFFF => nt_address + NT_OFFSET.1,\n\n _ => return 0, // wat\n\n }\n\n}\n\n\n", "file_path": "src/mmc/mirroring.rs", "rank": 69, "score": 166203.10594416698 }, { "content": "pub fn nametable_address(read_address: u16, mirroring: Mirroring) -> u16 {\n\n // Mirroring documented here, the ABCD references to the charts in this article:\n\n // 
https://wiki.nesdev.com/w/index.php/Mirroring\n\n let nt_address = read_address & 0x3FF;\n\n let nt_offset = (0x000, 0x400, 0x800, 0xC00);\n\n match read_address {\n\n // Nametable 0 (top-left)\n\n 0x2000 ..= 0x23FF => {\n\n return match mirroring {\n\n Mirroring::Horizontal => nt_address + nt_offset.0, // A\n\n Mirroring::Vertical => nt_address + nt_offset.0, // A\n\n Mirroring::OneScreenLower => nt_address + nt_offset.0, // A\n\n Mirroring::OneScreenUpper => nt_address + nt_offset.1, // B\n\n Mirroring::FourScreen => nt_address + nt_offset.0, // A\n\n }\n\n },\n\n // Nametable 1 (top-right)\n\n 0x2400 ..= 0x27FF => {\n\n return match mirroring {\n\n Mirroring::Horizontal => nt_address + nt_offset.0, // A\n", "file_path": "src/ppu.rs", "rank": 70, "score": 159296.3739492171 }, { "content": "// NOP (implemented with an implied signature, for consistency)\n\npub fn nop(_: &mut Registers) {\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 71, "score": 158740.0551072263 }, { "content": "type ReadOpcode = fn(&mut Registers, u8);\n", "file_path": "src/addressing.rs", "rank": 72, "score": 155440.1540708257 }, { "content": "type WriteOpcode = fn(&mut Registers) -> u8;\n", "file_path": "src/addressing.rs", "rank": 73, "score": 155440.1540708257 }, { "content": "// Set Decimal Flag\n\npub fn sed(registers: &mut Registers) {\n\n registers.flags.decimal = true;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 74, "score": 154748.5588989088 }, { "content": "// Increment X\n\npub fn inx(registers: &mut Registers) {\n\n registers.x = registers.x.wrapping_add(1);\n\n registers.flags.zero = registers.x == 0;\n\n registers.flags.negative = registers.x & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 75, "score": 154748.5588989088 }, { "content": "// Decrement Y\n\npub fn dey(registers: &mut Registers) {\n\n registers.y = registers.y.wrapping_sub(1);\n\n registers.flags.zero = registers.y == 0;\n\n registers.flags.negative = registers.y & 0x80 != 
0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 76, "score": 154748.5588989088 }, { "content": "// Set Carry Flag\n\npub fn sec(registers: &mut Registers) {\n\n registers.flags.carry = true;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 77, "score": 154748.5588989088 }, { "content": "// Clear carry flag\n\npub fn clc(registers: &mut Registers) {\n\n registers.flags.carry = false\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 78, "score": 154748.5588989088 }, { "content": "// Transfer X -> A\n\npub fn txa(registers: &mut Registers) {\n\n registers.a = registers.x;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 79, "score": 154748.5588989088 }, { "content": "// Increment Y\n\npub fn iny(registers: &mut Registers) {\n\n registers.y = registers.y.wrapping_add(1);\n\n registers.flags.zero = registers.y == 0;\n\n registers.flags.negative = registers.y & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 80, "score": 154748.5588989088 }, { "content": "// Transfer S -> X\n\npub fn tsx(registers: &mut Registers) {\n\n registers.x = registers.s;\n\n registers.flags.zero = registers.x == 0;\n\n registers.flags.negative = registers.x & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 81, "score": 154748.5588989088 }, { "content": "// Clear overflow flag\n\npub fn clv(registers: &mut Registers) {\n\n registers.flags.overflow = false\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 82, "score": 154748.5588989088 }, { "content": "// Decrement X\n\npub fn dex(registers: &mut Registers) {\n\n registers.x = registers.x.wrapping_sub(1);\n\n registers.flags.zero = registers.x == 0;\n\n registers.flags.negative = registers.x & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 83, "score": 154748.5588989088 }, { "content": "// Transfer A -> Y\n\npub fn tay(registers: &mut Registers) {\n\n registers.y = registers.a;\n\n 
registers.flags.zero = registers.y == 0;\n\n registers.flags.negative = registers.y & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 84, "score": 154748.5588989088 }, { "content": "// Set Interrupt Disable Flag\n\npub fn sei(registers: &mut Registers) {\n\n registers.flags.interrupts_disabled = true;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 85, "score": 154748.5588989088 }, { "content": "// Transfer X -> S\n\npub fn txs(registers: &mut Registers) {\n\n registers.s = registers.x;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 86, "score": 154748.5588989088 }, { "content": "// Clear decimal flag\n\npub fn cld(registers: &mut Registers) {\n\n registers.flags.decimal = false\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 87, "score": 154748.5588989088 }, { "content": "// Clear interrupt disable (enbales interrupts?)\n\npub fn cli(registers: &mut Registers) {\n\n registers.flags.interrupts_disabled = false\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 88, "score": 154748.5588989088 }, { "content": "// Transfer A -> X\n\npub fn tax(registers: &mut Registers) {\n\n registers.x = registers.a;\n\n registers.flags.zero = registers.x == 0;\n\n registers.flags.negative = registers.x & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 89, "score": 154748.5588989088 }, { "content": "// Transfer Y -> A\n\npub fn tya(registers: &mut Registers) {\n\n registers.a = registers.y;\n\n registers.flags.zero = registers.a == 0;\n\n registers.flags.negative = registers.a & 0x80 != 0;\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 90, "score": 154748.5588989088 }, { "content": "pub fn rti(nes: &mut NesState) {\n\n match nes.cpu.tick {\n\n 2 => addressing::dummy_data1(nes),\n\n 3 => {/* Would increment S here */},\n\n 4 => {\n\n let s = pop(nes);\n\n nes.registers.set_status_from_byte(s);\n\n },\n\n 5 => {\n\n // Read PCL\n\n nes.cpu.data1 = pop(nes);\n\n },\n\n 6 => {\n\n // Read PCH\n\n let pch = pop(nes) as u16;\n\n let pcl = 
nes.cpu.data1 as u16;\n\n nes.registers.pc = (pch << 8) | pcl;\n\n nes.cpu.tick = 0;\n\n },\n\n _ => ()\n\n };\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 91, "score": 151075.98843719732 }, { "content": "pub fn jsr(nes: &mut NesState) {\n\n match nes.cpu.tick {\n\n 2 => addressing::read_address_low(nes),\n\n 3 => {/* Internal Operation */},\n\n 4 => {\n\n let pch = ((nes.registers.pc & 0xFF00) >> 8) as u8;\n\n push(nes, pch);\n\n },\n\n 5 => {\n\n let pcl = (nes.registers.pc & 0x00FF) as u8;\n\n push(nes, pcl);\n\n },\n\n 6 => {\n\n addressing::read_address_high(nes);\n\n nes.registers.pc = nes.cpu.temp_address;\n\n nes.cpu.tick = 0;\n\n },\n\n _ => ()\n\n };\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 92, "score": 151075.98843719732 }, { "content": "pub fn php(nes: &mut NesState) {\n\n match nes.cpu.tick {\n\n 2 => {\n\n addressing::dummy_data1(nes);\n\n nes.cpu.upcoming_write = true;\n\n },\n\n 3 => {\n\n let status = nes.registers.status_as_byte(true);\n\n push(nes, status);\n\n nes.cpu.tick = 0;\n\n nes.cpu.upcoming_write = false;\n\n },\n\n _ => (),\n\n }\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 93, "score": 151075.98843719732 }, { "content": "// Branch\n\npub fn branch(nes: &mut NesState) {\n\n // Note: the documentation for branch timing, located at http://nesdev.com/6502_cpu.txt includes\n\n // the first cycle of the next instruction. 
Thus, when bailing here, we set nes.cpu.tick to 1\n\n // instead of zero, so we don't execute the fetch step a second time when we're done.\n\n match nes.cpu.tick {\n\n 2 => {\n\n let pc = nes.registers.pc;\n\n nes.cpu.data1 = read_byte(nes, pc);\n\n nes.registers.pc = nes.registers.pc.wrapping_add(1);\n\n\n\n // Determine if branch is to be taken\n\n let flag_index = (nes.cpu.opcode & 0b1100_0000) >> 6;\n\n let flag_cmp = (nes.cpu.opcode & 0b0010_0000) != 0;\n\n let branch_taken = match flag_index {\n\n 0b00 => flag_cmp == nes.registers.flags.negative,\n\n 0b01 => flag_cmp == nes.registers.flags.overflow,\n\n 0b10 => flag_cmp == nes.registers.flags.carry,\n\n 0b11 => flag_cmp == nes.registers.flags.zero,\n\n _ => {/* Impossible */ false},\n\n };\n", "file_path": "src/opcodes.rs", "rank": 94, "score": 151075.98843719732 }, { "content": "pub fn brk(nes: &mut NesState) {\n\n // BRK's first cycle is the same as any ordinary instruction.\n\n match nes.cpu.tick {\n\n 2 => {\n\n let pc = nes.registers.pc;\n\n let _ = read_byte(nes, pc);\n\n nes.registers.pc = nes.registers.pc.wrapping_add(1);\n\n nes.cpu.upcoming_write = true;\n\n },\n\n 3 ..= 4 => service_interrupt(nes),\n\n 5 => {\n\n // At this point, NMI always takes priority, otherwise we run\n\n // an IRQ. 
This is the source of the BRK hijack quirk / bug.\n\n if nes.cpu.nmi_requested {\n\n nes.cpu.nmi_requested = false;\n\n nes.cpu.temp_address = 0xFFFA;\n\n } else {\n\n nes.cpu.temp_address = 0xFFFE;\n\n }\n\n // Here we set the B flag to signal a BRK, even if we end up servicing an NMI instead.\n\n let status_byte = nes.registers.status_as_byte(true);\n\n push(nes, status_byte);\n\n nes.cpu.upcoming_write = false;\n\n },\n\n 6 ..= 7 => service_interrupt(nes),\n\n _ => ()\n\n }\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 95, "score": 151075.98843719732 }, { "content": "pub fn pla(nes: &mut NesState) {\n\n match nes.cpu.tick {\n\n 2 => addressing::dummy_data1(nes),\n\n 3 => {/* Increment S */},\n\n 4 => {\n\n let a = pop(nes);\n\n nes.registers.a = a;\n\n nes.registers.flags.zero = nes.registers.a == 0;\n\n nes.registers.flags.negative = nes.registers.a & 0x80 != 0;\n\n nes.cpu.tick = 0;\n\n },\n\n _ => (),\n\n }\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 96, "score": 151075.98843719732 }, { "content": "pub fn rts(nes: &mut NesState) {\n\n match nes.cpu.tick {\n\n 2 => addressing::dummy_data1(nes),\n\n 3 => {/* Would incremeent S here */},\n\n 4 => {\n\n // Read PCL\n\n nes.cpu.data1 = pop(nes);\n\n },\n\n 5 => {\n\n let pch = pop(nes) as u16;\n\n let pcl = nes.cpu.data1 as u16;\n\n nes.registers.pc = (pch << 8) | pcl;\n\n },\n\n 6 => {\n\n nes.registers.pc = nes.registers.pc.wrapping_add(0x1);\n\n nes.cpu.tick = 0;\n\n },\n\n _ => ()\n\n };\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 97, "score": 151075.98843719732 }, { "content": "pub fn plp(nes: &mut NesState) {\n\n match nes.cpu.tick {\n\n 2 => addressing::dummy_data1(nes),\n\n 3 => {/* Increment S */},\n\n 4 => {\n\n let s = pop(nes);\n\n nes.registers.set_status_from_byte(s);\n\n nes.cpu.tick = 0;\n\n },\n\n _ => (),\n\n }\n\n}", "file_path": "src/opcodes.rs", "rank": 98, "score": 151075.98843719732 }, { "content": "// Opcodes which access the stack\n\npub fn pha(nes: &mut NesState) 
{\n\n match nes.cpu.tick {\n\n 2 => {\n\n addressing::dummy_data1(nes);\n\n nes.cpu.upcoming_write = true;\n\n },\n\n 3 => {\n\n let a = nes.registers.a;\n\n push(nes, a);\n\n nes.cpu.tick = 0;\n\n nes.cpu.upcoming_write = false;\n\n },\n\n _ => (),\n\n }\n\n}\n\n\n", "file_path": "src/opcodes.rs", "rank": 99, "score": 151075.98843719732 } ]
Rust
src/lib.rs
ravenexp/python3-dll-a
3735d6543ced976ab9405fdda3f010243c7fe551
#![deny(missing_docs)] #![allow(clippy::needless_doctest_main)] use std::env; use std::fs::{create_dir_all, write}; use std::io::{Error, ErrorKind, Result}; use std::path::{Path, PathBuf}; use std::process::Command; const IMPLIB_EXT_GNU: &str = ".dll.a"; const IMPLIB_EXT_MSVC: &str = ".lib"; const DLLTOOL_GNU: &str = "x86_64-w64-mingw32-dlltool"; const DLLTOOL_GNU_32: &str = "i686-w64-mingw32-dlltool"; const DLLTOOL_MSVC: &str = "llvm-dlltool"; #[cfg(windows)] const LIB_MSVC: &str = "lib.exe"; #[derive(Debug, Clone)] pub struct ImportLibraryGenerator { arch: String, env: String, version: Option<(u8, u8)>, } impl ImportLibraryGenerator { pub fn new(arch: &str, env: &str) -> Self { Self { arch: arch.to_string(), env: env.to_string(), version: None, } } pub fn version(&mut self, version: Option<(u8, u8)>) -> &mut Self { self.version = version; self } pub fn generate(&self, out_dir: &Path) -> Result<()> { create_dir_all(out_dir)?; let defpath = self.write_def_file(out_dir)?; let dlltool_command = DllToolCommand::find_for_target(&self.arch, &self.env)?; let implib_ext = dlltool_command.implib_file_ext(); let implib_file = self.implib_file_path(out_dir, implib_ext); let mut command = dlltool_command.build(&defpath, &implib_file); let status = command.status().map_err(|e| { let msg = format!("{:?} failed with {}", command, e); Error::new(e.kind(), msg) })?; if status.success() { Ok(()) } else { let msg = format!("{:?} failed with {}", command, status); Err(Error::new(ErrorKind::Other, msg)) } } fn write_def_file(&self, out_dir: &Path) -> Result<PathBuf> { let (def_file, def_file_content) = match self.version { None => ("python3.def", include_str!("python3.def")), Some((3, 7)) => ("python37.def", include_str!("python37.def")), Some((3, 8)) => ("python38.def", include_str!("python38.def")), Some((3, 9)) => ("python39.def", include_str!("python39.def")), Some((3, 10)) => ("python310.def", include_str!("python310.def")), Some((3, 11)) => ("python311.def", 
include_str!("python311.def")), _ => return Err(Error::new(ErrorKind::Other, "Unsupported Python version")), }; let mut defpath = out_dir.to_owned(); defpath.push(def_file); write(&defpath, def_file_content)?; Ok(defpath) } fn implib_file_path(&self, out_dir: &Path, libext: &str) -> PathBuf { let libname = match self.version { Some((major, minor)) => { format!("python{}{}{}", major, minor, libext) } None => format!("python3{}", libext), }; let mut libpath = out_dir.to_owned(); libpath.push(libname); libpath } } pub fn generate_implib_for_target(out_dir: &Path, arch: &str, env: &str) -> Result<()> { ImportLibraryGenerator::new(arch, env).generate(out_dir) } #[derive(Debug)] enum DllToolCommand { Mingw { command: Command }, Llvm { command: Command, machine: String }, LibExe { command: Command, machine: String }, Zig { command: Command, machine: String }, } impl DllToolCommand { fn find_for_target(arch: &str, env: &str) -> Result<DllToolCommand> { let machine = match arch { "x86_64" => "i386:x86-64", "x86" => "i386", "aarch64" => "arm64", arch => arch, } .to_owned(); if let Some(command) = find_zig() { return Ok(DllToolCommand::Zig { command, machine }); } match (arch, env) { ("x86_64", "gnu") => Ok(DllToolCommand::Mingw { command: Command::new(DLLTOOL_GNU), }), ("x86", "gnu") => Ok(DllToolCommand::Mingw { command: Command::new(DLLTOOL_GNU_32), }), (_, "msvc") => { if let Some(command) = find_lib_exe(arch) { let machine = match arch { "x86_64" => "X64", "x86" => "X86", "aarch64" => "ARM64", arch => arch, } .to_owned(); Ok(DllToolCommand::LibExe { command, machine }) } else { let command = Command::new(DLLTOOL_MSVC); Ok(DllToolCommand::Llvm { command, machine }) } } _ => { let msg = format!("Unsupported target arch '{}' or env ABI '{}'", arch, env); Err(Error::new(ErrorKind::Other, msg)) } } } fn implib_file_ext(&self) -> &'static str { if let DllToolCommand::Mingw { .. 
} = self { IMPLIB_EXT_GNU } else { IMPLIB_EXT_MSVC } } fn build(self, defpath: &Path, libpath: &Path) -> Command { match self { Self::Mingw { mut command } => { command .arg("--input-def") .arg(defpath) .arg("--output-lib") .arg(libpath); command } Self::Llvm { mut command, machine, } => { command .arg("-m") .arg(machine) .arg("-d") .arg(defpath) .arg("-l") .arg(libpath); command } Self::LibExe { mut command, machine, } => { command .arg(format!("/MACHINE:{}", machine)) .arg(format!("/DEF:{}", defpath.display())) .arg(format!("/OUT:{}", libpath.display())); command } Self::Zig { mut command, machine, } => { command .arg("dlltool") .arg("-m") .arg(machine) .arg("-d") .arg(defpath) .arg("-l") .arg(libpath); command } } } } fn find_zig() -> Option<Command> { let zig_command = env::var("ZIG_COMMAND").ok()?; let mut zig_cmdlet = zig_command.split_ascii_whitespace(); let mut zig = Command::new(zig_cmdlet.next()?); zig.args(zig_cmdlet); Some(zig) } #[cfg(windows)] fn find_lib_exe(arch: &str) -> Option<Command> { let target = match arch { "x86_64" => "x86_64-pc-windows-msvc", "x86" => "i686-pc-windows-msvc", "aarch64" => "aarch64-pc-windows-msvc", _ => return None, }; cc::windows_registry::find(target, LIB_MSVC) } #[cfg(not(windows))] fn find_lib_exe(_arch: &str) -> Option<Command> { None } #[cfg(test)] mod tests { use std::path::PathBuf; use super::*; #[cfg(unix)] #[test] fn generate() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("x86_64-pc-windows-gnu"); dir.push("python3-dll"); ImportLibraryGenerator::new("x86_64", "gnu") .generate(&dir) .unwrap(); for minor in 7..=11 { ImportLibraryGenerator::new("x86_64", "gnu") .version(Some((3, minor))) .generate(&dir) .unwrap(); } } #[cfg(unix)] #[test] fn generate_gnu32() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("i686-pc-windows-gnu"); dir.push("python3-dll"); generate_implib_for_target(&dir, "x86", "gnu").unwrap(); } #[test] fn 
generate_msvc() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("x86_64-pc-windows-msvc"); dir.push("python3-dll"); ImportLibraryGenerator::new("x86_64", "msvc") .generate(&dir) .unwrap(); for minor in 7..=11 { ImportLibraryGenerator::new("x86_64", "msvc") .version(Some((3, minor))) .generate(&dir) .unwrap(); } } #[test] fn generate_msvc32() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("i686-pc-windows-msvc"); dir.push("python3-dll"); generate_implib_for_target(&dir, "x86", "msvc").unwrap(); } #[test] fn generate_msvc_arm64() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("aarch64-pc-windows-msvc"); dir.push("python3-dll"); generate_implib_for_target(&dir, "aarch64", "msvc").unwrap(); } }
#![deny(missing_docs)] #![allow(clippy::needless_doctest_main)] use std::env; use std::fs::{create_dir_all, write}; use std::io::{Error, ErrorKind, Result}; use std::path::{Path, PathBuf}; use std::process::Command; const IMPLIB_EXT_GNU: &str = ".dll.a"; const IMPLIB_EXT_MSVC: &str = ".lib"; const DLLTOOL_GNU: &str = "x86_64-w64-mingw32-dlltool"; const DLLTOOL_GNU_32: &str = "i686-w64-mingw32-dlltool"; const DLLTOOL_MSVC: &str = "llvm-dlltool"; #[cfg(windows)] const LIB_MSVC: &str = "lib.exe"; #[derive(Debug, Clone)] pub struct ImportLibraryGenerator { arch: String, env: String, version: Option<(u8, u8)>, } impl ImportLibraryGenerator { pub fn new(arch: &str, env: &str) -> Self { Self { arch: arch.to_string(), env: env.to_string(), version: None, } } pub fn version(&mut self, version: Option<(u8, u8)>) -> &mut Self { self.version = version; self } pub fn generate(&self, out_dir: &Path) -> Result<()> { create_dir_all(out_dir)?; let defpath = self.write_def_file(out_dir)?; let dlltool_command = DllToolCommand::find_for_target(&self.arch, &self.env)?; let implib_ext = dlltool_command.implib_file_ext(); let implib_file = self.implib_file_path(out_dir, implib_ext); let mut command = dlltool_command.build(&defpath, &implib_file); let status = command.status().map_err(|e| { let msg = format!("{:?} failed with {}", command, e); Error::new(e.kind(), msg) })?; if status.success() { Ok(()) } else { let msg = format!("{:?} failed with {}", command, status); Err(Error::new(ErrorKind::Other, msg)) } } fn write_def_file(&self, out_dir: &Path) -> Result<PathBuf> { let (def_file, def_file_content) = match self.version { None => ("python3.def", include_str!("python3.def")), Some((3, 7)) => ("python37.def", include_str!("python37.def")), Some((3, 8)) => ("python38.def", include_str!("python38.def")), Some((3, 9)) => ("python39.def", include_str!("python39.def")), Some((3, 10)) => ("python310.def", include_str!("python310.def")), Some((3, 11)) => ("python311.def", 
include_str!("python311.def")), _ => return Err(Error::new(ErrorKind::Other, "Unsupported Python version")), }; let mut defpath = out_dir.to_owned(); defpath.push(def_file); write(&defpath, def_file_content)?; Ok(defpath) } fn implib_file_path(&self, out_dir: &Path, libext: &str) -> PathBuf { let libname = match self.version { Some((major, minor)) => { format!("python{}{}{}", major, minor, libext) } None => format!("python3{}", libext), }; let mut libpath = out_dir.to_owned(); libpath.push(libname); libpath } } pub fn generate_implib_for_target(out_dir: &Path, arch: &str, env: &str) -> Result<()> { ImportLibraryGenerator::new(arch, env).generate(out_dir) } #[derive(Debug)] enum DllToolCommand { Mingw { command: Command }, Llvm { command: Command, machine: String }, LibExe { command: Command, machine: String }, Zig { command: Command, machine: String }, } impl DllToolCommand { fn find_for_target(arch: &str, env: &str) -> Result<DllToolCommand> { let machine = match arch { "x86_64" => "i386:x86-64", "x86" => "i386", "aarch64" => "arm64", arch => arch, } .to_owned(); if let Some(command) = find_zig() { return Ok(DllToolCommand::Zig { command, machine }); } match (arch, env) { ("x86_64", "gnu") => Ok(DllToolCommand::Mingw { command: Command::new(DLLTOOL_GNU), }), ("x86", "gnu") => Ok(DllToolCommand::Mingw { command: Command::new(DLLTOOL_GNU_32), }), (_, "msvc") => { if let Some(command) = find_lib_exe(arch) { let machine = match arch { "x86_64" => "X64", "x86" => "X86", "aarch64" => "ARM64", arch => arch, } .to_owned(); Ok(DllToolCommand::LibExe { command, machine }) } else { let command = Command::new(DLLTOOL_MSVC); Ok(DllToolCommand::Llvm { command, machine }) } } _ => { let msg = format!("Unsupported target arch '{}' or env ABI '{}'", arch, env); Err(Error::new(ErrorKind::Other, msg)) } } } fn implib_file_ext(&self) -> &'static str { if let DllToolCommand::Mingw { .. 
} = self { IMPLIB_EXT_GNU } else { IMPLIB_EXT_MSVC } } fn build(self, defpath: &Path, libpath: &Path) -> Command { match self { Self::Mingw { mut command } => { command .arg("--input-def") .arg(defpath) .arg("--output-lib") .arg(libpath); command } Self::Llvm { mut command, machine, } => { command .arg("-m") .arg(machine) .arg("-d") .arg(defpath) .arg("-l") .arg(libpath); command } Self::LibExe { mut command, machine, } => { command .arg(format!("/MACHINE:{}", machine)) .arg(format!("/DEF:{}", defpath.display())) .arg(format!("/OUT:{}", libpath.display())); command } Self::Zig { mut command, machine, } => { command .arg("dlltool") .arg("-m") .arg(machine) .arg("-d") .arg(defpath) .arg("-l") .arg(libpath); command } } } } fn find_zig() -> Option<Command> { let zig_command = env::var("ZIG_COMMAND").ok()?; let mut zig_cmdlet = zig_command.split_ascii_whitespace(); let mut zig = Command::new(zig_cmdlet.next()?); zig.args(zig_cmdlet); Some(zig) } #[cfg(windows)] fn find_lib_exe(arch: &str) -> Option<Command> { let target = match arch { "x86_64" => "x86_64-pc-windows-msvc", "x86" => "i686-pc-windows-msvc", "aarch64" => "aarch64-pc-windows-msvc", _ => return None, }; cc::windows_registry::find(target, LIB_MSVC) } #[cfg(not(windows))] fn find_lib_exe(_arch: &str) -> Option<Command> { None } #[cfg(test)] mod tests { use std::path::PathBuf; use super::*; #[cfg(unix)] #[test] fn generate() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("x86_64-pc-windows-gnu"); dir.push("python3-dll"); ImportLibraryGenerator::new("x86_64", "gnu") .generate(&dir) .unwrap(); for minor in 7..=11 { ImportLibraryGenerator::new("x86_64", "gnu") .version(Some((3, minor))) .generate(&dir) .unwrap(); } } #[cfg(unix)] #[test]
#[test] fn generate_msvc() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("x86_64-pc-windows-msvc"); dir.push("python3-dll"); ImportLibraryGenerator::new("x86_64", "msvc") .generate(&dir) .unwrap(); for minor in 7..=11 { ImportLibraryGenerator::new("x86_64", "msvc") .version(Some((3, minor))) .generate(&dir) .unwrap(); } } #[test] fn generate_msvc32() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("i686-pc-windows-msvc"); dir.push("python3-dll"); generate_implib_for_target(&dir, "x86", "msvc").unwrap(); } #[test] fn generate_msvc_arm64() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("aarch64-pc-windows-msvc"); dir.push("python3-dll"); generate_implib_for_target(&dir, "aarch64", "msvc").unwrap(); } }
fn generate_gnu32() { let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); dir.push("target"); dir.push("i686-pc-windows-gnu"); dir.push("python3-dll"); generate_implib_for_target(&dir, "x86", "gnu").unwrap(); }
function_block-full_function
[ { "content": "#!/usr/bin/env python3\n\n# Parses Python Stable ABI symbol definitions from the manifest in the CPython repository located at https://github.com/python/cpython/blob/main/Misc/stable_abi.toml\n\n# and produces a definition file following the format described at https://docs.microsoft.com/en-us/cpp/build/reference/module-definition-dot-def-files.\n\nimport sys\n\nimport tomli\n\n\n\nstable_abi = tomli.load(sys.stdin.buffer)\n\n\n\nprint(\"LIBRARY python3.dll\")\n\nprint(\"EXPORTS\")\n\n\n\ncount = 0\n\n\n\nfor function in stable_abi[\"function\"].keys():\n\n print(function)\n\n count += 1\n\n\n\nfor data in stable_abi[\"data\"].keys():\n\n print(f\"{data} DATA\")\n\n count += 1\n\n\n\nassert count >= 859\n", "file_path": "parse-stable-abi.py", "rank": 5, "score": 14182.99096308242 }, { "content": "### Example `build.rs` script for an `abi3` PyO3 extension\n\n\n\nThe following cargo build script can be used to cross-compile Stable ABI\n\nPyO3 extension modules for Windows (64/32-bit x86 or 64-bit ARM)\n\nusing either MinGW-w64 or MSVC target environment ABI:\n\n\n\n```rust\n\nfn main() {\n\n if std::env::var(\"CARGO_CFG_TARGET_OS\").unwrap() == \"windows\" {\n\n let cross_lib_dir = std::env::var_os(\"PYO3_CROSS_LIB_DIR\")\n\n .expect(\"PYO3_CROSS_LIB_DIR is not set when cross-compiling\");\n\n let arch = std::env::var(\"CARGO_CFG_TARGET_ARCH\").unwrap();\n\n let env = std::env::var(\"CARGO_CFG_TARGET_ENV\").unwrap();\n\n\n\n let libdir = std::path::Path::new(&cross_lib_dir);\n\n python3_dll_a::generate_implib_for_target(libdir, &arch, &env)\n\n .expect(\"python3.dll import library generator failed\");\n\n }\n\n}\n\n```\n\n\n\nA compatible `python3.dll` import library file named `python3.dll.a`\n\nor `python3.lib` will be automatically created in the directory\n\npointed by the `PYO3_CROSS_LIB_DIR` environment variable.\n\n\n\n### Example `cargo build` invocation\n\n\n\n```sh\n\nPYO3_CROSS_LIB_DIR=target/python3-dll cargo build --target 
x86_64-pc-windows-gnu\n\n```\n\n\n\nGenerating version-specific `python3y.dll` import libraries\n\n-----------------------------------------------------------\n\n\n\nAs an advanced feature, `python3-dll-a` can generate Python version\n\nspecific import libraries such as `python39.lib`.\n\n\n\nSee the `ImportLibraryGenerator` builder API description for details.\n\n\n\nMaintenance\n\n-----------\n\n\n\nThis crate embeds Module-Defitions based on the `stable_abi.toml` file from CPython.\n\n\n\nThe upstream version of this file is located in the [CPython project][cpython]\n\nrepository under the path `Misc/stable_abi.toml`.\n\n\n\n[cpython]: https://github.com/python/cpython/blob/main/Misc/stable_abi.toml\n", "file_path": "README.md", "rank": 29, "score": 22.76556708572775 }, { "content": "Standalone `python3(y).dll` import library generator\n\n====================================================\n\n\n\nGenerates import libraries for the Python DLL\n\n(either `python3.dll` or `python3y.dll`)\n\nfor MinGW-w64 and MSVC (cross-)compile targets.\n\n\n\nThis crate **does not require** Python 3 distribution files\n\nto be present on the (cross-)compile host system.\n\n\n\n**Note:** MSVC cross-compile targets require either LLVM binutils\n\nor Zig to be available on the host system.\n\nMore specifically, `python3-dll-a` requires `llvm-dlltool` executable\n\nto be present in `PATH` when targeting `*-pc-windows-msvc` from Linux.\n\n\n\nAlternatively, `ZIG_COMMAND` environment variable may be set to e.g. 
`\"zig\"`\n\nor `\"python -m ziglang\"`, then `zig dlltool` will be used in place\n\nof `llvm-dlltool` (or MinGW binutils).\n\n\n\nPyO3 integration\n\n----------------\n\n\n\nSince version **0.16.5**, the `pyo3` crate implements support\n\nfor both the Stable ABI and version-specific Python DLL import\n\nlibrary generation via its new `generate-import-lib` feature.\n\n\n\nIn this configuration, `python3-dll-a` becomes a `pyo3` crate dependency\n\nand is automatically invoked by its build script in both native\n\nand cross compilation scenarios.\n\n\n\n### Example `Cargo.toml` usage for an `abi3` PyO3 extension module\n\n\n\n```toml\n\n[dependencies]\n\npyo3 = { version = \"0.16.5\", features = [\"extension-module\", \"abi3-py37\", \"generate-import-lib\"] }\n\n```\n\n\n\n### Example `Cargo.toml` usage for a standard PyO3 extension module\n\n\n\n```toml\n\n[dependencies]\n\npyo3 = { version = \"0.16.5\", features = [\"extension-module\", \"generate-import-lib\"] }\n\n```\n\n\n\nStandalone build script usage\n\n-----------------------------\n\n\n\nIf an older `pyo3` crate version is used, or a different Python bindings\n\nlibrary is required, `python3-dll-a` can be used directly\n\nfrom the crate build script.\n\n\n\nThe examples below assume using an older version of PyO3.\n\n\n", "file_path": "README.md", "rank": 30, "score": 14.901419499546561 }, { "content": "# Changelog\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\n## [0.2.3] - 2022-05-17\n\n\n\n### Features\n\n\n\n- Add `zig dlltool` support in [#18](https://github.com/pyo3/python3-dll-a/pull/18)\n\n\n\n### Fixes\n\n\n\n- Improve error message when `dlltool` is not found in [#17](https://github.com/pyo3/python3-dll-a/pull/17)\n\n\n\n## [0.2.2] - 2022-05-10\n\n\n\n### Features\n\n\n\n- Include `python3.def` itself in the Rust source in [#10](https://github.com/pyo3/python3-dll-a/pull/10)\n\n- Add support for generating non-abi3 `pythonXY.dll` in 
[#15](https://github.com/pyo3/python3-dll-a/pull/15)\n\n\n\n### CI\n\n\n\n- Automate `stable_abi.txt` updates in [#6](https://github.com/pyo3/python3-dll-a/pull/6)\n\n\n\n## [0.2.1] - 2022-04-17\n\n\n\n### Features\n\n\n\n- Add support for `lib.exe` from MSVC when running on Windows in [#2](https://github.com/pyo3/python3-dll-a/pull/2)\n\n\n\n### Documentation\n\n\n\n- Mention the new PyO3 integration feature\n\n- Add maintenance section to README\n\n\n\n### Miscellaneous Tasks\n\n\n\n- Update stable_abi.txt to the latest main\n\n\n\n### CI\n\n\n\n- Add `rust.yml` workflow to build and run unit tests\n\n- Add `publish.yml` workflow to publish the crate to `crates.io`\n\n\n\n## [0.2.0] - 2022-03-21\n\n\n\n### Features\n\n\n\n- [**breaking**] Use `Path` type for the output directory arg\n\n\n\n## [0.1.2] - 2022-03-15\n\n\n\n### Documentation\n\n\n\n- Document MSVC ABI environment support\n\n\n\n### Features\n\n\n\n- Add support for the LLVM `dlltool` flavor\n\n\n\n### Testing\n\n\n\n- Build import libraries for all targets\n\n\n\n## [0.1.1] - 2022-03-14\n\n\n\n### Documentation\n\n\n\n- Add multi-arch `build.rs` examples\n\n\n\n### Features\n\n\n\n- Add support for the 32-bit target architecture\n\n\n\n## [0.1.0] - 2022-02-21\n\n\n\n### Documentation\n\n\n\n- Add `build.rs` usage examples\n\n\n\n### Features\n\n\n\n- Generate module definition and invoke dlltool\n\n- Implement Module-Definition file writing\n\n- Implement 'stable_abi.txt' syntax parser\n\n\n\n### Miscellaneous Tasks\n\n\n\n- Add `git-cliff` config file\n\n- Add a change log file\n\n\n\n<!-- generated by git-cliff -->\n", "file_path": "CHANGELOG.md", "rank": 31, "score": 10.404724977082369 } ]
Rust
src/lib.rs
Ekleog/libtest-mimic
f902a460e6b951af824ba5f1a5fed5602cbd0314
extern crate crossbeam_channel; extern crate rayon; #[macro_use] extern crate structopt; extern crate termcolor; use std::{ process, }; use rayon::prelude::*; mod args; mod printer; pub use args::{Arguments, ColorSetting, FormatSetting}; #[derive(Clone, Debug)] pub struct Test<D = ()> { pub name: String, pub kind: String, pub is_ignored: bool, pub is_bench: bool, pub data: D, } impl<D: Default> Test<D> { pub fn test(name: impl Into<String>) -> Self { Self { name: name.into(), kind: String::new(), is_ignored: false, is_bench: false, data: D::default(), } } pub fn bench(name: impl Into<String>) -> Self { Self { name: name.into(), kind: String::new(), is_ignored: false, is_bench: true, data: D::default(), } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Outcome { Passed, Failed { msg: Option<String>, }, Ignored, Measured { avg: u64, variance: u64, }, } #[derive(Debug)] pub enum RunnerEvent<D> { Started { name: String, kind: String, }, Completed { test: Test<D>, outcome: Outcome, }, } #[derive(Clone, Debug)] #[must_use] pub struct Conclusion { has_failed: bool, num_filtered_out: u64, num_passed: u64, num_failed: u64, num_ignored: u64, num_benches: u64, } impl Conclusion { pub fn exit(&self) -> ! 
{ self.exit_if_failed(); process::exit(0); } pub fn exit_if_failed(&self) { if self.has_failed { process::exit(101) } } pub fn has_failed(&self) -> bool { self.has_failed } pub fn num_filtered_out(&self) -> u64 { self.num_filtered_out } pub fn num_passed(&self) -> u64 { self.num_passed } pub fn num_failed(&self) -> u64 { self.num_failed } pub fn num_ignored(&self) -> u64 { self.num_ignored } pub fn num_benches(&self) -> u64 { self.num_benches } } fn run_tests_threaded<D: 'static + Send + Sync>( args: &Arguments, tests: Vec<Test<D>>, run_test: impl Fn(&Test<D>) -> Outcome + 'static + Send + Sync, ) -> impl IntoIterator<Item = RunnerEvent<D>> { let mut builder = rayon::ThreadPoolBuilder::new(); if let Some(n) = args.num_threads { builder = builder.num_threads(n); } let pool = builder.build().expect("Unable to spawn threads"); let args = args.clone(); let (send, recv) = crossbeam_channel::bounded(0); pool.spawn(move || { tests.into_par_iter().for_each(|test| { let _ = send.send(RunnerEvent::Started { name: test.name.clone(), kind: test.kind.clone(), }); let is_ignored = (test.is_ignored && !args.ignored) || (test.is_bench && args.test) || (!test.is_bench && args.bench); let outcome = if is_ignored { Outcome::Ignored } else { run_test(&test) }; let _ = send.send(RunnerEvent::Completed { test, outcome }); }); }); recv } pub fn run_tests<D: 'static + Send + Sync>( args: &Arguments, tests: Vec<Test<D>>, run_test: impl Fn(&Test<D>) -> Outcome + 'static + Send + Sync, ) -> Conclusion { let (tests, num_filtered_out) = if args.filter_string.is_some() || !args.skip.is_empty() { let len_before = tests.len() as u64; let mut tests = tests; tests.retain(|t| { if let Some(filter) = &args.filter_string { match args.exact { true if &t.name != filter => return false, false if !t.name.contains(filter) => return false, _ => {} }; } for skip_filter in &args.skip { match args.exact { true if &t.name == skip_filter => return false, false if t.name.contains(skip_filter) => return false, _ 
=> {} } } true }); let num_filtered_out = len_before - tests.len() as u64; (tests, num_filtered_out) } else { (tests, 0) }; let mut printer = printer::Printer::new(args, &tests); if args.list { printer.print_list(&tests); return Conclusion { has_failed: false, num_filtered_out: 0, num_passed: 0, num_failed: 0, num_ignored: 0, num_benches: 0, }; } printer.print_title(tests.len() as u64); let mut failed_tests = Vec::new(); let mut num_ignored = 0; let mut num_benches = 0; let mut num_passed = 0; for event in run_tests_threaded(args, tests, run_test) { match event { RunnerEvent::Started { name, kind } => { if args.num_threads == Some(1) { printer.print_test(&name, &kind); } } RunnerEvent::Completed { test, outcome } => { if args.num_threads != Some(1) { printer.print_test(&test.name, &test.kind); } printer.print_single_outcome(&outcome); if test.is_bench { num_benches += 1; } match outcome { Outcome::Passed => num_passed += 1, Outcome::Failed { msg } => failed_tests.push((test, msg)), Outcome::Ignored => num_ignored += 1, Outcome::Measured { .. } => {} } } } } if !failed_tests.is_empty() { printer.print_failures(&failed_tests); } let num_failed = failed_tests.len() as u64; let conclusion = Conclusion { has_failed: num_failed != 0, num_filtered_out, num_passed, num_failed, num_ignored, num_benches, }; printer.print_summary(&conclusion); conclusion }
extern crate crossbeam_channel; extern crate rayon; #[macro_use] extern crate structopt; extern crate termcolor; use std::{ process, }; use rayon::prelude::*; mod args; mod printer; pub use args::{Arguments, ColorSetting, FormatSetting}; #[derive(Clone, Debug)] pub struct Test<
inter.print_title(tests.len() as u64); let mut failed_tests = Vec::new(); let mut num_ignored = 0; let mut num_benches = 0; let mut num_passed = 0; for event in run_tests_threaded(args, tests, run_test) { match event { RunnerEvent::Started { name, kind } => { if args.num_threads == Some(1) { printer.print_test(&name, &kind); } } RunnerEvent::Completed { test, outcome } => { if args.num_threads != Some(1) { printer.print_test(&test.name, &test.kind); } printer.print_single_outcome(&outcome); if test.is_bench { num_benches += 1; } match outcome { Outcome::Passed => num_passed += 1, Outcome::Failed { msg } => failed_tests.push((test, msg)), Outcome::Ignored => num_ignored += 1, Outcome::Measured { .. } => {} } } } } if !failed_tests.is_empty() { printer.print_failures(&failed_tests); } let num_failed = failed_tests.len() as u64; let conclusion = Conclusion { has_failed: num_failed != 0, num_filtered_out, num_passed, num_failed, num_ignored, num_benches, }; printer.print_summary(&conclusion); conclusion }
D = ()> { pub name: String, pub kind: String, pub is_ignored: bool, pub is_bench: bool, pub data: D, } impl<D: Default> Test<D> { pub fn test(name: impl Into<String>) -> Self { Self { name: name.into(), kind: String::new(), is_ignored: false, is_bench: false, data: D::default(), } } pub fn bench(name: impl Into<String>) -> Self { Self { name: name.into(), kind: String::new(), is_ignored: false, is_bench: true, data: D::default(), } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Outcome { Passed, Failed { msg: Option<String>, }, Ignored, Measured { avg: u64, variance: u64, }, } #[derive(Debug)] pub enum RunnerEvent<D> { Started { name: String, kind: String, }, Completed { test: Test<D>, outcome: Outcome, }, } #[derive(Clone, Debug)] #[must_use] pub struct Conclusion { has_failed: bool, num_filtered_out: u64, num_passed: u64, num_failed: u64, num_ignored: u64, num_benches: u64, } impl Conclusion { pub fn exit(&self) -> ! { self.exit_if_failed(); process::exit(0); } pub fn exit_if_failed(&self) { if self.has_failed { process::exit(101) } } pub fn has_failed(&self) -> bool { self.has_failed } pub fn num_filtered_out(&self) -> u64 { self.num_filtered_out } pub fn num_passed(&self) -> u64 { self.num_passed } pub fn num_failed(&self) -> u64 { self.num_failed } pub fn num_ignored(&self) -> u64 { self.num_ignored } pub fn num_benches(&self) -> u64 { self.num_benches } } fn run_tests_threaded<D: 'static + Send + Sync>( args: &Arguments, tests: Vec<Test<D>>, run_test: impl Fn(&Test<D>) -> Outcome + 'static + Send + Sync, ) -> impl IntoIterator<Item = RunnerEvent<D>> { let mut builder = rayon::ThreadPoolBuilder::new(); if let Some(n) = args.num_threads { builder = builder.num_threads(n); } let pool = builder.build().expect("Unable to spawn threads"); let args = args.clone(); let (send, recv) = crossbeam_channel::bounded(0); pool.spawn(move || { tests.into_par_iter().for_each(|test| { let _ = send.send(RunnerEvent::Started { name: test.name.clone(), kind: 
test.kind.clone(), }); let is_ignored = (test.is_ignored && !args.ignored) || (test.is_bench && args.test) || (!test.is_bench && args.bench); let outcome = if is_ignored { Outcome::Ignored } else { run_test(&test) }; let _ = send.send(RunnerEvent::Completed { test, outcome }); }); }); recv } pub fn run_tests<D: 'static + Send + Sync>( args: &Arguments, tests: Vec<Test<D>>, run_test: impl Fn(&Test<D>) -> Outcome + 'static + Send + Sync, ) -> Conclusion { let (tests, num_filtered_out) = if args.filter_string.is_some() || !args.skip.is_empty() { let len_before = tests.len() as u64; let mut tests = tests; tests.retain(|t| { if let Some(filter) = &args.filter_string { match args.exact { true if &t.name != filter => return false, false if !t.name.contains(filter) => return false, _ => {} }; } for skip_filter in &args.skip { match args.exact { true if &t.name == skip_filter => return false, false if t.name.contains(skip_filter) => return false, _ => {} } } true }); let num_filtered_out = len_before - tests.len() as u64; (tests, num_filtered_out) } else { (tests, 0) }; let mut printer = printer::Printer::new(args, &tests); if args.list { printer.print_list(&tests); return Conclusion { has_failed: false, num_filtered_out: 0, num_passed: 0, num_failed: 0, num_ignored: 0, num_benches: 0, }; } pr
random
[ { "content": "/// Performs a couple of tidy tests.\n\nfn run_test(test: &Test<PathBuf>) -> Outcome {\n\n let path = &test.data;\n\n let content = fs::read(path).expect(\"io error\");\n\n\n\n // Check that the file is valid UTF-8\n\n let content = match String::from_utf8(content) {\n\n Err(_) => {\n\n return Outcome::Failed {\n\n msg: Some(\"The file's contents are not a valid UTF-8 string!\".into()),\n\n };\n\n }\n\n Ok(s) => s,\n\n };\n\n\n\n // Check for `\\r`: we only want `\\n` line breaks!\n\n if content.contains('\\r') {\n\n return Outcome::Failed {\n\n msg: Some(\"Contains '\\\\r' chars. Please use ' \\\\n' line breaks only!\".into()),\n\n };\n\n }\n", "file_path": "examples/tidy.rs", "rank": 1, "score": 41483.20340661949 }, { "content": "/// Formats the given integer with `,` as thousand separator.\n\npub fn fmt_with_thousand_sep(mut v: u64) -> String {\n\n let mut out = String::new();\n\n while v >= 1000 {\n\n out = format!(\",{:03}{}\", v % 1000, out);\n\n v /= 1000;\n\n }\n\n out = format!(\"{}{}\", v, out);\n\n\n\n out\n\n}\n\n\n", "file_path": "src/printer.rs", "rank": 2, "score": 41269.08785688609 }, { "content": "/// Creates one test for each `.rs` file in the current directory or\n\n/// sub-directories of the current directory.\n\nfn collect_tests() -> Vec<Test<PathBuf>> {\n\n fn visit_dir(path: &Path, tests: &mut Vec<Test<PathBuf>>) -> Result<(), Box<dyn Error>> {\n\n for entry in fs::read_dir(path)? 
{\n\n let entry = entry?;\n\n let file_type = entry.file_type()?;\n\n\n\n // Handle files\n\n let path = entry.path();\n\n if file_type.is_file() {\n\n if path.extension() == Some(OsStr::new(\"rs\")) {\n\n let name = path\n\n .strip_prefix(env::current_dir()?)?\n\n .display()\n\n .to_string();\n\n\n\n tests.push(Test {\n\n name,\n\n kind: \"tidy\".into(),\n\n is_ignored: false,\n\n is_bench: false,\n", "file_path": "examples/tidy.rs", "rank": 3, "score": 34999.49966400144 }, { "content": " /// Like `from_args()`, but operates on an explicit iterator and not the global arguments.\n\n pub fn from_iter<I>(iter: I) -> Self\n\n where\n\n Self: Sized,\n\n I: IntoIterator,\n\n I::Item: Into<std::ffi::OsString> + Clone,\n\n {\n\n structopt::StructOpt::from_iter(iter)\n\n }\n\n}\n\n\n\n/// Possible values for the `--color` option.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ColorSetting {\n\n /// Colorize output if stdout is a tty and tests are run on serially\n\n /// (default).\n\n Auto,\n\n\n\n /// Always colorize output.\n\n Always,\n", "file_path": "src/args.rs", "rank": 4, "score": 22757.789639214745 }, { "content": "use std::str::FromStr;\n\n\n\nuse structopt;\n\n\n\n/// Command line arguments.\n\n///\n\n/// This type represents everything the user can specify via CLI args. The main\n\n/// method is [`from_args`][Arguments::from_args] which reads the global\n\n/// `std::env::args()` and parses them into this type.\n\n///\n\n/// The CLI is very similar to the one from the native test harness. However,\n\n/// there are minor differences:\n\n/// - Most notable: the `--help` message is slightly different. This comes from\n\n/// the fact that this crate (right now) uses structopt (which uses `clap`)\n\n/// while the original `libtest` uses `docopt`.\n\n/// - `--skip` only accepts one value per occurence (but can occur multiple\n\n/// times). 
This solves ambiguity with the `filter` value at the very end.\n\n/// Consider \"`--skip foo bar`\": should this be parsed as `skip: vec![\"foo\",\n\n/// \"bar\"], filter: None` or `skip: vec![\"foo\"], filter: Some(\"bar\")`? Here,\n\n/// it's clearly the latter version. If you need multiple values for `skip`,\n", "file_path": "src/args.rs", "rank": 5, "score": 22756.881439604083 }, { "content": "/// do it like this: `--skip foo --skip bar`.\n\n/// - `--bench` and `--test` cannot be both set at the same time. It doesn't\n\n/// make sense, but it's allowed in `libtest` for some reason.\n\n///\n\n/// **Note**: just because all CLI args can be parsed, doesn't mean that they\n\n/// are all automatically used. Check [`run_tests`][::run_tests] for information on which\n\n/// arguments are automatically used and require special care.\n\n#[derive(StructOpt, Debug, Clone)]\n\n#[structopt(\n\n template = \"USAGE: [FLAGS] [OPTIONS] [FILTER]\\n\\n{all-args}\\n\\n\\n{after-help}\",\n\n setting = structopt::clap::AppSettings::DisableVersion,\n\n after_help = \"By default, all tests are run in parallel. This can be altered with the \\n\\\n\n --test-threads flag or the RUST_TEST_THREADS environment variable when running \\n\\\n\n tests (set it to 1).\\n\\\n\n \\n\\\n\n All tests have their standard output and standard error captured by default. \\n\\\n\n This can be overridden with the --nocapture flag or setting RUST_TEST_NOCAPTURE \\n\\\n\n environment variable to a value other than \\\"0\\\". Logging is not captured by default.\",\n\n)]\n\npub struct Arguments {\n", "file_path": "src/args.rs", "rank": 6, "score": 22756.81411654578 }, { "content": " // ============== POSITIONAL VALUES =======================================\n\n /// Filter string. 
Only tests which contain this string are run.\n\n #[structopt(\n\n name = \"FILTER\",\n\n help = \"The FILTER string is tested against the name of all tests, and only those tests \\\n\n whose names contain the filter are run.\",\n\n )]\n\n pub filter_string: Option<String>,\n\n}\n\n\n\nimpl Arguments {\n\n /// Parses the global CLI arguments given to the application.\n\n ///\n\n /// If the parsing fails (due to incorrect CLI args), an error is shown and\n\n /// the application exits. If help is requested (`-h` or `--help`), a help\n\n /// message is shown and the application exits, too.\n\n pub fn from_args() -> Self {\n\n structopt::StructOpt::from_args()\n\n }\n\n\n", "file_path": "src/args.rs", "rank": 7, "score": 22755.813035074552 }, { "content": " /// `None`.\n\n #[structopt(\n\n short = \"q\",\n\n long = \"--quiet\",\n\n conflicts_with = \"format\",\n\n help = \"Display one character per test instead of one line. Alias to --format=terse\",\n\n )]\n\n pub quiet: bool,\n\n\n\n // ============== OPTIONS =================================================\n\n /// Number of threads used for parallel testing.\n\n #[structopt(\n\n long = \"--test-threads\",\n\n help = \"Number of threads used for running tests in parallel\",\n\n )]\n\n pub num_threads: Option<usize>,\n\n\n\n /// Path of the logfile. If specified, everything will be written into the\n\n /// file instead of stdout.\n\n #[structopt(\n", "file_path": "src/args.rs", "rank": 8, "score": 22754.063617536947 }, { "content": " }\n\n}\n\n\n\n/// Possible values for the `--format` option.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum FormatSetting {\n\n /// One line per test. Output for humans. (default)\n\n Pretty,\n\n\n\n /// One character per test. 
Usefull for test suites with many tests.\n\n Terse,\n\n\n\n /// Output as JSON.\n\n Json,\n\n}\n\n\n\nimpl Default for FormatSetting {\n\n fn default() -> Self {\n\n FormatSetting::Pretty\n\n }\n", "file_path": "src/args.rs", "rank": 9, "score": 22753.299081284527 }, { "content": "\n\n /// If set, stdout/stderr are not captured during the test but are instead\n\n /// printed directly.\n\n #[structopt(\n\n long = \"--nocapture\",\n\n help = \"don't capture stdout/stderr of each task, allow printing directly\",\n\n )]\n\n pub nocapture: bool,\n\n\n\n /// If set, filters are matched exactly rather than by substring.\n\n #[structopt(\n\n long = \"--exact\",\n\n help = \"Exactly match filters rather than by substring\",\n\n )]\n\n pub exact: bool,\n\n\n\n /// If set, display only one character per test instead of one line.\n\n /// Especially useful for huge test suites.\n\n ///\n\n /// This is an alias for `--format=terse`. If this is set, `format` is\n", "file_path": "src/args.rs", "rank": 10, "score": 22753.138398567295 }, { "content": " long = \"--logfile\",\n\n value_name = \"PATH\",\n\n help = \"Write logs to the specified file instead of stdout\",\n\n )]\n\n pub logfile: Option<String>,\n\n\n\n /// A list of filters. 
Tests whose names contain parts of any of these\n\n /// filters are skipped.\n\n #[structopt(\n\n long = \"--skip\",\n\n value_name = \"FILTER\",\n\n number_of_values = 1,\n\n help = \"Skip tests whose names contain FILTER (this flag can be used multiple times)\",\n\n )]\n\n pub skip: Vec<String>,\n\n\n\n /// Specifies whether or not to color the output.\n\n #[structopt(\n\n long = \"--color\",\n\n possible_values = &[\"auto\", \"always\", \"never\"],\n", "file_path": "src/args.rs", "rank": 11, "score": 22752.864248830574 }, { "content": " // ============== FLAGS ===================================================\n\n /// Determines if ignored tests should be run.\n\n #[structopt(long = \"--ignored\", help = \"Run ignored tests\")]\n\n pub ignored: bool,\n\n\n\n /// Run tests, but not benchmarks.\n\n #[structopt(\n\n long = \"--test\",\n\n conflicts_with = \"bench\",\n\n help = \"Run tests and not benchmarks\",\n\n )]\n\n pub test: bool,\n\n\n\n /// Run benchmarks, but not tests.\n\n #[structopt(long = \"--bench\", help = \"Run benchmarks instead of tests\")]\n\n pub bench: bool,\n\n\n\n /// Only list all tests and benchmarks.\n\n #[structopt(long = \"--list\", help = \"List all tests and benchmarks\")]\n\n pub list: bool,\n", "file_path": "src/args.rs", "rank": 12, "score": 22752.760627995518 }, { "content": " value_name = \"auto|always|never\",\n\n help = \"Configure coloring of output: \\n\\\n\n - auto = colorize if stdout is a tty and tests are run on serially (default)\\n\\\n\n - always = always colorize output\\n\\\n\n - never = never colorize output\\n\",\n\n )]\n\n pub color: Option<ColorSetting>,\n\n\n\n /// Specifies the format of the output.\n\n #[structopt(\n\n long = \"--format\",\n\n possible_values = &[\"pretty\", \"terse\", \"json\"],\n\n value_name = \"pretty|terse|json\",\n\n help = \"Configure formatting of output: \\n\\\n\n - pretty = Print verbose output\\n\\\n\n - terse = Display one character per test\\n\\\n\n - json = Output a json 
document\\n\",\n\n )]\n\n pub format: Option<FormatSetting>,\n\n\n", "file_path": "src/args.rs", "rank": 13, "score": 22751.311731162346 }, { "content": "\n\n /// Never colorize output.\n\n Never,\n\n}\n\n\n\nimpl Default for ColorSetting {\n\n fn default() -> Self {\n\n ColorSetting::Auto\n\n }\n\n}\n\n\n\nimpl FromStr for ColorSetting {\n\n type Err = &'static str;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"auto\" => Ok(ColorSetting::Auto),\n\n \"always\" => Ok(ColorSetting::Always),\n\n \"never\" => Ok(ColorSetting::Never),\n\n _ => Err(\"foo\"),\n\n }\n", "file_path": "src/args.rs", "rank": 14, "score": 22748.26549383626 }, { "content": "}\n\n\n\nimpl FromStr for FormatSetting {\n\n type Err = &'static str;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"pretty\" => Ok(FormatSetting::Pretty),\n\n \"terse\" => Ok(FormatSetting::Terse),\n\n \"json\" => Ok(FormatSetting::Json),\n\n _ => Err(\"foo\"),\n\n }\n\n }\n\n}\n", "file_path": "src/args.rs", "rank": 15, "score": 22748.26549383626 }, { "content": "//! Definition of the `Printer`.\n\n//!\n\n//! This is just an abstraction for everything that is printed to the screen\n\n//! (or logfile, if specified). These parameters influence printing:\n\n//! - `color`\n\n//! - `format` (and `quiet`)\n\n//! 
- `logfile`\n\n\n\nuse std::fs::File;\n\n\n\nuse termcolor::{Ansi, Color, ColorChoice, ColorSpec, NoColor, StandardStream, WriteColor};\n\n\n\nuse ::{Arguments, ColorSetting, Conclusion, FormatSetting, Outcome, Test};\n\n\n\npub(crate) struct Printer {\n\n out: Box<dyn WriteColor>,\n\n format: FormatSetting,\n\n name_width: usize,\n\n kind_width: usize,\n\n}\n", "file_path": "src/printer.rs", "rank": 16, "score": 22425.407008784012 }, { "content": "\n\nimpl Printer {\n\n /// Creates a new printer configured by the given arguments (`format`,\n\n /// `quiet`, `color` and `logfile` options).\n\n pub(crate) fn new<D>(args: &Arguments, tests: &[Test<D>]) -> Self {\n\n let color_arg = args.color.unwrap_or(ColorSetting::Auto);\n\n\n\n // Determine target of all output\n\n let out = if let Some(logfile) = &args.logfile {\n\n let f = File::create(logfile).expect(\"failed to create logfile\");\n\n if color_arg == ColorSetting::Always {\n\n Box::new(Ansi::new(f)) as Box<dyn WriteColor>\n\n } else {\n\n Box::new(NoColor::new(f))\n\n }\n\n } else {\n\n let choice = match color_arg {\n\n ColorSetting::Auto => ColorChoice::Auto,\n\n ColorSetting::Always => ColorChoice::Always,\n\n ColorSetting::Never => ColorChoice::Never,\n", "file_path": "src/printer.rs", "rank": 17, "score": 22422.033294307974 }, { "content": " conclusion.num_benches(),\n\n conclusion.num_filtered_out(),\n\n ).unwrap();\n\n writeln!(self.out).unwrap();\n\n }\n\n FormatSetting::Json => unimplemented!(),\n\n }\n\n }\n\n\n\n /// Prints a list of all tests. 
Used if `--list` is set.\n\n pub(crate) fn print_list<D>(&mut self, tests: &[Test<D>]) {\n\n for test in tests {\n\n let kind = if test.kind.is_empty() {\n\n format!(\"\")\n\n } else {\n\n format!(\"[{}] \", test.kind)\n\n };\n\n\n\n writeln!(\n\n self.out,\n", "file_path": "src/printer.rs", "rank": 18, "score": 22420.885648122166 }, { "content": " }\n\n\n\n /// Prints the first line \"running 3 tests\".\n\n pub(crate) fn print_title(&mut self, num_tests: u64) {\n\n match self.format {\n\n FormatSetting::Pretty | FormatSetting::Terse => {\n\n let plural_s = if num_tests == 1 {\n\n \"\"\n\n } else {\n\n \"s\"\n\n };\n\n\n\n writeln!(self.out).unwrap();\n\n writeln!(self.out, \"running {} test{}\", num_tests, plural_s).unwrap();\n\n }\n\n FormatSetting::Json => unimplemented!(),\n\n }\n\n }\n\n\n\n /// Prints the text announcing the test (e.g. \"test foo::bar ... \"). Prints\n", "file_path": "src/printer.rs", "rank": 19, "score": 22418.940206056206 }, { "content": " /// nothing in terse mode.\n\n pub(crate) fn print_test(&mut self, name: &str, kind: &str) {\n\n match self.format {\n\n FormatSetting::Pretty => {\n\n let kind = if kind.is_empty() {\n\n format!(\"\")\n\n } else {\n\n format!(\"[{}] \", kind)\n\n };\n\n\n\n write!(\n\n self.out,\n\n \"test {: <2$}{: <3$} ... \",\n\n kind,\n\n name,\n\n self.kind_width,\n\n self.name_width,\n\n ).unwrap();\n\n self.out.flush().unwrap();\n\n }\n", "file_path": "src/printer.rs", "rank": 20, "score": 22418.90745000497 }, { "content": " \"{}{}: {}\",\n\n kind,\n\n test.name,\n\n if test.is_bench { \"bench\" } else { \"test\" },\n\n ).unwrap();\n\n }\n\n }\n\n\n\n /// Prints a list of failed tests with their messages. 
This is only called\n\n /// if there were any failures.\n\n pub(crate) fn print_failures<D>(&mut self, fails: &[(Test<D>, Option<String>)]) {\n\n writeln!(self.out).unwrap();\n\n writeln!(self.out, \"failures:\").unwrap();\n\n writeln!(self.out).unwrap();\n\n\n\n // Print messages of all tests\n\n for (test, msg) in fails {\n\n writeln!(self.out, \"---- {} ----\", test.name).unwrap();\n\n if let Some(msg) = msg {\n\n writeln!(self.out, \"{}\", msg).unwrap();\n", "file_path": "src/printer.rs", "rank": 21, "score": 22418.87355274904 }, { "content": " Outcome::Ignored => 'i',\n\n Outcome::Measured { .. } => {\n\n // Benchmark are never printed in terse mode... for\n\n // some reason.\n\n self.print_outcome_pretty(outcome);\n\n writeln!(self.out).unwrap();\n\n return;\n\n }\n\n };\n\n\n\n self.out.set_color(&color_of_outcome(outcome)).unwrap();\n\n write!(self.out, \"{}\", c).unwrap();\n\n self.out.reset().unwrap();\n\n }\n\n FormatSetting::Json => unimplemented!(),\n\n }\n\n }\n\n\n\n /// Prints the summary line after all tests have been executed.\n\n pub(crate) fn print_summary(\n", "file_path": "src/printer.rs", "rank": 22, "score": 22418.301641169648 }, { "content": " FormatSetting::Terse => {\n\n // In terse mode, nothing is printed before the job. Only\n\n // `print_single_outcome` prints one character.\n\n }\n\n FormatSetting::Json => unimplemented!(),\n\n }\n\n }\n\n\n\n /// Prints the outcome of a single tests. `ok` or `FAILED` in pretty mode\n\n /// and `.` or `F` in terse mode.\n\n pub(crate) fn print_single_outcome(&mut self, outcome: &Outcome) {\n\n match self.format {\n\n FormatSetting::Pretty => {\n\n self.print_outcome_pretty(outcome);\n\n writeln!(self.out).unwrap();\n\n }\n\n FormatSetting::Terse => {\n\n let c = match outcome {\n\n Outcome::Passed => '.',\n\n Outcome::Failed { .. 
} => 'F',\n", "file_path": "src/printer.rs", "rank": 23, "score": 22417.583393159333 }, { "content": " };\n\n Box::new(StandardStream::stdout(choice))\n\n };\n\n\n\n // Determine correct format\n\n let format = if args.quiet {\n\n FormatSetting::Terse\n\n } else {\n\n args.format.unwrap_or(FormatSetting::Pretty)\n\n };\n\n\n\n // Determine max test name length to do nice formatting later.\n\n //\n\n // Unicode is hard and there is no way we can properly align/pad the\n\n // test names and outcomes. Counting the number of code points is just\n\n // a cheap way that works in most cases. Usually, these names are\n\n // ASCII.\n\n let name_width = tests.iter()\n\n .map(|test| test.name.chars().count())\n\n .max()\n", "file_path": "src/printer.rs", "rank": 24, "score": 22417.574204939625 }, { "content": " .unwrap_or(0);\n\n\n\n let kind_width = tests.iter()\n\n .map(|test| {\n\n if test.kind.is_empty() {\n\n 0\n\n } else {\n\n // The two braces [] and one space\n\n test.kind.chars().count() + 3\n\n }\n\n })\n\n .max()\n\n .unwrap_or(0);\n\n\n\n Self {\n\n out,\n\n format,\n\n name_width,\n\n kind_width,\n\n }\n", "file_path": "src/printer.rs", "rank": 25, "score": 22415.650894754966 }, { "content": " }\n\n writeln!(self.out).unwrap();\n\n }\n\n\n\n // Print summary list of failed tests\n\n writeln!(self.out).unwrap();\n\n writeln!(self.out, \"failures:\").unwrap();\n\n for (test, _) in fails {\n\n writeln!(self.out, \" {}\", test.name).unwrap();\n\n }\n\n }\n\n\n\n /// Prints a colored 'ok'/'FAILED'/'ignored'/'bench'.\n\n fn print_outcome_pretty(&mut self, outcome: &Outcome) {\n\n let s = match outcome {\n\n Outcome::Passed => \"ok\",\n\n Outcome::Failed { .. } => \"FAILED\",\n\n Outcome::Ignored => \"ignored\",\n\n Outcome::Measured { .. 
} => \"bench\",\n\n };\n", "file_path": "src/printer.rs", "rank": 26, "score": 22415.39323366041 }, { "content": " &mut self,\n\n conclusion: &Conclusion,\n\n ) {\n\n match self.format {\n\n FormatSetting::Pretty | FormatSetting::Terse => {\n\n let outcome = if conclusion.has_failed() {\n\n Outcome::Failed { msg: None }\n\n } else {\n\n Outcome::Passed\n\n };\n\n\n\n writeln!(self.out).unwrap();\n\n write!(self.out, \"test result: \").unwrap();\n\n self.print_outcome_pretty(&outcome);\n\n writeln!(\n\n self.out,\n\n \". {} passed; {} failed; {} ignored; {} measured; {} filtered out\",\n\n conclusion.num_passed(),\n\n conclusion.num_failed(),\n\n conclusion.num_ignored(),\n", "file_path": "src/printer.rs", "rank": 27, "score": 22414.808937392532 }, { "content": "\n\n self.out.set_color(&color_of_outcome(outcome)).unwrap();\n\n write!(self.out, \"{}\", s).unwrap();\n\n self.out.reset().unwrap();\n\n\n\n if let Outcome::Measured { avg, variance } = outcome {\n\n write!(\n\n self.out,\n\n \": {:>11} ns/iter (+/- {})\",\n\n fmt_with_thousand_sep(*avg),\n\n fmt_with_thousand_sep(*variance),\n\n ).unwrap();\n\n }\n\n }\n\n}\n\n\n\n/// Formats the given integer with `,` as thousand separator.\n", "file_path": "src/printer.rs", "rank": 28, "score": 22413.843043140714 }, { "content": "/// Returns the `ColorSpec` associated with the given outcome.\n\nfn color_of_outcome(outcome: &Outcome) -> ColorSpec {\n\n let mut out = ColorSpec::new();\n\n let color = match outcome {\n\n Outcome::Passed => Color::Green,\n\n Outcome::Failed { .. } => Color::Red,\n\n Outcome::Ignored => Color::Yellow,\n\n Outcome::Measured { .. 
} => Color::Cyan,\n\n };\n\n out.set_fg(Some(color));\n\n out\n\n}\n", "file_path": "src/printer.rs", "rank": 30, "score": 16207.42997070307 }, { "content": "extern crate libtest_mimic;\n\n\n\nuse std::{thread, time};\n\nuse libtest_mimic::{Arguments, Test, Outcome, run_tests};\n\n\n\n\n", "file_path": "examples/simple.rs", "rank": 32, "score": 11.603386199725803 }, { "content": "extern crate libtest_mimic;\n\n\n\nuse libtest_mimic::{Arguments, Test, Outcome, run_tests};\n\n\n\nuse std::{\n\n env,\n\n error::Error,\n\n ffi::OsStr,\n\n fs,\n\n path::{Path, PathBuf},\n\n};\n\n\n\n\n", "file_path": "examples/tidy.rs", "rank": 33, "score": 10.539827029125739 }, { "content": "extern crate libtest_mimic;\n\n\n\nuse libtest_mimic::{Arguments, Test, Outcome, run_tests};\n\n\n\n\n", "file_path": "examples/benches.rs", "rank": 36, "score": 9.065160912765556 }, { "content": "extern crate libtest_mimic;\n\n\n\nuse libtest_mimic::{Arguments, Test, Outcome, run_tests};\n\n\n\n\n", "file_path": "examples/many.rs", "rank": 37, "score": 9.065160912765556 }, { "content": "libtest-mimic\n\n=============\n\n[![Build Status](https://img.shields.io/travis/LukasKalbertodt/libtest-mimic/master.svg)](https://travis-ci.org/LukasKalbertodt/libtest-mimic)\n\n[![crates.io version](https://img.shields.io/crates/v/libtest-mimic.svg)](https://crates.io/crates/libtest-mimic)\n\n[![docs](https://docs.rs/libtest-mimic/badge.svg)](https://docs.rs/libtest-mimic)\n\n\n\nWrite your own test harness that looks and behaves like the built-in test harness (used by `rustc --test`)!\n\n\n\nThis is a simple and small testing framework that mimics the original `libtest` (used by `rustc --test`). That means: all output looks pretty much like `cargo test` and most CLI arguments are understood and used. 
With that plumbing work out of the way, your test runner can concentrate on the actual testing.\n\n\n\n**See it in action** (with the `tidy` example):\n\n\n\n[![asciicast](https://asciinema.org/a/ZBQ5vkwW5VaQCn7VGohuNFxr2.png)](https://asciinema.org/a/ZBQ5vkwW5VaQCn7VGohuNFxr2)\n\n\n\n\n\n# Example\n\n\n\n```rust\n\nextern crate libtest_mimic;\n\n\n\nuse libtest_mimic::{Arguments, Test, Outcome, run_tests};\n\n\n\n\n\n// Parse command line arguments\n\nlet args = Arguments::from_args();\n\n\n\n// Create a list of tests (in this case: three dummy tests)\n\nlet tests = vec![\n\n Test::test(\"toph\"),\n\n Test::test(\"sokka\"),\n\n Test {\n\n name: \"long_computation\".into(),\n\n kind: \"\".into(),\n\n is_ignored: true,\n\n is_bench: false,\n\n data: (),\n\n },\n\n];\n\n\n\n// Run all tests and exit the application appropriatly (in this case, the\n\n// test runner is a dummy runner which does nothing and says that all s\n\n// passed).\n\nrun_tests(&args, tests, |test| Outcome::Passed).exit();\n\n```\n\n\n\nFor more examples, see [`examples/`](https://github.com/LukasKalbertodt/libtest-mimic/tree/master/examples).\n\n\n\n---\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall\n\nbe dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 40, "score": 6.8333156682835074 }, { "content": " kind: \"\".into(),\n\n is_ignored: false,\n\n is_bench: true,\n\n data: (19082, 99),\n\n },\n\n ];\n\n\n\n run_tests(&args, tests, |test| {\n\n let (avg, variance) = test.data;\n\n Outcome::Measured { avg, variance }\n\n 
}).exit();\n\n}\n", "file_path": "examples/benches.rs", "rank": 49, "score": 4.040600703363701 }, { "content": " },\n\n Test::test(\"katara\"),\n\n ];\n\n\n\n run_tests(&args, tests, |test| {\n\n if test.name == \"sokka\" {\n\n Outcome::Failed { msg: Some(\"Sokka tripped and fell :(\".into()) }\n\n } else if test.name == \"long_computation\" {\n\n thread::sleep(time::Duration::from_secs(1));\n\n Outcome::Passed\n\n } else {\n\n Outcome::Passed\n\n }\n\n }).exit();\n\n}\n", "file_path": "examples/simple.rs", "rank": 50, "score": 3.8491908684495746 }, { "content": "Examples\n\n========\n\n\n\n- `benches`: small example with three dummy benchmarks\n\n- `many`: 100 dummy tests are generated and executed\n\n- `simple`: five dummy tests are created and executed\n\n- **`tidy`**: more useful example. Generates a test for each `.rs` file and runs a simply tidy script as test.\n", "file_path": "examples/README.md", "rank": 52, "score": 3.6676203107418006 }, { "content": " data: path,\n\n })\n\n }\n\n } else if file_type.is_dir() {\n\n // Handle directories\n\n visit_dir(&path, tests)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n // We recursively look for `.rs` files, starting from the current\n\n // directory.\n\n let mut tests = Vec::new();\n\n let current_dir = env::current_dir().expect(\"invalid working directory\");\n\n visit_dir(&current_dir, &mut tests).expect(\"unexpected IO error\");\n\n\n\n tests\n\n}\n\n\n", "file_path": "examples/tidy.rs", "rank": 56, "score": 1.6804736280748926 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.3.0] - 2020-06-28\n\n### Added\n\n- Add support for running tests in parallel #4\n\n- Add `Arguments::from_iter` #5\n\n\n\n## [0.2.0] - 2019-10-02\n\n### Changed\n\n- Upgrade 
dependencies #3\n\n- Flush stdout after printing test name 4a36b3318b69df233b0db7d1af3caf276e6bb070\n\n\n\n### Fixed\n\n- Fix overflow bug when calculating number of passed tests 264fe6f8a986ab0c02f4a85e64e42ee17596923c\n\n\n\n## 0.1.0 - 2018-07-23\n\n### Added\n\n- Everything.\n\n\n\n\n\n[Unreleased]: https://github.com/LukasKalbertodt/stable-vec/compare/v0.3.0...HEAD\n\n[0.3.0]: https://github.com/LukasKalbertodt/stable-vec/compare/v0.2.0...v0.3.0\n\n[0.2.0]: https://github.com/LukasKalbertodt/stable-vec/compare/v0.1.0...v0.2.0\n\n[0.1.1]: https://github.com/LukasKalbertodt/stable-vec/compare/v0.1.0...v0.1.1\n", "file_path": "CHANGELOG.md", "rank": 57, "score": 1.2253688661399025 } ]
Rust
src/indicators/woodies_cci.rs
b100pro/yata
18bf83e134e5a89bc57e58b39d0cff9d098c9543
#[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use super::commodity_channel_index::CommodityChannelIndexInstance; use super::CommodityChannelIndex; use crate::core::{Action, Error, Method, PeriodType, ValueType, Window, OHLC}; use crate::core::{IndicatorConfig, IndicatorInitializer, IndicatorInstance, IndicatorResult}; use crate::helpers::signi; use crate::methods::{Cross, CrossAbove, CrossUnder, SMA}; #[derive(Debug, Clone, Copy)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct WoodiesCCI { pub period1: PeriodType, pub period2: PeriodType, pub signal1_period: PeriodType, pub signal2_bars_count: isize, pub signal3_zone: ValueType, } impl IndicatorConfig for WoodiesCCI { const NAME: &'static str = "WoodiesCCI"; fn validate(&self) -> bool { self.period1 > self.period2 } fn set(&mut self, name: &str, value: String) -> Option<Error> { match name { "period1" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.period1 = value, }, "period2" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.period2 = value, }, "signal1_period" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.signal1_period = value, }, "signal1_bars_count" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.signal2_bars_count = value, }, "signal3_zone" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.signal3_zone = value, }, _ => { return Some(Error::ParameterParse(name.to_string(), value)); } }; None } fn size(&self) -> (u8, u8) { (2, 3) } } impl<T: OHLC> IndicatorInitializer<T> for WoodiesCCI { type Instance = WoodiesCCIInstance; fn init(self, candle: T) -> Result<Self::Instance, Error> where 
Self: Sized, { if !self.validate() { return Err(Error::WrongConfig); } let cfg = self; let mut cci1 = CommodityChannelIndex::default(); cci1.period = cfg.period1; let mut cci2 = CommodityChannelIndex::default(); cci2.period = cfg.period2; Ok(Self::Instance { cci1: cci1.init(candle)?, cci2: cci2.init(candle)?, sma: SMA::new(cfg.signal1_period, 0.)?, cross1: Cross::default(), cross2: Cross::default(), s2_sum: 0, s3_sum: 0., s3_count: 0, window: Window::new(cfg.signal2_bars_count as PeriodType, 0), cross_above: CrossAbove::default(), cross_under: CrossUnder::default(), cfg, }) } } impl Default for WoodiesCCI { fn default() -> Self { Self { period1: 14, period2: 6, signal1_period: 9, signal2_bars_count: 6, signal3_zone: 0.2, } } } #[derive(Debug)] pub struct WoodiesCCIInstance { cfg: WoodiesCCI, cci1: CommodityChannelIndexInstance, cci2: CommodityChannelIndexInstance, sma: SMA, cross1: Cross, cross2: Cross, s2_sum: isize, s3_sum: ValueType, s3_count: PeriodType, window: Window<i8>, cross_above: CrossAbove, cross_under: CrossUnder, } impl<T: OHLC> IndicatorInstance<T> for WoodiesCCIInstance { type Config = WoodiesCCI; fn config(&self) -> &Self::Config { &self.cfg } fn next(&mut self, candle: T) -> IndicatorResult { let cci1 = self.cci1.next(candle).value(0); let cci2 = self.cci2.next(candle).value(0); let cci1_sign = signi(cci1); let d_cci = cci1 - cci2; let sma = self.sma.next(d_cci); let s1 = self.cross1.next((sma, 0.)); let s0 = self.cross2.next((cci1, 0.)); self.s2_sum += (cci1_sign - self.window.push(cci1_sign)) as isize; let s2 = (self.s2_sum >= self.cfg.signal2_bars_count) as i8 - (self.s2_sum <= -self.cfg.signal2_bars_count) as i8; let is_none = s0.is_none(); self.s3_sum *= is_none as i8 as ValueType; self.s3_count *= is_none as PeriodType; self.s3_sum += cci1; self.s3_count += 1; let s3v = self.s3_sum / self.s3_count as ValueType; let s3 = self.cross_above.next((s3v, self.cfg.signal3_zone)) - self.cross_under.next((s3v, -self.cfg.signal3_zone)); 
IndicatorResult::new(&[cci1, cci2], &[s1, Action::from(s2), s3]) } }
#[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use super::commodity_channel_index::CommodityChannelIndexInstance; use super::CommodityChannelIndex; use crate::core::{Action, Error, Method, PeriodType, ValueType, Window, OHLC}; use crate::core::{IndicatorConfig, IndicatorInitializer, IndicatorInstance, IndicatorResult}; use crate::helpers::signi; use crate::methods::{Cross, CrossAbove, CrossUnder, SMA}; #[derive(Debug, Clone, Copy)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct WoodiesCCI { pub period1: PeriodType, pub period2: PeriodType, pub signal1_period: PeriodType, pub signal2_bars_count: isize, pub signal3_zone: ValueType, } impl IndicatorConfig for WoodiesCCI { const NAME: &'static str = "WoodiesCCI"; fn validate(&self) -> bool { self.period1 > self.period2 } fn set(&mut self, name: &str, value: String) -> Option<Error> { match name { "period1" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.period1 = value, }, "period2" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.period2 = value, }, "signal1_period" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.signal1_period = value, }, "signal1_bars_count" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.signal2_bars_count = value, }, "signal3_zone" => match value.parse() { Err(_) => return Some(Error::ParameterParse(name.to_string(), value.to_string())), Ok(value) => self.signal3_zone = value, }, _ => { return Some(Error::ParameterParse(name.to_string(), value)); } }; None } fn size(&self) -> (u8, u8) { (2, 3) } } impl<T: OHLC> IndicatorInitializer<T> for WoodiesCCI { type Instance = WoodiesCCIInstance; fn init(self, candle: T) -> Result<Self::Instance, Error> where 
Self: Sized, { if !self.validate() { return Err(Error::WrongConfig); } let cfg = self; let mut cci1 = CommodityChannelIndex::default(); cci1.period = cfg.period1; let mut cci2 = CommodityChannelIndex::default(); cci2.period = cfg.period2; Ok(Self::Instance { cci1: cci1.init(candle)?, cci2: cci2.init(candle)?, sma: SMA::new(cfg.signal1_period, 0.)?, cross1: Cross::default(), cross2: Cross::default(), s2_sum: 0, s3_sum: 0., s3_count: 0, window: Window::new(cfg.signal2_bars_count as PeriodType, 0), cross_above: CrossAbove::default(), cross_under: CrossUnder::default(), cfg, }) } } impl Default for WoodiesCCI { fn default() -> Self { Self { period1: 14, period2: 6, signal1_period: 9, signal2_bars_count: 6, signal3_zone: 0.2, } } } #[derive(Debug)] pub struct WoodiesCCIInstance { cfg: WoodiesCCI, cci1: CommodityChannelIndexInstance, cci2: CommodityChannelIndexInstance, sma: SMA, cross1: Cross, cross2: Cross, s2_sum: isize, s3_sum: ValueType, s3_count: PeriodType, window: Window<i8>, cross_above: CrossAbove, cross_under: CrossUnder, } impl<T: OHLC> IndicatorInstance<T> for WoodiesCCIInstance { type Config = WoodiesCCI; fn config(&self) -> &Self::Config { &self.cfg }
}
fn next(&mut self, candle: T) -> IndicatorResult { let cci1 = self.cci1.next(candle).value(0); let cci2 = self.cci2.next(candle).value(0); let cci1_sign = signi(cci1); let d_cci = cci1 - cci2; let sma = self.sma.next(d_cci); let s1 = self.cross1.next((sma, 0.)); let s0 = self.cross2.next((cci1, 0.)); self.s2_sum += (cci1_sign - self.window.push(cci1_sign)) as isize; let s2 = (self.s2_sum >= self.cfg.signal2_bars_count) as i8 - (self.s2_sum <= -self.cfg.signal2_bars_count) as i8; let is_none = s0.is_none(); self.s3_sum *= is_none as i8 as ValueType; self.s3_count *= is_none as PeriodType; self.s3_sum += cci1; self.s3_count += 1; let s3v = self.s3_sum / self.s3_count as ValueType; let s3 = self.cross_above.next((s3v, self.cfg.signal3_zone)) - self.cross_under.next((s3v, -self.cfg.signal3_zone)); IndicatorResult::new(&[cci1, cci2], &[s1, Action::from(s2), s3]) }
function_block-full_function
[ { "content": "/// Basic trait for implementing [Open-High-Low-Close timeseries data](https://en.wikipedia.org/wiki/Candlestick_chart).\n\n///\n\n/// It has already implemented for tuple of 4 and 5 float values:\n\n/// ```\n\n/// use yata::prelude::OHLC;\n\n/// // open high low close\n\n/// let row = (2.0, 5.0, 1.0, 4.0 );\n\n/// assert_eq!(row.open(), row.0);\n\n/// assert_eq!(row.high(), row.1);\n\n/// assert_eq!(row.low(), row.2);\n\n/// assert_eq!(row.close(), row.3);\n\n/// ```\n\n///\n\n/// See also [Candle](crate::prelude::Candle).\n\npub trait OHLC: Copy + Debug + Default {\n\n\t/// Should return an *open* value of the period\n\n\tfn open(&self) -> ValueType;\n\n\n\n\t/// Should return an *highest* value of the period\n\n\tfn high(&self) -> ValueType;\n\n\n\n\t/// Should return an *lowest* value of the period\n\n\tfn low(&self) -> ValueType;\n\n\n\n\t/// Should return an *close* value of the candle\n\n\tfn close(&self) -> ValueType;\n\n\n\n\t/// Calculates [Typical price](https://en.wikipedia.org/wiki/Typical_price).\n\n\t/// It's just a simple (High + Low + Close) / 3\n\n\t///\n\n\t/// # Examples\n\n\t///\n\n\t/// ```\n\n\t/// use yata::prelude::*;\n", "file_path": "src/core/ohlcv.rs", "rank": 0, "score": 206538.24033123298 }, { "content": "#[inline]\n\npub fn sign(value: ValueType) -> ValueType {\n\n\t// if value > 0. {\n\n\t// \t1.\n\n\t// } else if value < 0. {\n\n\t// \t-1.\n\n\t// } else {\n\n\t// \t0.\n\n\t// }\n\n\t((value > 0.) as i8 - (value < 0.) 
as i8) as ValueType\n\n}\n\n\n\n/// signi is like [f64.signum](https://doc.rust-lang.org/std/primitive.f64.html#method.signum), except 2 things\n\n/// - when value == 0.0, then signi returns 0\n\n/// - signi always returns i8\n\n///\n\n/// See also [sign]\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use yata::helpers::signi;\n\n///\n\n/// assert_eq!(signi(4.65), 1);\n\n/// assert_eq!(signi(-25.6), -1);\n\n/// assert_eq!(signi(0.0), 0);\n\n/// assert_eq!(signi(-0.0), 0);\n\n/// assert_eq!(signi(0.000001), 1);\n\n/// assert_eq!(signi(-0.000001), -1);\n\n/// ```\n", "file_path": "src/helpers/mod.rs", "rank": 1, "score": 194691.15114787564 }, { "content": "/// Each indicator has it's own **Configuration** with parameters\n\n///\n\n/// Each that config should implement `IndicatorConfig` trait\n\n///\n\n/// See example with [`Example Indicator`](crate::indicators::example)\n\n// Config cannot be Copy because it might consist ov Vec-s. F.e. if indicator using Conv method with custom weights.\n\npub trait IndicatorConfig: Clone {\n\n\t/// Name of an indicator\n\n\tconst NAME: &'static str;\n\n\n\n\t/// Validates if **Configuration** is OK\n\n\tfn validate(&self) -> bool;\n\n\n\n\t/// Sets dynamically **Configuration** parameters\n\n\tfn set(&mut self, name: &str, value: String) -> Option<Error>;\n\n\n\n\t/// Should return `true` if indicator uses *volume* data\n\n\tfn is_volume_based(&self) -> bool {\n\n\t\tfalse\n\n\t}\n\n\n\n\t/// Returns a name of the indicator\n\n\tfn name(&self) -> &'static str {\n\n\t\tSelf::NAME\n\n\t}\n\n\n\n\t/// Returns an [IndicatorResult](crate::core::IndicatorResult) size processing by the indicator `(count of raw value, count of signals)`\n\n\tfn size(&self) -> (u8, u8);\n\n}\n\n\n", "file_path": "src/core/indicator/config.rs", "rank": 2, "score": 190464.12428670388 }, { "content": "pub fn method(\n\n\tmethod: RegularMethods,\n\n\tlength: PeriodType,\n\n\tinitial_value: ValueType,\n\n) -> Result<RegularMethod, Error> {\n\n\tmatch method 
{\n\n\t\tRegularMethods::SMA => Ok(Box::new(SMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::WMA => Ok(Box::new(WMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::HMA => Ok(Box::new(HMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::RMA => Ok(Box::new(RMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::EMA => Ok(Box::new(EMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::DMA => Ok(Box::new(DMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::DEMA => Ok(Box::new(DEMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::TMA => Ok(Box::new(TMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::TEMA => Ok(Box::new(TEMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::WSMA => Ok(Box::new(WSMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::SMM => Ok(Box::new(SMM::new(length, initial_value)?)),\n\n\t\tRegularMethods::SWMA => Ok(Box::new(SWMA::new(length, initial_value)?)),\n\n\t\tRegularMethods::LinReg => Ok(Box::new(LinReg::new(length, initial_value)?)),\n\n\t\tRegularMethods::TRIMA => Ok(Box::new(TRIMA::new(length, initial_value)?)),\n", "file_path": "src/helpers/methods.rs", "rank": 3, "score": 187230.03778607113 }, { "content": "#[inline]\n\npub fn signi(value: ValueType) -> i8 {\n\n\t// if value > 0. {\n\n\t// \t1\n\n\t// } else if value < 0. {\n\n\t// \t-1\n\n\t// } else {\n\n\t// \t0\n\n\t// }\n\n\n\n\t(value > 0.) as i8 - (value < 0.) 
as i8\n\n}\n\n\n\n/// Random Candles iterator for testing purposes\n\n#[derive(Debug, Clone, Copy)]\n\npub struct RandomCandles(u16);\n\n\n\nimpl RandomCandles {\n\n\tconst DEFAULT_PRICE: ValueType = 1.0;\n\n\tconst DEFAULT_VOLUME: ValueType = 10.0;\n\n\n", "file_path": "src/helpers/mod.rs", "rank": 4, "score": 182111.62959300305 }, { "content": "fn bench_indicator<T: IndicatorConfig + IndicatorInitializer<Candle> + Default>(\n\n\tb: &mut test::Bencher,\n\n) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).collect();\n\n\tlet mut iter = candles.iter().copied().cycle();\n\n\tlet mut indicator = T::default().init(iter.next().unwrap()).unwrap();\n\n\n\n\tfor _ in 0..50 {\n\n\t\tindicator.next(iter.next().unwrap());\n\n\t}\n\n\n\n\tb.iter(|| indicator.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/indicators.rs", "rank": 5, "score": 181232.82868558966 }, { "content": "#[bench]\n\nfn bench_cross_above(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new()\n\n\t\t.take(1000)\n\n\t\t.map(|c| c.close)\n\n\t\t.zip(RandomCandles::new().skip(15).take(1000).map(|c| c.close))\n\n\t\t.collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = CrossAbove::new((), candles[0]).unwrap();\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 6, "score": 168282.1013091092 }, { "content": "#[bench]\n\nfn bench_cross_under(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new()\n\n\t\t.take(1000)\n\n\t\t.map(|c| c.close)\n\n\t\t.zip(RandomCandles::new().skip(15).take(1000).map(|c| c.close))\n\n\t\t.collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = CrossUnder::new((), candles[0]).unwrap();\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// SMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 7, "score": 
168282.1013091092 }, { "content": "#[bench]\n\nfn bench_cross(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new()\n\n\t\t.take(1000)\n\n\t\t.map(|c| c.close)\n\n\t\t.zip(RandomCandles::new().skip(15).take(1000).map(|c| c.close))\n\n\t\t.collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Cross::new((), candles[0]).unwrap();\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 8, "score": 168282.1013091092 }, { "content": "#[bench]\n\nfn bench_sma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = SMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 9, "score": 164222.93240841458 }, { "content": "#[bench]\n\nfn bench_sma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = SMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// WMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 10, "score": 164222.93240841458 }, { "content": "/// Base trait for implementing indicators **State**\n\npub trait IndicatorInstance<T: OHLC> {\n\n\t// type Config: IndicatorConfig + IndicatorInitializer<T>;\n\n\t/// Type of Indicator **Configuration**\n\n\ttype Config: IndicatorConfig;\n\n\n\n\t/// Returns a reference to the indicator **Configuration**\n\n\tfn config(&self) -> &Self::Config;\n\n\n\n\t// fn config(&self) -> &dyn 
IndicatorConfig<T>;\n\n\n\n\t/// Preceed given candle and returns [`IndicatorResult`](crate::core::IndicatorResult)\n\n\tfn next(&mut self, candle: T) -> IndicatorResult\n\n\twhere\n\n\t\tSelf: Sized;\n\n\n\n\t/// Evaluates the **State** over the given sequence of candles and returns sequence of `IndicatorResult`s.\n\n\t/// ```\n\n\t/// use yata::prelude::*;\n\n\t/// use yata::helpers::{RandomCandles};\n\n\t/// use yata::indicators::Trix;\n", "file_path": "src/core/indicator/instance.rs", "rank": 11, "score": 160411.59150837877 }, { "content": "#[inline]\n\nfn tfunc<T: OHLCV>(candle: &T, last_candle: &T) -> (ValueType, ValueType) {\n\n\tlet tp1 = candle.tp();\n\n\tlet tp2 = last_candle.tp();\n\n\n\n\t// if tp1 < tp2 {\n\n\t// \t(0., tp1 * candle.volume())\n\n\t// } else if tp1 > tp2 {\n\n\t// \t(tp1 * candle.volume(), 0.)\n\n\t// } else {\n\n\t// \t(0., 0.)\n\n\t// }\n\n\n\n\t(\n\n\t\t(tp1 > tp2) as i8 as ValueType * candle.volume(),\n\n\t\t(tp1 < tp2) as i8 as ValueType * candle.volume(),\n\n\t)\n\n}\n\n\n\nimpl<T: OHLCV> IndicatorInstance<T> for MoneyFlowIndexInstance<T> {\n\n\ttype Config = MoneyFlowIndex;\n", "file_path": "src/indicators/money_flow_index.rs", "rank": 12, "score": 157775.92133616266 }, { "content": "// find current value index\n\nfn find_index(value: ValueType, slice: &[ValueType], padding: usize) -> usize {\n\n\tif slice.len() == 1 {\n\n\t\treturn padding;\n\n\t}\n\n\n\n\tlet half = slice.len() / 2;\n\n\n\n\t// It's not a mistake. 
We really need a bit-to-bit comparison of float values here\n\n\tif value.to_bits() == slice[half].to_bits() {\n\n\t\tpadding + half\n\n\t} else if value > slice[half] {\n\n\t\tfind_index(value, &slice[(half + 1)..], padding + half + 1)\n\n\t} else {\n\n\t\tfind_index(value, &slice[..half], padding)\n\n\t}\n\n}\n\n\n", "file_path": "src/methods/smm.rs", "rank": 13, "score": 157725.2181665519 }, { "content": "// find new value insert index at\n\nfn find_insert_index(value: ValueType, slice: &[ValueType], padding: usize) -> usize {\n\n\tif slice.is_empty() {\n\n\t\treturn padding;\n\n\t}\n\n\n\n\tlet half = slice.len() / 2;\n\n\n\n\t// It's not a mistake. We really need a bit-to-bit comparison of float values here\n\n\tif value.to_bits() == slice[half].to_bits() {\n\n\t\tpadding + half\n\n\t} else if value > slice[half] {\n\n\t\tfind_insert_index(value, &slice[(half + 1)..], padding + half + 1)\n\n\t} else {\n\n\t\tfind_insert_index(value, &slice[..half], padding)\n\n\t}\n\n}\n\n///\n\n/// [Simple Moving Median](https://en.wikipedia.org/wiki/Moving_average#Moving_median) of specified `length` for timeseries of type [`ValueType`]\n\n///\n\n/// # Parameters\n", "file_path": "src/methods/smm.rs", "rank": 14, "score": 155038.2039690593 }, { "content": "/// To initialize an indicator's **State** indicator should implement `IndicatorInitializer`\n\npub trait IndicatorInitializer<T: OHLC> {\n\n\t/// Type of **State**\n\n\ttype Instance: IndicatorInstance<T>;\n\n\n\n\t/// Initializes the **State** based on current **Configuration**\n\n\tfn init(self, initial_value: T) -> Result<Self::Instance, Error>;\n\n\n\n\t/// Evaluates indicator config over sequence of OHLC and returns sequence of `IndicatorResult`s\n\n\t/// ```\n\n\t/// use yata::prelude::*;\n\n\t/// use yata::helpers::{RandomCandles};\n\n\t/// use yata::indicators::Trix;\n\n\t///\n\n\t/// let candles: Vec<_> = RandomCandles::new().take(10).collect();\n\n\t/// let trix = Trix::default();\n\n\t/// let results = 
trix.over(&candles).unwrap();\n\n\t/// println!(\"{:?}\", results);\n\n\t/// ```\n\n\tfn over(self, over_slice: &[T]) -> Result<Vec<IndicatorResult>, Error>\n\n\twhere\n", "file_path": "src/core/indicator/config.rs", "rank": 15, "score": 149629.41592833522 }, { "content": "#[inline]\n\nfn bound_value(value: ValueType) -> ValueType {\n\n\tvalue.min(BOUND).max(-BOUND)\n\n}\n\n\n\nimpl<T: OHLC> IndicatorInstance<T> for FisherTransformInstance {\n\n\ttype Config = FisherTransform;\n\n\n\n\tfn config(&self) -> &Self::Config {\n\n\t\t&self.cfg\n\n\t}\n\n\n\n\tfn next(&mut self, candle: T) -> IndicatorResult {\n\n\t\tlet src = candle.source(self.cfg.source);\n\n\n\n\t\t// converting original value to between -1.0 and 1.0 over period1\n\n\t\tlet h = self.highest.next(src);\n\n\t\tlet l = self.lowest.next(src);\n\n\t\t// we need to check division by zero, so we can really just check if `h` is equal to `l` without using any kind of round error checks\n\n\t\t#[allow(clippy::float_cmp)]\n\n\t\tlet is_different = (h != l) as i8 as ValueType;\n", "file_path": "src/indicators/fisher_transform.rs", "rank": 16, "score": 141927.72671846987 }, { "content": "#[inline]\n\nfn change(change: ValueType) -> (ValueType, ValueType) {\n\n\t// let pos = if change > 0. { change } else { 0. };\n\n\t// let neg = if change < 0. { change * -1. } else { 0. };\n\n\tlet pos = (change > 0.) as i8 as ValueType * change;\n\n\tlet neg = (change < 0.) 
as i8 as ValueType * -change;\n\n\n\n\t(pos, neg)\n\n}\n\n\n\nimpl<T: OHLC> IndicatorInstance<T> for ChandeMomentumOscillatorInstance {\n\n\ttype Config = ChandeMomentumOscillator;\n\n\n\n\tfn config(&self) -> &Self::Config {\n\n\t\t&self.cfg\n\n\t}\n\n\n\n\tfn next(&mut self, candle: T) -> IndicatorResult {\n\n\t\tlet ch = self.change.next(candle.source(self.cfg.source));\n\n\n\n\t\tlet left_value = self.window.push(ch);\n", "file_path": "src/indicators/chande_momentum_oscillator.rs", "rank": 17, "score": 139187.60115840874 }, { "content": "type SignalType = u8;\n\nconst BOUND: SignalType = SignalType::MAX;\n\n\n\n/// Action is basic type of Indicator's signals\n\n///\n\n/// It may be positive (means *Buy* some amount). It may be negative (means *Sell* some amount). Or there may be no signal at all.\n\n///\n\n/// `Action` may be analog {1, 0, -1} or digital in range [-1.0; 1.0]\n\n#[derive(Clone, Copy, Eq, Ord, PartialOrd)]\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\npub enum Action {\n\n\t/// Buy signal\n\n\tBuy(SignalType),\n\n\t/// No signal\n\n\tNone,\n\n\t/// Sell signal\n\n\tSell(SignalType),\n\n}\n\n\n\nimpl Action {\n", "file_path": "src/core/action.rs", "rank": 18, "score": 139035.54989731632 }, { "content": "#[bench]\n\nfn bench_wma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = WMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 19, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_adi_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ADI::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 
{\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Conv -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 20, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_tema_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = TEMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 21, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_vwma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = VWMA::new(100, (candles[0].close, candles[0].volume)).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tlet candle = iter.next().unwrap();\n\n\t\tmethod.next((candle.close, candle.volume));\n\n\t}\n\n\tb.iter(|| {\n\n\t\tlet candle = iter.next().unwrap();\n\n\t\tmethod.next((candle.close, candle.volume))\n\n\t})\n\n}\n\n\n\n// Highest -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 22, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_lowest_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Lowest::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 23, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn 
bench_tma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = TMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 24, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_smm_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = SMM::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 25, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_ema_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = EMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// DMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 26, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_derivative_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Derivative::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 27, "score": 127671.22486809148 }, { 
"content": "#[bench]\n\nfn bench_past_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Past::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// RateOfChange -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 28, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_conv_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Conv::new((0..10).map(|x| x as ValueType).collect(), candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 29, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_rma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = RMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 30, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_trima_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = TRIMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// 
LinearVolatility -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 31, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_trima_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = TRIMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 32, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_integral_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Integral::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 33, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_dema_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = DEMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 34, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_tema_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = TEMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| 
method.next(iter.next().unwrap()))\n\n}\n\n\n\n// SMM -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 35, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_integral_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Integral::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Momentum -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 36, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_rma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = RMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// StDev -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 37, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_past_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Past::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 38, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_hma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| 
c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = HMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// LinReg -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 39, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_swma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = SWMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// TRIMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 40, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_smm_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = SMM::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// HMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 41, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_tma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = TMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// 
DEMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 42, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_cci_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = CCI::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n", "file_path": "benches/methods.rs", "rank": 43, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_momentum_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Momentum::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 44, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_highest_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Highest::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Lowest -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 45, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_wma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = WMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 
0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// EMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 46, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_adi_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ADI::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 47, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_derivative_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Derivative::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Integral -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 48, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_cci_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = CCI::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 49, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_conv_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = 
candles.iter().cycle().copied();\n\n\tlet mut method = Conv::new((0..100).map(|x| x as ValueType).collect(), candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Cross -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 50, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_dma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = DMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 51, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_highest_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Highest::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 52, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_vwma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = VWMA::new(10, (candles[0].close, candles[0].volume)).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tlet candle = iter.next().unwrap();\n\n\t\tmethod.next((candle.close, candle.volume));\n\n\t}\n\n\tb.iter(|| {\n\n\t\tlet candle = iter.next().unwrap();\n\n\t\tmethod.next((candle.close, candle.volume))\n\n\t})\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 53, "score": 
127671.22486809148 }, { "content": "#[bench]\n\nfn bench_swma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = SWMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 54, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_hma_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = HMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 55, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_momentum_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Momentum::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Past -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 56, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_ema_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = EMA::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", 
"rank": 57, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_dema_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = DEMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// TEMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 58, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_dma_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = DMA::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// TMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 59, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_lowest_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Lowest::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// HighestLowestDelta -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 60, "score": 127671.22486809148 }, { "content": "#[bench]\n\nfn bench_lin_reg_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = 
candles.iter().cycle().copied();\n\n\tlet mut method = LinReg::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Derivative -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 61, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_highest_index_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Highest::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 62, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_reverse_low_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ReverseLowSignal::new(50, 50, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// ReverseHigh -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 63, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_reverse_signal_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ReverseSignal::new(50, 50, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// ReverseLow 
-----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 64, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_lowest_index_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Lowest::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 65, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_st_dev_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = StDev::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// SWMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 66, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_rate_of_change_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = RateOfChange::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 67, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_reverse_signal_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ReverseSignal::new(5, 5, 
candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 68, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_highest_index_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Highest::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// LowestIndex -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 69, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_reverse_high_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ReverseLowSignal::new(5, 5, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 70, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_reverse_high_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ReverseHighSignal::new(50, 50, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// RMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 71, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_lin_reg_w10(b: &mut 
test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = LinReg::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 72, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_reverse_low_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = ReverseLowSignal::new(5, 5, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 73, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_linear_volatility_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = LinearVolatility::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 74, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_linear_volatility_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = LinearVolatility::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// VWMA -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 
75, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_rate_of_change_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = RateOfChange::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// Reverse -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 76, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_st_dev_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = StDev::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 77, "score": 124765.30841723178 }, { "content": "#[bench]\n\nfn bench_lowest_index_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = Lowest::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// MeanAbsDev -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 78, "score": 124765.30841723178 }, { "content": "/// Basic trait for implementing [Open-High-Low-Close-Volume timeseries data](https://en.wikipedia.org/wiki/Candlestick_chart).\n\n///\n\n/// It has already implemented for tuple of 5 float values:\n\n/// ```\n\n/// use yata::prelude::{OHLC, OHLCV};\n\n/// // open high low 
close volume\n\n/// let row = (2.0, 5.0, 1.0, 4.0, 10.0 );\n\n/// assert_eq!(row.open(), row.0);\n\n/// assert_eq!(row.high(), row.1);\n\n/// assert_eq!(row.low(), row.2);\n\n/// assert_eq!(row.close(), row.3);\n\n/// assert_eq!(row.volume(), row.4);\n\n/// ```\n\n///\n\n/// See also [Candle](crate::prelude::Candle).\n\npub trait OHLCV: OHLC {\n\n\t/// Should return *volume* value for the period\n\n\tfn volume(&self) -> ValueType;\n\n\n\n\t/// Validates candle attributes\n\n\t///\n\n\t/// See more at [OHLC#method.validate].\n\n\t#[inline]\n\n\tfn validate(&self) -> bool {\n\n\t\tOHLC::validate(self) && self.volume() >= 0. && self.volume().is_finite()\n\n\t}\n\n\n\n\t/// Returns [Source] field value of the candle.\n\n\t///\n\n\t/// See more at [OHLC#method.source].\n\n\t#[inline]\n\n\tfn source(&self, source: Source) -> ValueType {\n\n\t\tmatch source {\n\n\t\t\tSource::Volume => self.volume(),\n\n\t\t\t_ => OHLC::source(self, source),\n", "file_path": "src/core/ohlcv.rs", "rank": 79, "score": 123993.81669007224 }, { "content": "/// Trait for creating methods for timeseries\n\n///\n\n/// # Regular methods usage\n\n///\n\n/// ### Iterate over vector's values\n\n///\n\n/// ```\n\n/// use yata::methods::SMA;\n\n/// use yata::prelude::*;\n\n///\n\n/// let s: Vec<_> = vec![1.,2.,3.,4.,5.,6.,7.,8.,9.,10.];\n\n/// let mut ma = SMA::new(2, s[0]).unwrap();\n\n///\n\n/// s.iter().enumerate().for_each(|(index, &value)| {\n\n/// assert_eq!(ma.next(value), (value + s[index.saturating_sub(1)])/2.);\n\n/// });\n\n/// ```\n\n///\n\n/// ### Get a whole new vector over the input vector\n\n///\n\n/// ```\n\n/// use yata::methods::SMA;\n\n/// use yata::prelude::*;\n\n///\n\n/// let s: Vec<_> = vec![1.,2.,3.,4.,5.,6.,7.,8.,9.,10.];\n\n/// let mut ma = SMA::new(2, s[0]).unwrap();\n\n///\n\n/// let result = ma.over(s.iter().copied());\n\n/// assert_eq!(result.as_slice(), &[1., 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5]);\n\n/// ```\n\n///\n\n/// ### Change vector values using 
method\n\n///\n\n/// ```\n\n/// use yata::core::Sequence;\n\n/// use yata::methods::SMA;\n\n/// use yata::prelude::*;\n\n///\n\n/// let mut s: Sequence<_> = Sequence::from(vec![1.,2.,3.,4.,5.,6.,7.,8.,9.,10.]);\n\n/// let mut ma = SMA::new(2, s[0]).unwrap();\n\n///\n\n/// s.apply(&mut ma);\n\n/// assert_eq!(s.as_slice(), &[1., 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5]);\n\n/// ```\n\n///\n\n/// # Be advised\n\n/// There is no `reset` method on the trait. If you need reset a state of the `Method` instance, you should just create a new one.\n\npub trait Method: fmt::Debug {\n\n\t/// Method parameters\n\n\ttype Params;\n\n\t/// Input value type\n\n\ttype Input: Copy;\n\n\t/// Output value type\n\n\ttype Output: Copy; // = Self::Input;\n\n\n\n\t/// Static method for creating an instance of the method with given `parameters` and initial `value` (simply first input value)\n\n\tfn new(parameters: Self::Params, initial_value: Self::Input) -> Result<Self, Error>\n\n\twhere\n\n\t\tSelf: Sized;\n\n\n\n\t/// Generates next output value based on the given input `value`\n\n\tfn next(&mut self, value: Self::Input) -> Self::Output;\n\n\n\n\t/// Returns a name of the method\n\n\tfn name(&self) -> &str {\n\n\t\tlet parts = std::any::type_name::<Self>().split(\"::\");\n\n\t\tparts.last().unwrap_or_default()\n", "file_path": "src/core/method.rs", "rank": 80, "score": 123859.12384495081 }, { "content": "#[bench]\n\nfn bench_mean_abs_dev_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = MeanAbsDev::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 81, "score": 122016.06275494484 }, { "content": "#[bench]\n\nfn bench_highest_lowest_delta_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = 
RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = HighestLowestDelta::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 82, "score": 122016.06275494484 }, { "content": "#[bench]\n\nfn bench_highest_lowest_delta_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = HighestLowestDelta::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// HighestIndex -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 83, "score": 122016.06275494484 }, { "content": "#[bench]\n\nfn bench_median_abs_dev_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = MedianAbsDev::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// CCI -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 84, "score": 122016.06275494484 }, { "content": "#[bench]\n\nfn bench_median_abs_dev_w10(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = MedianAbsDev::new(10, candles[0]).unwrap();\n\n\tfor _ in 0..10 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| 
method.next(iter.next().unwrap()))\n\n}\n\n\n", "file_path": "benches/methods.rs", "rank": 85, "score": 122016.06275494484 }, { "content": "#[bench]\n\nfn bench_mean_abs_dev_w100(b: &mut test::Bencher) {\n\n\tlet candles: Vec<_> = RandomCandles::new().take(1000).map(|c| c.close).collect();\n\n\tlet mut iter = candles.iter().cycle().copied();\n\n\tlet mut method = MeanAbsDev::new(100, candles[0]).unwrap();\n\n\tfor _ in 0..100 {\n\n\t\tmethod.next(iter.next().unwrap());\n\n\t}\n\n\tb.iter(|| method.next(iter.next().unwrap()))\n\n}\n\n\n\n// MedianAbsDev -----------------------------------------------------------------------------------\n", "file_path": "benches/methods.rs", "rank": 86, "score": 122016.06275494484 }, { "content": "#[bench]\n\nfn bench_awesome_oscillator(b: &mut test::Bencher) {\n\n\tbench_indicator::<AwesomeOscillator>(b);\n\n}\n", "file_path": "benches/indicators.rs", "rank": 87, "score": 94840.29125086385 }, { "content": "#[bench]\n\nfn bench_indicator_aroon(b: &mut test::Bencher) {\n\n\tbench_indicator::<Aroon>(b);\n\n}\n\n\n", "file_path": "benches/indicators.rs", "rank": 88, "score": 94840.29125086385 }, { "content": "#[bench]\n\nfn bench_detrended_price_oscillator(b: &mut test::Bencher) {\n\n\tbench_indicator::<DetrendedPriceOscillator>(b);\n\n}\n\n\n", "file_path": "benches/indicators.rs", "rank": 89, "score": 92843.92401477102 }, { "content": "#[bench]\n\nfn bench_indicator_average_directional_index(b: &mut test::Bencher) {\n\n\tbench_indicator::<AverageDirectionalIndex>(b);\n\n}\n\n\n", "file_path": "benches/indicators.rs", "rank": 90, "score": 90955.18975245726 }, { "content": "/// O(1)\n\n///\n\n/// [`ValueType`]: crate::core::ValueType\n\n/// [`PeriodType`]: crate::core::PeriodType\n\n\n\n#[derive(Debug, Clone)]\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\npub struct SMA {\n\n\tdivider: ValueType,\n\n\tvalue: ValueType,\n\n\twindow: Window<ValueType>,\n\n}\n\n\n\nimpl SMA {\n\n\t/// Returns inner 
[`Window`](crate::core::Window). Useful for implementing in other methods and indicators.\n\n\t#[inline]\n\n\tpub fn get_window(&self) -> &Window<ValueType> {\n\n\t\t&self.window\n\n\t}\n\n\n", "file_path": "src/methods/sma.rs", "rank": 91, "score": 80962.91382201426 }, { "content": "\t/// Returns 1/`length`. Useful for implementing in other methods and indicators.\n\n\t#[inline]\n\n\tpub fn get_divider(&self) -> ValueType {\n\n\t\tself.divider\n\n\t}\n\n\n\n\t/// Returns last result value. Useful for implementing in other methods and indicators.\n\n\t#[inline]\n\n\tpub fn get_last_value(&self) -> ValueType {\n\n\t\tself.value\n\n\t}\n\n}\n\n\n\nimpl Method for SMA {\n\n\ttype Params = PeriodType;\n\n\ttype Input = ValueType;\n\n\ttype Output = Self::Input;\n\n\n\n\tfn new(length: Self::Params, value: Self::Input) -> Result<Self, Error> {\n\n\t\tmatch length {\n", "file_path": "src/methods/sma.rs", "rank": 92, "score": 80955.01376380768 }, { "content": "use crate::core::Method;\n\nuse crate::core::{Error, PeriodType, ValueType, Window};\n\n\n\n#[cfg(feature = \"serde\")]\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// [Simple Moving Average](https://en.wikipedia.org/wiki/Moving_average#Simple_moving_average) of specified `length` for timeseries of type [`ValueType`]\n\n///\n\n/// # Parameters\n\n///\n\n/// Has a single parameter `length`: [`PeriodType`]\n\n///\n\n/// `length` should be > 0\n\n///\n\n/// # Input type\n\n///\n\n/// Input type is [`ValueType`]\n\n///\n\n/// # Output type\n\n///\n", "file_path": "src/methods/sma.rs", "rank": 93, "score": 80947.78793960277 }, { "content": "\tuse super::{Method, SMA as TestingMethod};\n\n\tuse crate::core::ValueType;\n\n\tuse crate::helpers::RandomCandles;\n\n\tuse crate::methods::tests::test_const;\n\n\n\n\t#[allow(dead_code)]\n\n\tconst SIGMA: ValueType = 1e-5;\n\n\n\n\t#[test]\n\n\tfn test_sma_const() {\n\n\t\tfor i in 1..30 {\n\n\t\t\tlet input = (i as ValueType + 56.0) / 16.3251;\n\n\t\t\tlet mut method = 
TestingMethod::new(i, input).unwrap();\n\n\n\n\t\t\tlet output = method.next(input);\n\n\t\t\ttest_const(&mut method, input, output);\n\n\t\t}\n\n\t}\n\n\n\n\t#[test]\n", "file_path": "src/methods/sma.rs", "rank": 94, "score": 80945.46739782016 }, { "content": "\t\t\t0 => Err(Error::WrongMethodParameters),\n\n\t\t\tlength => Ok(Self {\n\n\t\t\t\tdivider: (length as ValueType).recip(),\n\n\t\t\t\tvalue,\n\n\t\t\t\twindow: Window::new(length, value),\n\n\t\t\t}),\n\n\t\t}\n\n\t}\n\n\n\n\t#[inline]\n\n\tfn next(&mut self, value: Self::Input) -> Self::Output {\n\n\t\tlet prev_value = self.window.push(value);\n\n\t\tself.value += (value - prev_value) * self.divider;\n\n\n\n\t\tself.value\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/methods/sma.rs", "rank": 95, "score": 80941.52686778008 }, { "content": "\tfn test_sma1() {\n\n\t\tlet mut candles = RandomCandles::default();\n\n\n\n\t\tlet mut ma = TestingMethod::new(1, candles.first().close).unwrap();\n\n\n\n\t\tcandles.take(100).for_each(|x| {\n\n\t\t\tassert!((x.close - ma.next(x.close)).abs() < SIGMA);\n\n\t\t});\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_sma() {\n\n\t\tlet candles = RandomCandles::default();\n\n\n\n\t\tlet src: Vec<ValueType> = candles.take(100).map(|x| x.close).collect();\n\n\n\n\t\t(1..20).for_each(|sma_length| {\n\n\t\t\tlet mut sma = TestingMethod::new(sma_length, src[0]).unwrap();\n\n\n\n\t\t\tsrc.iter().enumerate().for_each(|(i, &x)| {\n", "file_path": "src/methods/sma.rs", "rank": 96, "score": 80934.83480948475 }, { "content": "/// Output type is [`ValueType`]\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use yata::prelude::*;\n\n/// use yata::methods::SMA;\n\n///\n\n/// // SMA of length=3\n\n/// let mut sma = SMA::new(3, 1.0).unwrap();\n\n///\n\n/// sma.next(1.0);\n\n/// sma.next(2.0);\n\n///\n\n/// assert_eq!(sma.next(3.0), 2.0);\n\n/// assert_eq!(sma.next(4.0), 3.0);\n\n/// ```\n\n///\n\n/// # Perfomance\n\n///\n", "file_path": "src/methods/sma.rs", "rank": 97, "score": 
80934.39784001837 }, { "content": "\t\t\t\tlet value = sma.next(x);\n\n\t\t\t\tlet slice_from = i.saturating_sub((sma_length - 1) as usize);\n\n\t\t\t\tlet slice_to = i;\n\n\t\t\t\tlet slice = &src[slice_from..=slice_to];\n\n\t\t\t\tlet mut sum: ValueType = slice.iter().sum();\n\n\t\t\t\tif slice.len() < sma_length as usize {\n\n\t\t\t\t\tsum += (sma_length as usize - slice.len()) as ValueType * src.first().unwrap();\n\n\t\t\t\t}\n\n\n\n\t\t\t\tassert!((sum / sma_length as ValueType - value).abs() < SIGMA);\n\n\t\t\t});\n\n\t\t});\n\n\t}\n\n}\n", "file_path": "src/methods/sma.rs", "rank": 98, "score": 80927.87383505763 }, { "content": "///\n\n/// [Cross], [CrossAbove]\n\n///\n\n/// [`ValueType`]: crate::core::ValueType\n\n/// [`PeriodType`]: crate::core::PeriodType\n\n/// [`DigitalSignal`]: crate::core::DigitalSignal\n\n#[derive(Debug, Default, Clone, Copy)]\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\npub struct CrossUnder {\n\n\tlast_delta: ValueType,\n\n}\n\n\n\nimpl CrossUnder {\n\n\t/// Returns `true` when value1 crosses `value2` timeseries downwards\n\n\t/// Otherwise returns `false`\n\n\t#[inline]\n\n\tpub fn binary(&mut self, value1: ValueType, value2: ValueType) -> bool {\n\n\t\tlet last_delta = self.last_delta;\n\n\t\tlet current_delta = value1 - value2;\n\n\n", "file_path": "src/methods/cross.rs", "rank": 99, "score": 80860.40103116285 } ]
Rust
src/stan_client/mod.rs
stevelr/ratsio
bcda26c44bf82895fa90603a4e2d706c9a663b3b
use crate::nats_client::{ClosableMessage, NatsClient, NatsClientOptions, NatsSid}; use crate::nuid::NUID; use std::{collections::HashMap, sync::Arc}; use tokio::sync::RwLock; use std::fmt::{Debug, Error, Formatter}; use tokio::sync::mpsc::UnboundedSender; pub mod client; const DEFAULT_DISCOVER_PREFIX: &str = "_STAN.discover"; const DEFAULT_ACK_PREFIX: &str = "_STAN.acks"; const DEFAULT_MAX_PUB_ACKS_INFLIGHT: u32 = 16384; const DEFAULT_PING_INTERVAL: u32 = 5; const DEFAULT_PING_MAX_OUT: u32 = 3; const DEFAULT_ACK_WAIT: i32 = 30 * 60000; const DEFAULT_MAX_INFLIGHT: i32 = 1024; #[derive(Debug, Clone, PartialEq, Builder)] #[builder(setter(into), default)] pub struct StanOptions { pub nats_options: NatsClientOptions, pub cluster_id: String, pub client_id: String, pub ping_interval: u32, pub ping_max_out: u32, pub max_pub_acks_inflight: u32, pub discover_prefix: String, pub ack_prefix: String, } #[derive(Debug, Clone)] pub struct StanSid(pub(crate) NatsSid); impl StanOptions { pub fn new<S>(cluster_id: S, client_id: S) -> StanOptions where S: ToString, { StanOptions { client_id: client_id.to_string(), cluster_id: cluster_id.to_string(), ..Default::default() } } pub fn with_options<T, S>(nats_options: T, cluster_id: S, client_id: S) -> StanOptions where T: Into<NatsClientOptions>, S: ToString, { StanOptions { client_id: client_id.to_string(), cluster_id: cluster_id.to_string(), nats_options: nats_options.into(), ..Default::default() } } pub fn builder() -> StanOptionsBuilder { StanOptionsBuilder::default() } } impl Default for StanOptions { fn default() -> Self { StanOptions { nats_options: NatsClientOptions::default(), cluster_id: String::from(""), client_id: String::from(""), ping_interval: DEFAULT_PING_INTERVAL, ping_max_out: DEFAULT_PING_MAX_OUT, max_pub_acks_inflight: DEFAULT_MAX_PUB_ACKS_INFLIGHT, discover_prefix: DEFAULT_DISCOVER_PREFIX.into(), ack_prefix: DEFAULT_ACK_PREFIX.into(), } } } pub(crate) struct AckHandler(Box<dyn Fn() + Send + Sync>); impl Drop for 
AckHandler { fn drop(&mut self) { self.0() } } impl Debug for AckHandler { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { f.write_str("<ack-handler>") } } #[derive(Debug, Builder)] #[builder(default)] pub struct StanMessage { pub subject: String, pub reply_to: Option<String>, pub payload: Vec<u8>, pub timestamp: i64, pub sequence: u64, pub redelivered: bool, pub ack_inbox: Option<String>, #[builder(setter(skip))] ack_handler: Option<AckHandler>, } impl StanMessage { pub fn new(subject: String, payload: Vec<u8>) -> Self { StanMessage { subject, payload, reply_to: None, timestamp: 0, sequence: 0, redelivered: false, ack_inbox: None, ack_handler: None, } } pub fn with_reply(subject: String, payload: Vec<u8>, reply_to: Option<String>) -> Self { StanMessage { subject, payload, reply_to, timestamp: 0, sequence: 0, redelivered: false, ack_inbox: None, ack_handler: None, } } pub fn builder() -> StanMessageBuilder { StanMessageBuilder::default() } } impl Default for StanMessage { fn default() -> Self { use std::time::{SystemTime, UNIX_EPOCH}; let now = SystemTime::now(); let tstamp = now.duration_since(UNIX_EPOCH).unwrap(); let tstamp_ms = tstamp.as_millis() as i64; StanMessage { subject: String::new(), reply_to: None, payload: Vec::new(), timestamp: tstamp_ms, sequence: 0, redelivered: false, ack_inbox: None, ack_handler: None, } } } impl Clone for StanMessage { fn clone(&self) -> Self { StanMessage { subject: self.subject.clone(), reply_to: self.reply_to.clone(), payload: self.payload.clone(), timestamp: self.timestamp, sequence: self.sequence, redelivered: self.redelivered, ack_inbox: self.ack_inbox.clone(), ack_handler: None, } } } #[derive(Clone, Debug, PartialEq)] pub enum StartPosition { NewOnly = 0, LastReceived = 1, TimeDeltaStart = 2, SequenceStart = 3, First = 4, } #[derive(Clone, Debug, PartialEq, Builder)] #[builder(default)] pub struct StanSubscribe { pub subject: String, pub queue_group: Option<String>, pub durable_name: Option<String>, pub 
max_in_flight: i32, pub ack_wait_in_secs: i32, pub start_position: StartPosition, pub start_sequence: u64, pub start_time_delta: Option<i32>, pub manual_acks: bool, } impl StanSubscribe { pub fn builder() -> StanSubscribeBuilder { StanSubscribeBuilder::default() } } impl Default for StanSubscribe { fn default() -> Self { StanSubscribe { subject: String::from(""), queue_group: None, durable_name: None, max_in_flight: DEFAULT_MAX_INFLIGHT, ack_wait_in_secs: DEFAULT_ACK_WAIT, start_position: StartPosition::LastReceived, start_sequence: 0, start_time_delta: None, manual_acks: false, } } } #[derive(Clone)] struct Subscription { client_id: String, subject: String, queue_group: Option<String>, durable_name: Option<String>, max_in_flight: i32, ack_wait_in_secs: i32, inbox: String, ack_inbox: String, unsub_requests: String, close_requests: String, sender: UnboundedSender<ClosableMessage>, } pub struct StanClient { pub options: StanOptions, pub nats_client: Arc<NatsClient>, pub client_id: String, client_info: Arc<RwLock<ClientInfo>>, id_generator: Arc<RwLock<NUID>>, conn_id: RwLock<Vec<u8>>, subscriptions: RwLock<HashMap<String, Subscription>>, self_reference: RwLock<Option<Arc<StanClient>>>, } #[derive(Clone, Debug, Default)] pub struct ClientInfo { pub_prefix: String, sub_requests: String, unsub_requests: String, sub_close_requests: String, close_requests: String, ping_requests: String, public_key: String, }
use crate::nats_client::{ClosableMessage, NatsClient, NatsClientOptions, NatsSid}; use crate::nuid::NUID; use std::{collections::HashMap, sync::Arc}; use tokio::sync::RwLock; use std::fmt::{Debug, Error, Formatter}; use tokio::sync::mpsc::UnboundedSender; pub mod client; const DEFAULT_DISCOVER_PREFIX: &str = "_STAN.discover"; const DEFAULT_ACK_PREFIX: &str = "_STAN.acks"; const DEFAULT_MAX_PUB_ACKS_INFLIGHT: u32 = 16384; const DEFAULT_PING_INTERVAL: u32 = 5; const DEFAULT_PING_MAX_OUT: u32 = 3; const DEFAULT_ACK_WAIT: i32 = 30 * 60000; const DEFAULT_MAX_INFLIGHT: i32 = 1024; #[derive(Debug, Clone, PartialEq, Builder)] #[builder(setter(into), default)] pub struct StanOptions { pub nats_options: NatsClientOptions, pub cluster_id: String, pub client_id: String, pub ping_interval: u32, pub ping_max_out: u32, pub max_pub_acks_inflight: u32, pub discover_prefix: String, pub ack_prefix: String, } #[derive(Debug, Clone)] pub struct StanSid(pub(crate) NatsSid); impl StanOptions { pub fn new<S>(cluster_id: S, client_id: S) -> StanOptions where S: ToString, { StanOptions { client_id: client_id.to_string(), cluster_id: cluster_id.to_string(), ..Default::default() } } pub fn with_options<T, S>(nats_options: T, cluster_id: S, client_id: S) -> StanOptions where T: Into<NatsClientOptions>, S: ToString, { StanOptions { client_id: client_id.to_string(), cluster_id: cluster_id.to_string(), nats_options: nats_options.into(), ..Default::default() } } pub fn builder() -> StanOptionsBuilder { StanOptionsBuilder::default() } } impl Default for StanOptions { fn default() -> Self { StanOptions { nats_options: NatsClientOptions::default(), cluster_id: String::from(""), client_id: String::from(""), ping_interval: DEFAULT_PING_INTERVAL, ping_max_out: DEFAULT_PING_MAX_OUT, max_pub_acks_inflight: DEFAULT_MAX_PUB_ACKS_INFLIGHT, discover_prefix: DEFAULT_DISCOVER_PREFIX.into(), ack_prefix: DEFAULT_ACK_PREFIX.into(), } } } pub(crate) struct AckHandler(Box<dyn Fn() + Send + Sync>); impl Drop for 
AckHandler { fn drop(&mut
one, payload: Vec::new(), timestamp: tstamp_ms, sequence: 0, redelivered: false, ack_inbox: None, ack_handler: None, } } } impl Clone for StanMessage { fn clone(&self) -> Self { StanMessage { subject: self.subject.clone(), reply_to: self.reply_to.clone(), payload: self.payload.clone(), timestamp: self.timestamp, sequence: self.sequence, redelivered: self.redelivered, ack_inbox: self.ack_inbox.clone(), ack_handler: None, } } } #[derive(Clone, Debug, PartialEq)] pub enum StartPosition { NewOnly = 0, LastReceived = 1, TimeDeltaStart = 2, SequenceStart = 3, First = 4, } #[derive(Clone, Debug, PartialEq, Builder)] #[builder(default)] pub struct StanSubscribe { pub subject: String, pub queue_group: Option<String>, pub durable_name: Option<String>, pub max_in_flight: i32, pub ack_wait_in_secs: i32, pub start_position: StartPosition, pub start_sequence: u64, pub start_time_delta: Option<i32>, pub manual_acks: bool, } impl StanSubscribe { pub fn builder() -> StanSubscribeBuilder { StanSubscribeBuilder::default() } } impl Default for StanSubscribe { fn default() -> Self { StanSubscribe { subject: String::from(""), queue_group: None, durable_name: None, max_in_flight: DEFAULT_MAX_INFLIGHT, ack_wait_in_secs: DEFAULT_ACK_WAIT, start_position: StartPosition::LastReceived, start_sequence: 0, start_time_delta: None, manual_acks: false, } } } #[derive(Clone)] struct Subscription { client_id: String, subject: String, queue_group: Option<String>, durable_name: Option<String>, max_in_flight: i32, ack_wait_in_secs: i32, inbox: String, ack_inbox: String, unsub_requests: String, close_requests: String, sender: UnboundedSender<ClosableMessage>, } pub struct StanClient { pub options: StanOptions, pub nats_client: Arc<NatsClient>, pub client_id: String, client_info: Arc<RwLock<ClientInfo>>, id_generator: Arc<RwLock<NUID>>, conn_id: RwLock<Vec<u8>>, subscriptions: RwLock<HashMap<String, Subscription>>, self_reference: RwLock<Option<Arc<StanClient>>>, } #[derive(Clone, Debug, Default)] pub 
struct ClientInfo { pub_prefix: String, sub_requests: String, unsub_requests: String, sub_close_requests: String, close_requests: String, ping_requests: String, public_key: String, }
self) { self.0() } } impl Debug for AckHandler { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { f.write_str("<ack-handler>") } } #[derive(Debug, Builder)] #[builder(default)] pub struct StanMessage { pub subject: String, pub reply_to: Option<String>, pub payload: Vec<u8>, pub timestamp: i64, pub sequence: u64, pub redelivered: bool, pub ack_inbox: Option<String>, #[builder(setter(skip))] ack_handler: Option<AckHandler>, } impl StanMessage { pub fn new(subject: String, payload: Vec<u8>) -> Self { StanMessage { subject, payload, reply_to: None, timestamp: 0, sequence: 0, redelivered: false, ack_inbox: None, ack_handler: None, } } pub fn with_reply(subject: String, payload: Vec<u8>, reply_to: Option<String>) -> Self { StanMessage { subject, payload, reply_to, timestamp: 0, sequence: 0, redelivered: false, ack_inbox: None, ack_handler: None, } } pub fn builder() -> StanMessageBuilder { StanMessageBuilder::default() } } impl Default for StanMessage { fn default() -> Self { use std::time::{SystemTime, UNIX_EPOCH}; let now = SystemTime::now(); let tstamp = now.duration_since(UNIX_EPOCH).unwrap(); let tstamp_ms = tstamp.as_millis() as i64; StanMessage { subject: String::new(), reply_to: N
random
[]
Rust
crates/napi/src/promise.rs
AlCalzone/napi-rs
110f2196a4095963d73cdcfc5d49f72a9c579aaa
use std::ffi::CStr; use std::future::Future; use std::marker::PhantomData; use std::os::raw::c_void; use std::ptr; use crate::{check_status, sys, JsError, Result}; pub struct FuturePromise<Data, Resolver: FnOnce(sys::napi_env, Data) -> Result<sys::napi_value>> { deferred: sys::napi_deferred, env: sys::napi_env, tsfn: sys::napi_threadsafe_function, async_resource_name: sys::napi_value, resolver: Resolver, _data: PhantomData<Data>, } unsafe impl<T, F: FnOnce(sys::napi_env, T) -> Result<sys::napi_value>> Send for FuturePromise<T, F> { } impl<Data, Resolver: FnOnce(sys::napi_env, Data) -> Result<sys::napi_value>> FuturePromise<Data, Resolver> { pub fn new(env: sys::napi_env, deferred: sys::napi_deferred, resolver: Resolver) -> Result<Self> { let mut async_resource_name = ptr::null_mut(); let s = unsafe { CStr::from_bytes_with_nul_unchecked(b"napi_resolve_promise_from_future\0") }; check_status!(unsafe { sys::napi_create_string_utf8(env, s.as_ptr(), 32, &mut async_resource_name) })?; Ok(FuturePromise { deferred, resolver, env, tsfn: ptr::null_mut(), async_resource_name, _data: PhantomData, }) } pub(crate) fn start(self) -> Result<TSFNValue> { let mut tsfn_value = ptr::null_mut(); let async_resource_name = self.async_resource_name; let env = self.env; let self_ref = Box::leak(Box::from(self)); check_status!(unsafe { sys::napi_create_threadsafe_function( env, ptr::null_mut(), ptr::null_mut(), async_resource_name, 0, 1, ptr::null_mut(), None, self_ref as *mut FuturePromise<Data, Resolver> as *mut c_void, Some(call_js_cb::<Data, Resolver>), &mut tsfn_value, ) })?; self_ref.tsfn = tsfn_value; Ok(TSFNValue(tsfn_value)) } } pub(crate) struct TSFNValue(sys::napi_threadsafe_function); unsafe impl Send for TSFNValue {} pub(crate) async fn resolve_from_future<Data: Send, Fut: Future<Output = Result<Data>>>( tsfn_value: TSFNValue, fut: Fut, ) { let val = fut.await; check_status!(unsafe { sys::napi_call_threadsafe_function( tsfn_value.0, Box::into_raw(Box::from(val)) as *mut c_void, 
sys::ThreadsafeFunctionCallMode::nonblocking, ) }) .expect("Failed to call thread safe function"); check_status!(unsafe { sys::napi_release_threadsafe_function(tsfn_value.0, sys::ThreadsafeFunctionReleaseMode::release) }) .expect("Failed to release thread safe function"); } unsafe extern "C" fn call_js_cb< Data, Resolver: FnOnce(sys::napi_env, Data) -> Result<sys::napi_value>, >( env: sys::napi_env, _js_callback: sys::napi_value, context: *mut c_void, data: *mut c_void, ) { let future_promise = unsafe { Box::from_raw(context as *mut FuturePromise<Data, Resolver>) }; let value = unsafe { Box::from_raw(data as *mut Result<Data>) }; let resolver = future_promise.resolver; let deferred = future_promise.deferred; let js_value_to_resolve = value.and_then(move |v| (resolver)(env, v)); match js_value_to_resolve { Ok(v) => { let status = unsafe { sys::napi_resolve_deferred(env, deferred, v) }; debug_assert!(status == sys::Status::napi_ok, "Resolve promise failed"); } Err(e) => { let status = unsafe { sys::napi_reject_deferred( env, deferred, if e.maybe_raw.is_null() { JsError::from(e).into_value(env) } else { let mut err = ptr::null_mut(); let get_err_status = sys::napi_get_reference_value(env, e.maybe_raw, &mut err); debug_assert!( get_err_status == sys::Status::napi_ok, "Get Error from Reference failed" ); let delete_reference_status = sys::napi_delete_reference(env, e.maybe_raw); debug_assert!( delete_reference_status == sys::Status::napi_ok, "Delete Error Reference failed" ); err }, ) }; debug_assert!(status == sys::Status::napi_ok, "Reject promise failed"); } }; }
use std::ffi::CStr; use std::future::Future; use std::marker::PhantomData; use std::os::raw::c_void; use std::ptr; use crate::{check_status, sys, JsError, Result}; pub struct FuturePromise<Data, Resolver: FnOnce(sys::napi_env, Data) -> Result<sys::napi_value>> { deferred: sys::napi_deferred, env: sys::napi_env, tsfn: sys::napi_threadsafe_function, async_resource_name: sys::napi_value, resolver: Resolver, _data: PhantomData<Data>, } unsafe impl<T, F: FnOnce(sys::napi_env, T) -> Result<sys::napi_value>> Send for FuturePromise<T, F> { } impl<Data, Resolver: FnOnce(sys::napi_env, Data) -> Result<sys::napi_value>> FuturePromise<Data, Resolver> { pub fn new(env: sys::napi_env, deferred: sys::napi_deferred, resolver: Resolver) -> Result<Self> { let mut async_resource_name = ptr::null_mut(); let s = unsafe { CStr::from_bytes_with_nul_unchecked(b"napi_resolve_promise_from_future\0") };
) }) .expect("Failed to call thread safe function"); check_status!(unsafe { sys::napi_release_threadsafe_function(tsfn_value.0, sys::ThreadsafeFunctionReleaseMode::release) }) .expect("Failed to release thread safe function"); } unsafe extern "C" fn call_js_cb< Data, Resolver: FnOnce(sys::napi_env, Data) -> Result<sys::napi_value>, >( env: sys::napi_env, _js_callback: sys::napi_value, context: *mut c_void, data: *mut c_void, ) { let future_promise = unsafe { Box::from_raw(context as *mut FuturePromise<Data, Resolver>) }; let value = unsafe { Box::from_raw(data as *mut Result<Data>) }; let resolver = future_promise.resolver; let deferred = future_promise.deferred; let js_value_to_resolve = value.and_then(move |v| (resolver)(env, v)); match js_value_to_resolve { Ok(v) => { let status = unsafe { sys::napi_resolve_deferred(env, deferred, v) }; debug_assert!(status == sys::Status::napi_ok, "Resolve promise failed"); } Err(e) => { let status = unsafe { sys::napi_reject_deferred( env, deferred, if e.maybe_raw.is_null() { JsError::from(e).into_value(env) } else { let mut err = ptr::null_mut(); let get_err_status = sys::napi_get_reference_value(env, e.maybe_raw, &mut err); debug_assert!( get_err_status == sys::Status::napi_ok, "Get Error from Reference failed" ); let delete_reference_status = sys::napi_delete_reference(env, e.maybe_raw); debug_assert!( delete_reference_status == sys::Status::napi_ok, "Delete Error Reference failed" ); err }, ) }; debug_assert!(status == sys::Status::napi_ok, "Reject promise failed"); } }; }
check_status!(unsafe { sys::napi_create_string_utf8(env, s.as_ptr(), 32, &mut async_resource_name) })?; Ok(FuturePromise { deferred, resolver, env, tsfn: ptr::null_mut(), async_resource_name, _data: PhantomData, }) } pub(crate) fn start(self) -> Result<TSFNValue> { let mut tsfn_value = ptr::null_mut(); let async_resource_name = self.async_resource_name; let env = self.env; let self_ref = Box::leak(Box::from(self)); check_status!(unsafe { sys::napi_create_threadsafe_function( env, ptr::null_mut(), ptr::null_mut(), async_resource_name, 0, 1, ptr::null_mut(), None, self_ref as *mut FuturePromise<Data, Resolver> as *mut c_void, Some(call_js_cb::<Data, Resolver>), &mut tsfn_value, ) })?; self_ref.tsfn = tsfn_value; Ok(TSFNValue(tsfn_value)) } } pub(crate) struct TSFNValue(sys::napi_threadsafe_function); unsafe impl Send for TSFNValue {} pub(crate) async fn resolve_from_future<Data: Send, Fut: Future<Output = Result<Data>>>( tsfn_value: TSFNValue, fut: Fut, ) { let val = fut.await; check_status!(unsafe { sys::napi_call_threadsafe_function( tsfn_value.0, Box::into_raw(Box::from(val)) as *mut c_void, sys::ThreadsafeFunctionCallMode::nonblocking,
random
[ { "content": "pub fn register_js(exports: &mut JsObject, env: &Env) -> Result<()> {\n\n let test_class = env.define_class(\n\n \"TestClass\",\n\n test_class_constructor,\n\n &[\n\n Property::new(\"miterNative\")?\n\n .with_getter(get_miter_native)\n\n .with_setter(set_miter_native),\n\n Property::new(\"miter\")?\n\n .with_getter(get_miter)\n\n .with_setter(set_miter),\n\n Property::new(\"lineJoinNative\")?\n\n .with_getter(get_line_join_native)\n\n .with_setter(set_line_join_native),\n\n Property::new(\"lineJoin\")?\n\n .with_getter(get_line_join)\n\n .with_setter(set_line_join),\n\n ],\n\n )?;\n\n exports.set_named_property(\"TestClass\", test_class)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "bench/src/get_set_property.rs", "rank": 0, "score": 355067.0678724764 }, { "content": "pub fn register_js(exports: &mut JsObject, env: &Env) -> Result<()> {\n\n exports.create_named_method(\"testThreadsafeFunction\", test_threadsafe_function)?;\n\n exports.create_named_method(\"testTsfnError\", test_tsfn_error)?;\n\n exports.create_named_method(\"testTokioReadfile\", test_tokio_readfile)?;\n\n exports.create_named_method(\n\n \"testAbortThreadsafeFunction\",\n\n test_abort_threadsafe_function,\n\n )?;\n\n exports.create_named_method(\n\n \"testAbortIndependentThreadsafeFunction\",\n\n test_abort_independent_threadsafe_function,\n\n )?;\n\n exports.create_named_method(\n\n \"testCallAbortedThreadsafeFunction\",\n\n test_call_aborted_threadsafe_function,\n\n )?;\n\n exports.create_named_method(\"testTsfnWithRef\", test_tsfn_with_ref)?;\n\n\n\n let obj = env.define_class(\"A\", constructor, &[])?;\n\n\n\n exports.set_named_property(\"A\", obj)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi4/mod.rs", "rank": 1, "score": 349673.0268429084 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"instanceof\", instanceof)?;\n\n exports.create_named_method(\"isTypedarray\", is_typedarray)?;\n\n 
exports.create_named_method(\"isDataview\", is_dataview)?;\n\n exports.create_named_method(\"strictEquals\", strict_equals)?;\n\n exports.create_named_method(\"castUnknown\", cast_unknown)?;\n\n exports.create_named_method(\"getEnvVariable\", get_env_variable)?;\n\n exports.create_named_method(\"throwSyntaxError\", throw_syntax_error)?;\n\n exports.create_named_method(\"coerceToBool\", coerce_to_bool)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 2, "score": 321636.3616203714 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"addCleanupHook\", add_cleanup_hook)?;\n\n exports.create_named_method(\"removeCleanupHook\", remove_cleanup_hook)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/cleanup_env.rs", "rank": 3, "score": 318708.53568078426 }, { "content": "#[contextless_function]\n\npub fn set_instance_data(env: Env) -> ContextlessResult<JsUndefined> {\n\n env.set_instance_data(NativeObject { count: 1024 }, 0, |_ctx| {})?;\n\n env.get_undefined().map(Some)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi6/instance.rs", "rank": 4, "score": 307720.02234365745 }, { "content": "#[contextless_function]\n\npub fn get_instance_data(env: Env) -> ContextlessResult<JsNumber> {\n\n if let Some(obj) = env.get_instance_data::<NativeObject>()? 
{\n\n env.create_int64(obj.count).map(Some)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi6/instance.rs", "rank": 5, "score": 307720.02234365745 }, { "content": "#[module_exports]\n\nfn init(mut exports: JsObject, env: Env) -> Result<()> {\n\n exports.create_named_method(\"noop\", noop::noop)?;\n\n\n\n async_compute::register_js(&mut exports)?;\n\n buffer::register_js(&mut exports)?;\n\n plus::register_js(&mut exports)?;\n\n get_set_property::register_js(&mut exports, &env)?;\n\n create_array::register_js(&mut exports)?;\n\n get_value_from_js::register_js(&mut exports)?;\n\n query::register_js(&mut exports)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "bench/src/lib.rs", "rank": 6, "score": 302893.5792469919 }, { "content": "#[contextless_function]\n\npub fn get_wrong_type_instance_data(env: Env) -> ContextlessResult<JsNumber> {\n\n if let Some(count) = env.get_instance_data::<i32>()? {\n\n env.create_int64(*count as i64).map(Some)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi6/instance.rs", "rank": 7, "score": 302587.72132546897 }, { "content": "#[module_exports]\n\nfn init(mut exports: JsObject, env: Env) -> Result<()> {\n\n exports.create_named_method(\"getNapiVersion\", get_napi_version)?;\n\n array::register_js(&mut exports)?;\n\n error::register_js(&mut exports)?;\n\n string::register_js(&mut exports)?;\n\n serde::register_js(&mut exports)?;\n\n task::register_js(&mut exports)?;\n\n external::register_js(&mut exports)?;\n\n arraybuffer::register_js(&mut exports)?;\n\n buffer::register_js(&mut exports)?;\n\n either::register_js(&mut exports)?;\n\n symbol::register_js(&mut exports)?;\n\n function::register_js(&mut exports)?;\n\n class::register_js(&mut exports)?;\n\n env::register_js(&mut exports)?;\n\n object::register_js(&mut exports)?;\n\n global::register_js(&mut exports)?;\n\n cleanup_env::register_js(&mut exports)?;\n\n #[cfg(feature = \"latest\")]\n\n 
napi4::register_js(&mut exports, &env)?;\n", "file_path": "examples/napi-compat-mode/src/lib.rs", "rank": 8, "score": 295885.49850374734 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"benchCreateBuffer\", bench_create_buffer)?;\n\n Ok(())\n\n}\n", "file_path": "bench/src/buffer.rs", "rank": 9, "score": 286490.25186303107 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"plus\", bench_plus)?;\n\n Ok(())\n\n}\n", "file_path": "bench/src/plus.rs", "rank": 10, "score": 286490.25186303107 }, { "content": "#[napi]\n\npub fn set_symbol_in_obj(env: Env, symbol: JsSymbol) -> Result<JsObject> {\n\n let mut obj = env.create_object()?;\n\n obj.set_property(symbol, env.create_string(\"a symbol\")?)?;\n\n Ok(obj)\n\n}\n\n\n", "file_path": "examples/napi/src/symbol.rs", "rank": 11, "score": 284760.2866456596 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"createArrayJson\", create_array_json)?;\n\n exports.create_named_method(\"createArray\", create_array)?;\n\n exports.create_named_method(\"createArrayWithSerdeTrait\", create_array_with_serde_trait)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "bench/src/create_array.rs", "rank": 12, "score": 283942.78318072355 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"benchAsyncTask\", bench_async_task)?;\n\n exports.create_named_method(\"benchThreadsafeFunction\", bench_threadsafe_function)?;\n\n exports.create_named_method(\"benchTokioFuture\", bench_tokio_future)?;\n\n Ok(())\n\n}\n", "file_path": "bench/src/async_compute.rs", "rank": 13, "score": 283942.78318072355 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"getArrayFromJson\", get_array_from_json)?;\n\n exports.create_named_method(\"getArrayFromJsArray\", 
get_array_from_js_array)?;\n\n exports.create_named_method(\"getArrayWithForLoop\", get_array_with_for_loop)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "bench/src/get_value_from_js.rs", "rank": 14, "score": 281467.7782154606 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"concatString\", concat_string)?;\n\n exports.create_named_method(\"concatUTF16String\", concat_utf16_string)?;\n\n exports.create_named_method(\"concatLatin1String\", concat_latin1_string)?;\n\n exports.create_named_method(\"createLatin1\", create_latin1)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/string.rs", "rank": 15, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"make_num_77\", make_num_77)?;\n\n exports.create_named_method(\"make_num_32\", make_num_32)?;\n\n exports.create_named_method(\"make_str_hello\", make_str_hello)?;\n\n exports.create_named_method(\"make_num_array\", make_num_array)?;\n\n exports.create_named_method(\"make_buff\", make_buff)?;\n\n exports.create_named_method(\"make_obj\", make_obj)?;\n\n exports.create_named_method(\"make_object\", make_object)?;\n\n exports.create_named_method(\"make_map\", make_map)?;\n\n exports.create_named_method(\"make_bytes_struct\", make_bytes_struct)?;\n\n\n\n exports.create_named_method(\"expect_hello_world\", expect_hello_world)?;\n\n exports.create_named_method(\"expect_obj\", expect_obj)?;\n\n exports.create_named_method(\"expect_num_array\", expect_num_array)?;\n\n exports.create_named_method(\"expect_buffer\", expect_buffer)?;\n\n\n\n exports.create_named_method(\"roundtrip_object\", roundtrip_object)?;\n\n exports.create_named_method(\"from_json_string\", from_json_string)?;\n\n exports.create_named_method(\"json_to_string\", json_to_string)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/serde.rs", "rank": 16, "score": 279062.18843847234 }, { 
"content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"createTestClass\", create_test_class)?;\n\n exports.create_named_method(\"newTestClass\", new_test_class)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/class.rs", "rank": 17, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"setTimeout\", set_timeout)?;\n\n exports.create_named_method(\"clearTimeout\", clear_timeout)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/global.rs", "rank": 18, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"createExternal\", create_external)?;\n\n exports.create_named_method(\"createExternalWithHint\", create_external_with_hint)?;\n\n exports.create_named_method(\"getExternalCount\", get_external_count)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/external.rs", "rank": 19, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"getArraybufferLength\", get_arraybuffer_length)?;\n\n exports.create_named_method(\"mutateUint8Array\", mutate_uint8_array)?;\n\n exports.create_named_method(\"mutateUint16Array\", mutate_uint16_array)?;\n\n exports.create_named_method(\"mutateInt16Array\", mutate_int16_array)?;\n\n exports.create_named_method(\"mutateFloat32Array\", mutate_float32_array)?;\n\n exports.create_named_method(\"mutateFloat64Array\", mutate_float64_array)?;\n\n #[cfg(feature = \"latest\")]\n\n exports.create_named_method(\"mutateI64Array\", mutate_i64_array)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 20, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"createNamedSymbol\", 
create_named_symbol)?;\n\n exports.create_named_method(\"createUnnamedSymbol\", create_unnamed_symbol)?;\n\n exports.create_named_method(\"createSymbolFromJsString\", create_symbol_from_js_string)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/symbol.rs", "rank": 21, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testCreateArray\", test_create_array)?;\n\n exports.create_named_method(\"testCreateArrayWithLength\", test_create_array_with_length)?;\n\n exports.create_named_method(\"testSetElement\", test_set_element)?;\n\n exports.create_named_method(\"testHasElement\", test_has_element)?;\n\n exports.create_named_method(\"testDeleteElement\", test_delete_element)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/array.rs", "rank": 22, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testSpawnThread\", test_spawn_thread)?;\n\n exports.create_named_method(\"testSpawnThreadWithRef\", test_spawn_thread_with_ref)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/task.rs", "rank": 23, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"eitherNumberString\", either_number_string)?;\n\n exports.create_named_method(\"dynamicArgumentLength\", dynamic_argument_length)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/either.rs", "rank": 24, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testThrow\", test_throw)?;\n\n exports.create_named_method(\"testThrowWithReason\", test_throw_with_reason)?;\n\n exports.create_named_method(\"isError\", is_error)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/error.rs", "rank": 25, "score": 
279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testSetProperty\", test_set_property)?;\n\n exports.create_named_method(\"testGetProperty\", test_get_property)?;\n\n\n\n exports.create_named_method(\"testSetNamedProperty\", test_set_named_property)?;\n\n exports.create_named_method(\"testGetNamedProperty\", test_get_named_property)?;\n\n exports.create_named_method(\"testHasNamedProperty\", test_has_named_property)?;\n\n\n\n exports.create_named_method(\"testHasOwnProperty\", test_has_own_property)?;\n\n exports.create_named_method(\"testHasOwnPropertyJs\", test_has_own_property_js)?;\n\n exports.create_named_method(\"testHasProperty\", test_has_property)?;\n\n exports.create_named_method(\"testHasPropertyJs\", test_has_property_js)?;\n\n exports.create_named_method(\"testDeleteProperty\", test_delete_property)?;\n\n exports.create_named_method(\"testDeleteNamedProperty\", test_delete_named_property)?;\n\n exports.create_named_method(\"testGetPropertyNames\", test_get_property_names)?;\n\n exports.create_named_method(\"testGetPrototype\", test_get_prototype)?;\n\n exports.create_named_method(\"testSetElement\", test_set_element)?;\n\n exports.create_named_method(\"testHasElement\", test_has_element)?;\n\n exports.create_named_method(\"testGetElement\", test_get_element)?;\n\n exports.create_named_method(\"testDeleteElement\", test_delete_element)?;\n\n exports.create_named_method(\"testDefineProperties\", test_define_properties)?;\n\n\n\n exports.create_named_method(\"testIsPromise\", test_is_promise)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/object.rs", "rank": 26, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"getBufferLength\", get_buffer_length)?;\n\n exports.create_named_method(\"bufferToString\", buffer_to_string)?;\n\n exports.create_named_method(\"copyBuffer\", 
copy_buffer)?;\n\n exports.create_named_method(\n\n \"createBorrowedBufferWithNoopFinalize\",\n\n create_borrowed_buffer_with_noop_finalize,\n\n )?;\n\n exports.create_named_method(\n\n \"createBorrowedBufferWithFinalize\",\n\n create_borrowed_buffer_with_finalize,\n\n )?;\n\n exports.create_named_method(\"mutateBuffer\", mutate_buffer)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/buffer.rs", "rank": 27, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testCallFunction\", call_function)?;\n\n exports.create_named_method(\n\n \"testCallFunctionWithRefArguments\",\n\n call_function_with_ref_arguments,\n\n )?;\n\n exports.create_named_method(\"testCallFunctionWithThis\", call_function_with_this)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/function.rs", "rank": 28, "score": 279062.18843847234 }, { "content": "pub fn register_js(exports: &mut JsObject) -> napi::Result<()> {\n\n exports.create_named_method(\"engine\", new_engine)?;\n\n exports.create_named_method(\"query\", query)?;\n\n Ok(())\n\n}\n", "file_path": "bench/src/query.rs", "rank": 29, "score": 278014.8096487088 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testDetachArrayBuffer\", detach_arraybuffer)?;\n\n exports.create_named_method(\"testIsDetachedArrayBuffer\", is_detach_arraybuffer)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi7/mod.rs", "rank": 30, "score": 276723.1339571918 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testCreateBigintFromI64\", test_create_bigint_from_i64)?;\n\n exports.create_named_method(\"testCreateBigintFromU64\", test_create_bigint_from_u64)?;\n\n exports.create_named_method(\"testCreateBigintFromI128\", test_create_bigint_from_i128)?;\n\n exports.create_named_method(\"testCreateBigintFromU128\", 
test_create_bigint_from_u128)?;\n\n exports.create_named_method(\"testCreateBigintFromWords\", test_create_bigint_from_words)?;\n\n exports.create_named_method(\"testGetBigintI64\", test_get_bigint_i64)?;\n\n exports.create_named_method(\"testGetBigintU64\", test_get_bigint_u64)?;\n\n exports.create_named_method(\"testGetBigintWords\", test_get_bigint_words)?;\n\n\n\n exports.create_named_method(\"setInstanceData\", set_instance_data)?;\n\n exports.create_named_method(\"getInstanceData\", get_instance_data)?;\n\n exports.create_named_method(\"getWrongTypeInstanceData\", get_wrong_type_instance_data)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi6/mod.rs", "rank": 31, "score": 276723.1339571918 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testObjectIsDate\", date::test_object_is_date)?;\n\n exports.create_named_method(\"testCreateDate\", date::test_create_date)?;\n\n exports.create_named_method(\"testGetDateValue\", date::test_get_date_value)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi5/mod.rs", "rank": 32, "score": 276723.1339571918 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testSealObject\", seal_object)?;\n\n exports.create_named_method(\"testFreezeObject\", freeze_object)?;\n\n exports.create_named_method(\n\n \"testAddRemovableAsyncCleanupHook\",\n\n add_removable_async_cleanup_hook,\n\n )?;\n\n exports.create_named_method(\"testRemoveAsyncCleanupHook\", remove_async_cleanup_hook)?;\n\n exports.create_named_method(\"testAddAsyncCleanupHook\", add_async_cleanup_hook)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi8/mod.rs", "rank": 33, "score": 276723.1339571918 }, { "content": "#[contextless_function]\n\npub fn bench_create_buffer(env: Env) -> ContextlessResult<JsBuffer> {\n\n let mut output = Vec::with_capacity(1024);\n\n output.push(1);\n\n 
output.push(2);\n\n env\n\n .create_buffer_with_data(output)\n\n .map(|v| Some(v.into_raw()))\n\n}\n\n\n", "file_path": "bench/src/buffer.rs", "rank": 34, "score": 275134.05756070005 }, { "content": "#[contextless_function]\n\npub fn create_array(env: Env) -> ContextlessResult<JsObject> {\n\n let a: Vec<u32> = vec![42; 1000];\n\n let mut ret = env.create_array_with_length(a.len())?;\n\n for (index, item) in a.iter().enumerate() {\n\n ret.set_element(index as u32, env.create_uint32(*item)?)?;\n\n }\n\n Ok(Some(ret))\n\n}\n\n\n", "file_path": "bench/src/create_array.rs", "rank": 35, "score": 275134.05756070005 }, { "content": "pub fn register_js(exports: &mut JsObject) -> Result<()> {\n\n exports.create_named_method(\"testExecuteTokioReadfile\", test_execute_tokio_readfile)?;\n\n exports.create_named_method(\"testTokioError\", error_from_tokio_future)?;\n\n Ok(())\n\n}\n", "file_path": "examples/napi-compat-mode/src/tokio_rt/mod.rs", "rank": 36, "score": 274447.8920137017 }, { "content": "#[contextless_function]\n\nfn add_cleanup_hook(mut env: Env) -> ContextlessResult<JsExternal> {\n\n let hook = env.add_env_cleanup_hook((), |_| {\n\n println!(\"cleanup hook executed\");\n\n })?;\n\n env.create_external(hook, None).map(Some)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/cleanup_env.rs", "rank": 37, "score": 273069.67223500245 }, { "content": "#[contextless_function]\n\npub fn create_array_json(env: Env) -> ContextlessResult<JsString> {\n\n let a: Vec<u32> = vec![42; 1000];\n\n let arr_string = to_string(&a)?;\n\n env.create_string(arr_string.as_str()).map(Some)\n\n}\n\n\n", "file_path": "bench/src/create_array.rs", "rank": 38, "score": 272866.0443824535 }, { "content": "#[contextless_function]\n\npub fn create_array_with_serde_trait(env: Env) -> ContextlessResult<JsUnknown> {\n\n let a: Vec<u32> = vec![42; 1000];\n\n env.to_js_value(&a).map(Some)\n\n}\n", "file_path": "bench/src/create_array.rs", "rank": 39, "score": 270655.91981696023 }, { "content": 
"#[contextless_function]\n\npub fn noop(_env: Env) -> ContextlessResult<JsUndefined> {\n\n Ok(None)\n\n}\n", "file_path": "bench/src/noop.rs", "rank": 40, "score": 269209.2888967718 }, { "content": "#[contextless_function]\n\npub fn create_borrowed_buffer_with_finalize(env: Env) -> ContextlessResult<JsBuffer> {\n\n let data = vec![1, 2, 3];\n\n let data_ptr = data.as_ptr();\n\n let length = data.len();\n\n let manually_drop = ManuallyDrop::new(data);\n\n\n\n unsafe {\n\n env.create_buffer_with_borrowed_data(\n\n data_ptr,\n\n length,\n\n manually_drop,\n\n |mut hint: ManuallyDrop<Vec<u8>>, _| {\n\n ManuallyDrop::drop(&mut hint);\n\n },\n\n )\n\n }\n\n .map(|b| Some(b.into_raw()))\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/buffer.rs", "rank": 41, "score": 266400.4721595353 }, { "content": "#[contextless_function]\n\npub fn create_borrowed_buffer_with_noop_finalize(env: Env) -> ContextlessResult<JsBuffer> {\n\n let data = vec![1, 2, 3];\n\n let data_ptr = data.as_ptr();\n\n let length = data.len();\n\n let manually_drop = ManuallyDrop::new(data);\n\n\n\n unsafe { env.create_buffer_with_borrowed_data(data_ptr, length, manually_drop, noop_finalize) }\n\n .map(|b| Some(b.into_raw()))\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/buffer.rs", "rank": 42, "score": 264351.00102099654 }, { "content": "#[js_function(2)]\n\npub fn test_tsfn_with_ref(ctx: CallContext) -> Result<JsUndefined> {\n\n let callback = ctx.get::<JsFunction>(0)?;\n\n let options = ctx.get::<JsObject>(1)?;\n\n let options_ref = ctx.env.create_reference(options)?;\n\n let tsfn = ctx.env.create_threadsafe_function(\n\n &callback,\n\n 0,\n\n |mut ctx: ThreadSafeCallContext<Ref<()>>| {\n\n ctx\n\n .env\n\n .get_reference_value_unchecked::<JsObject>(&ctx.value)\n\n .and_then(|obj| ctx.value.unref(ctx.env).map(|_| vec![obj]))\n\n },\n\n )?;\n\n\n\n thread::spawn(move || {\n\n tsfn.call(Ok(options_ref), ThreadsafeFunctionCallMode::Blocking);\n\n });\n\n\n\n 
ctx.env.get_undefined()\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 43, "score": 254765.2127428632 }, { "content": "#[js_function(1)]\n\npub fn test_tsfn_error(ctx: CallContext) -> Result<JsUndefined> {\n\n let func = ctx.get::<JsFunction>(0)?;\n\n let tsfn = ctx\n\n .env\n\n .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<()>| {\n\n ctx.env.get_undefined().map(|v| vec![v])\n\n })?;\n\n thread::spawn(move || {\n\n tsfn.call(\n\n Err(Error::new(Status::GenericFailure, \"invalid\".to_owned())),\n\n ThreadsafeFunctionCallMode::Blocking,\n\n );\n\n });\n\n\n\n ctx.env.get_undefined()\n\n}\n\n\n\nasync fn read_file_content(filepath: &Path) -> Result<Vec<u8>> {\n\n tokio::fs::read(filepath)\n\n .await\n\n .map_err(|e| Error::new(Status::GenericFailure, format!(\"{}\", e)))\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 44, "score": 254765.2127428632 }, { "content": "#[js_function(1)]\n\npub fn is_dataview(ctx: CallContext) -> Result<JsBoolean> {\n\n let js_value = ctx.get::<JsUnknown>(0)?;\n\n ctx.env.get_boolean(js_value.is_dataview()?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 45, "score": 249944.09150581 }, { "content": "#[js_function(2)]\n\npub fn instanceof(ctx: CallContext) -> Result<JsBoolean> {\n\n let object = ctx.get::<JsUnknown>(0)?;\n\n let constructor = ctx.get::<JsUnknown>(1)?;\n\n ctx.env.get_boolean(object.instanceof(constructor)?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 46, "score": 249944.09150581 }, { "content": "#[js_function(1)]\n\npub fn is_typedarray(ctx: CallContext) -> Result<JsBoolean> {\n\n let js_value = ctx.get::<JsUnknown>(0)?;\n\n ctx.env.get_boolean(js_value.is_typedarray()?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 47, "score": 249944.09150581004 }, { "content": "#[napi]\n\npub fn get_str_from_object(env: Env) {\n\n let mut obj = 
env.create_object().unwrap();\n\n obj.set(\"name\", \"value\").unwrap();\n\n assert_eq!(obj.get(\"name\").unwrap(), Some(\"value\"));\n\n}\n", "file_path": "examples/napi/src/object.rs", "rank": 48, "score": 247863.46701332298 }, { "content": "#[js_function(1)]\n\npub fn cast_unknown(ctx: CallContext) -> Result<JsObject> {\n\n let arg: JsUnknown = ctx.get(0)?;\n\n Ok(unsafe { arg.cast::<JsObject>() })\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 49, "score": 247609.51240827533 }, { "content": "#[js_function(2)]\n\npub fn strict_equals(ctx: CallContext) -> Result<JsBoolean> {\n\n let a: JsUnknown = ctx.get(0)?;\n\n let b: JsUnknown = ctx.get(1)?;\n\n ctx.env.get_boolean(ctx.env.strict_equals(a, b)?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 50, "score": 247609.5124082754 }, { "content": "#[js_function(1)]\n\npub fn throw_syntax_error(ctx: CallContext) -> Result<JsUndefined> {\n\n let message: JsString = ctx.get(0)?;\n\n let syntax_error = ctx\n\n .env\n\n .get_global()?\n\n .get_named_property::<JsFunction>(\"SyntaxError\")?;\n\n ctx.env.throw(syntax_error.new_instance(&[message])?)?;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 51, "score": 245338.623754071 }, { "content": "#[js_function(2)]\n\npub fn test_tokio_readfile(ctx: CallContext) -> Result<JsUndefined> {\n\n let js_filepath = ctx.get::<JsString>(0)?;\n\n let js_func = ctx.get::<JsFunction>(1)?;\n\n let path_str = js_filepath.into_utf8()?.into_owned()?;\n\n\n\n let tsfn =\n\n ctx\n\n .env\n\n .create_threadsafe_function(&js_func, 0, |ctx: ThreadSafeCallContext<Vec<u8>>| {\n\n ctx\n\n .env\n\n .create_buffer_with_data(ctx.value)\n\n .map(|v| vec![v.into_raw()])\n\n })?;\n\n let rt = tokio::runtime::Runtime::new()\n\n .map_err(|e| Error::from_reason(format!(\"Create tokio runtime failed {}\", e)))?;\n\n\n\n rt.block_on(async move {\n\n let ret = 
read_file_content(Path::new(&path_str)).await;\n\n tsfn.call(ret, ThreadsafeFunctionCallMode::Blocking);\n\n });\n\n\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 52, "score": 243546.4120605533 }, { "content": "#[js_function(1)]\n\npub fn test_threadsafe_function(ctx: CallContext) -> Result<JsUndefined> {\n\n let func = ctx.get::<JsFunction>(0)?;\n\n\n\n let tsfn =\n\n ctx\n\n .env\n\n .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<Vec<u32>>| {\n\n ctx\n\n .value\n\n .iter()\n\n .map(|v| ctx.env.create_uint32(*v))\n\n .collect::<Result<Vec<JsNumber>>>()\n\n })?;\n\n\n\n let tsfn_cloned = tsfn.clone();\n\n\n\n thread::spawn(move || {\n\n let output: Vec<u32> = vec![0, 1, 2, 3];\n\n // It's okay to call a threadsafe function multiple times.\n\n tsfn.call(Ok(output), ThreadsafeFunctionCallMode::Blocking);\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 53, "score": 243546.4120605533 }, { "content": "pub fn spawn<F>(fut: F)\n\nwhere\n\n F: 'static + Send + Future<Output = ()>,\n\n{\n\n RT.0.spawn(fut);\n\n}\n\n\n", "file_path": "crates/napi/src/tokio_runtime.rs", "rank": 54, "score": 243419.63267441833 }, { "content": "#[js_function(1)]\n\npub fn test_abort_threadsafe_function(ctx: CallContext) -> Result<JsBoolean> {\n\n let func = ctx.get::<JsFunction>(0)?;\n\n\n\n let tsfn =\n\n ctx\n\n .env\n\n .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<Vec<u32>>| {\n\n ctx\n\n .value\n\n .iter()\n\n .map(|v| ctx.env.create_uint32(*v))\n\n .collect::<Result<Vec<JsNumber>>>()\n\n })?;\n\n\n\n let tsfn_cloned = tsfn.clone();\n\n\n\n tsfn_cloned.abort()?;\n\n ctx.env.get_boolean(tsfn.aborted())\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 55, "score": 241389.78255516087 }, { "content": "#[napi]\n\nfn get_global(env: Env) -> Result<JsGlobal> {\n\n env.get_global()\n\n}\n\n\n", "file_path": "examples/napi/src/object.rs", 
"rank": 56, "score": 240066.11200385762 }, { "content": "#[napi]\n\nfn get_undefined(env: Env) -> Result<JsUndefined> {\n\n env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi/src/object.rs", "rank": 57, "score": 240066.11200385762 }, { "content": "#[napi]\n\nfn get_null(env: Env) -> Result<JsNull> {\n\n env.get_null()\n\n}\n\n\n", "file_path": "examples/napi/src/object.rs", "rank": 58, "score": 240066.11200385762 }, { "content": "#[js_function(1)]\n\npub fn test_abort_independent_threadsafe_function(ctx: CallContext) -> Result<JsBoolean> {\n\n let func = ctx.get::<JsFunction>(0)?;\n\n\n\n let tsfn = ctx\n\n .env\n\n .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<u32>| {\n\n ctx.env.create_uint32(ctx.value).map(|v| vec![v])\n\n })?;\n\n\n\n let tsfn_other =\n\n ctx\n\n .env\n\n .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<u32>| {\n\n ctx.env.create_uint32(ctx.value).map(|v| vec![v])\n\n })?;\n\n\n\n tsfn_other.abort()?;\n\n ctx.env.get_boolean(tsfn.aborted())\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 59, "score": 239289.67576689093 }, { "content": "#[js_function(1)]\n\npub fn test_call_aborted_threadsafe_function(ctx: CallContext) -> Result<JsUndefined> {\n\n let func = ctx.get::<JsFunction>(0)?;\n\n\n\n let tsfn = ctx\n\n .env\n\n .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<u32>| {\n\n ctx.env.create_uint32(ctx.value).map(|v| vec![v])\n\n })?;\n\n\n\n let tsfn_clone = tsfn.clone();\n\n tsfn_clone.abort()?;\n\n\n\n let call_status = tsfn.call(Ok(1), ThreadsafeFunctionCallMode::NonBlocking);\n\n assert!(call_status == Status::Closing);\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn.rs", "rank": 60, "score": 239289.67576689093 }, { "content": "#[js_function(1)]\n\npub fn constructor(ctx: CallContext) -> napi::Result<JsUndefined> {\n\n let callback = ctx.get::<JsFunction>(0)?;\n\n\n\n let mut cb =\n\n ctx\n\n 
.env\n\n .create_threadsafe_function(&callback, 0, |ctx: ThreadSafeCallContext<String>| {\n\n ctx\n\n .env\n\n .create_string_from_std(ctx.value)\n\n .map(|js_string| vec![js_string])\n\n })?;\n\n\n\n cb.unref(ctx.env)?;\n\n\n\n let mut this: JsObject = ctx.this_unchecked();\n\n let obj = A { cb };\n\n\n\n ctx.env.wrap(&mut this, obj)?;\n\n ctx.env.get_undefined()\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi4/tsfn_dua_instance.rs", "rank": 61, "score": 237014.62546833226 }, { "content": "/// This function could be used for `create_buffer_with_borrowed_data` and want do noting when Buffer finalized.\n\npub fn noop_finalize<Hint>(_hint: Hint, _env: Env) {}\n\n\n\nunsafe extern \"C\" fn drop_buffer(\n\n _env: sys::napi_env,\n\n finalize_data: *mut c_void,\n\n hint: *mut c_void,\n\n) {\n\n let length_ptr = hint as *mut (usize, usize);\n\n let (length, cap) = unsafe { *Box::from_raw(length_ptr) };\n\n mem::drop(unsafe { Vec::from_raw_parts(finalize_data as *mut u8, length, cap) });\n\n}\n\n\n\npub(crate) unsafe extern \"C\" fn raw_finalize<T>(\n\n env: sys::napi_env,\n\n finalize_data: *mut c_void,\n\n finalize_hint: *mut c_void,\n\n) {\n\n let tagged_object = finalize_data as *mut TaggedObject<T>;\n\n unsafe { Box::from_raw(tagged_object) };\n\n if !finalize_hint.is_null() {\n", "file_path": "crates/napi/src/env.rs", "rank": 62, "score": 223660.67782169842 }, { "content": "#[napi]\n\npub fn call_threadsafe_function(callback: JsFunction) -> Result<()> {\n\n let tsfn: ThreadsafeFunction<u32, ErrorStrategy::CalleeHandled> = callback\n\n .create_threadsafe_function(0, |ctx| {\n\n ctx.env.create_uint32(ctx.value + 1).map(|v| vec![v])\n\n })?;\n\n for n in 0..100 {\n\n let tsfn = tsfn.clone();\n\n thread::spawn(move || {\n\n tsfn.call(Ok(n), ThreadsafeFunctionCallMode::Blocking);\n\n });\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/napi/src/threadsafe_function.rs", "rank": 63, "score": 214069.47435241198 }, { "content": "#[napi]\n\npub fn 
threadsafe_function_throw_error(cb: JsFunction) -> Result<()> {\n\n let tsfn: ThreadsafeFunction<bool, ErrorStrategy::CalleeHandled> =\n\n cb.create_threadsafe_function(0, |ctx| ctx.env.get_boolean(ctx.value).map(|v| vec![v]))?;\n\n thread::spawn(move || {\n\n tsfn.call(\n\n Err(Error::new(\n\n Status::GenericFailure,\n\n \"ThrowFromNative\".to_owned(),\n\n )),\n\n ThreadsafeFunctionCallMode::Blocking,\n\n );\n\n });\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/napi/src/threadsafe_function.rs", "rank": 64, "score": 212274.0065498767 }, { "content": "#[napi]\n\npub fn threadsafe_function_fatal_mode(cb: JsFunction) -> Result<()> {\n\n let tsfn: ThreadsafeFunction<bool, ErrorStrategy::Fatal> =\n\n cb.create_threadsafe_function(0, |ctx| ctx.env.get_boolean(ctx.value).map(|v| vec![v]))?;\n\n thread::spawn(move || {\n\n tsfn.call(true, ThreadsafeFunctionCallMode::Blocking);\n\n });\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/napi/src/threadsafe_function.rs", "rank": 65, "score": 212274.00654987668 }, { "content": "#[contextless_function]\n\nfn get_env_variable(env: Env) -> ContextlessResult<JsString> {\n\n env\n\n .create_string_from_std(std::env::var(\"npm_package_name\").unwrap())\n\n .map(Some)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/env.rs", "rank": 66, "score": 212105.46649087523 }, { "content": "#[napi]\n\npub fn threadsafe_function_fatal_mode_error(cb: JsFunction) -> Result<()> {\n\n let tsfn: ThreadsafeFunction<bool, ErrorStrategy::Fatal> =\n\n cb.create_threadsafe_function(0, |_ctx| {\n\n Err::<Vec<JsBoolean>, Error>(Error::new(\n\n Status::GenericFailure,\n\n \"Generic tsfn error\".to_owned(),\n\n ))\n\n })?;\n\n thread::spawn(move || {\n\n tsfn.call(true, ThreadsafeFunctionCallMode::Blocking);\n\n });\n\n Ok(())\n\n}\n", "file_path": "examples/napi/src/threadsafe_function.rs", "rank": 67, "score": 210528.19891064859 }, { "content": "pub fn check_recorded_struct_for_impl(ident: &Ident, opts: &BindgenAttrs) -> BindgenResult<String> {\n\n 
STRUCTS.with(|state| {\n\n let struct_name = ident.to_string();\n\n let mut map = state.parsed.borrow_mut();\n\n if let Some(parsed) = map.get_mut(&struct_name) {\n\n if opts.constructor().is_some() && !cfg!(debug_assertions) {\n\n if parsed.ctor_defined {\n\n bail_span!(\n\n ident,\n\n \"Constructor has already been defined for struct `{}`\",\n\n &struct_name\n\n );\n\n } else {\n\n parsed.ctor_defined = true;\n\n }\n\n }\n\n\n\n Ok(parsed.js_name.clone())\n\n } else {\n\n bail_span!(\n\n ident,\n\n \"Did not find struct `{}` parsed before expand #[napi] for impl\",\n\n &struct_name,\n\n )\n\n }\n\n })\n\n}\n", "file_path": "crates/macro/src/parser/attrs.rs", "rank": 68, "score": 208260.19217532285 }, { "content": "#[module_exports]\n\nfn init(mut exports: napi::JsObject) -> napi::Result<()> {\n\n exports.create_named_method(\"testAsync\", test_async)?;\n\n exports.create_named_method(\"convertFromJS\", from_js)?;\n\n Ok(())\n\n}\n", "file_path": "memory-testing/src/lib.rs", "rank": 69, "score": 208106.37329175253 }, { "content": "#[js_function(1)]\n\npub fn is_error(ctx: CallContext) -> Result<JsBoolean> {\n\n let js_value = ctx.get::<JsUnknown>(0)?;\n\n ctx.env.get_boolean(js_value.is_error()?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/error.rs", "rank": 70, "score": 207369.9183239109 }, { "content": "#[js_function(1)]\n\npub fn call_function(ctx: CallContext) -> Result<JsNull> {\n\n let js_func = ctx.get::<JsFunction>(0)?;\n\n let js_string_hello = ctx.env.create_string(\"hello\".as_ref())?.into_unknown();\n\n let js_string_world = ctx.env.create_string(\"world\".as_ref())?.into_unknown();\n\n\n\n js_func.call(None, &[js_string_hello, js_string_world])?;\n\n\n\n ctx.env.get_null()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/function.rs", "rank": 71, "score": 205624.11068468285 }, { "content": "#[js_function(1)]\n\npub fn call_function_with_this(ctx: CallContext) -> Result<JsNull> {\n\n let js_this: JsObject = 
ctx.this_unchecked();\n\n let js_func = ctx.get::<JsFunction>(0)?;\n\n\n\n js_func.call_without_args(Some(&js_this))?;\n\n\n\n ctx.env.get_null()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/function.rs", "rank": 72, "score": 205624.11068468285 }, { "content": "#[js_function(1)]\n\npub fn create_external(ctx: CallContext) -> Result<JsExternal> {\n\n let count = ctx.get::<JsNumber>(0)?.try_into()?;\n\n let native = NativeObject { count };\n\n ctx.env.create_external(native, None)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/external.rs", "rank": 73, "score": 205624.11068468285 }, { "content": "#[js_function(2)]\n\npub fn set_timeout(ctx: CallContext) -> Result<JsTimeout> {\n\n let handler: JsFunction = ctx.get(0)?;\n\n let timeout: JsNumber = ctx.get(1)?;\n\n ctx\n\n .env\n\n .get_global()?\n\n .set_timeout(handler, timeout.try_into()?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/global.rs", "rank": 74, "score": 205624.11068468282 }, { "content": "#[js_function(1)]\n\npub fn buffer_to_string(ctx: CallContext) -> Result<JsString> {\n\n let buffer = ctx.get::<JsBuffer>(0)?.into_value()?;\n\n ctx.env.create_string(\n\n str::from_utf8(&buffer).map_err(|e| Error::new(Status::StringExpected, format!(\"{}\", e)))?,\n\n )\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/buffer.rs", "rank": 75, "score": 205624.11068468285 }, { "content": "#[js_function(1)]\n\npub fn copy_buffer(ctx: CallContext) -> Result<JsBuffer> {\n\n let buffer = ctx.get::<JsBuffer>(0)?.into_value()?;\n\n ctx.env.create_buffer_copy(buffer).map(|b| b.into_raw())\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/buffer.rs", "rank": 76, "score": 205624.11068468282 }, { "content": "#[js_function(1)]\n\npub fn clear_timeout(ctx: CallContext) -> Result<JsUndefined> {\n\n let timer: JsTimeout = ctx.get(0)?;\n\n ctx.env.get_global()?.clear_timeout(timer)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/global.rs", "rank": 77, "score": 205624.11068468282 
}, { "content": "#[contextless_function]\n\nfn test_create_array(env: Env) -> ContextlessResult<JsObject> {\n\n env.create_empty_array().map(Some)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/array.rs", "rank": 78, "score": 204236.0496263532 }, { "content": "#[js_function(1)]\n\n#[cfg(feature = \"latest\")]\n\npub fn mutate_i64_array(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut buffer = ctx.get::<JsTypedArray>(0)?.into_value()?;\n\n let buffer_mut_ref: &mut [i64] = buffer.as_mut();\n\n buffer_mut_ref[0] = 9223372036854775807;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 79, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn seal_object(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut obj: JsObject = ctx.get(0)?;\n\n obj.seal()?;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi8/object.rs", "rank": 80, "score": 203925.93101515144 }, { "content": "#[js_function]\n\npub fn test_throw_with_panic(_ctx: CallContext) -> Result<JsUnknown> {\n\n panic!(\"don't panic.\");\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/error.rs", "rank": 81, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn mutate_uint8_array(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut buffer = ctx.get::<JsTypedArray>(0)?.into_value()?;\n\n let buffer_mut_ref: &mut [u8] = buffer.as_mut();\n\n buffer_mut_ref[0] = 42;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 82, "score": 203925.9310151514 }, { "content": "#[js_function]\n\npub fn create_unnamed_symbol(ctx: CallContext) -> Result<JsSymbol> {\n\n ctx.env.create_symbol(None)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/symbol.rs", "rank": 83, "score": 203925.93101515144 }, { "content": "#[js_function(1)]\n\npub fn get_external_count(ctx: CallContext) -> Result<JsNumber> {\n\n let 
attached_obj = ctx.get::<JsExternal>(0)?;\n\n let native_object = ctx.env.get_value_external::<NativeObject>(&attached_obj)?;\n\n ctx.env.create_int32(native_object.count)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/external.rs", "rank": 84, "score": 203925.93101515144 }, { "content": "#[js_function(1)]\n\npub fn detach_arraybuffer(ctx: CallContext) -> Result<JsUndefined> {\n\n let input = ctx.get::<JsArrayBuffer>(0)?;\n\n input.detach()?;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi7/buffer.rs", "rank": 85, "score": 203925.93101515144 }, { "content": "#[js_function]\n\npub fn create_named_symbol(ctx: CallContext) -> Result<JsSymbol> {\n\n ctx.env.create_symbol(Some(\"native\"))\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/symbol.rs", "rank": 86, "score": 203925.93101515144 }, { "content": "#[js_function(1)]\n\npub fn mutate_float32_array(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut buffer = ctx.get::<JsTypedArray>(0)?.into_value()?;\n\n let buffer_mut_ref: &mut [f32] = buffer.as_mut();\n\n buffer_mut_ref[0] = 3.33;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 87, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn freeze_object(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut obj: JsObject = ctx.get(0)?;\n\n obj.freeze()?;\n\n ctx.env.get_undefined()\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi8/object.rs", "rank": 88, "score": 203925.93101515144 }, { "content": "#[js_function(1)]\n\npub fn get_buffer_length(ctx: CallContext) -> Result<JsNumber> {\n\n let buffer = ctx.get::<JsBuffer>(0)?.into_value()?;\n\n ctx.env.create_uint32((&buffer).len() as u32)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/buffer.rs", "rank": 89, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn mutate_int16_array(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut buffer = 
ctx.get::<JsTypedArray>(0)?.into_value()?;\n\n let buffer_mut_ref: &mut [i16] = buffer.as_mut();\n\n buffer_mut_ref[0] = 32767;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 90, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn mutate_float64_array(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut buffer = ctx.get::<JsTypedArray>(0)?.into_value()?;\n\n let buffer_mut_ref: &mut [f64] = buffer.as_mut();\n\n buffer_mut_ref[0] = PI;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 91, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn get_arraybuffer_length(ctx: CallContext) -> Result<JsNumber> {\n\n let buffer = ctx.get::<JsArrayBuffer>(0)?.into_value()?;\n\n ctx.env.create_uint32((&buffer).len() as u32)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 92, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn is_detach_arraybuffer(ctx: CallContext) -> Result<JsBoolean> {\n\n let input = ctx.get::<JsArrayBuffer>(0)?;\n\n ctx.env.get_boolean(input.is_detached()?)\n\n}\n", "file_path": "examples/napi-compat-mode/src/napi7/buffer.rs", "rank": 93, "score": 203925.93101515144 }, { "content": "#[js_function(1)]\n\npub fn mutate_uint16_array(ctx: CallContext) -> Result<JsUndefined> {\n\n let mut buffer = ctx.get::<JsTypedArray>(0)?.into_value()?;\n\n let buffer_mut_ref: &mut [u16] = buffer.as_mut();\n\n buffer_mut_ref[0] = 65535;\n\n ctx.env.get_undefined()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/arraybuffer.rs", "rank": 94, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn dynamic_argument_length(ctx: CallContext) -> Result<JsNumber> {\n\n let value: Option<JsNumber> = ctx.try_get::<JsNumber>(0)?.into();\n\n if let Some(n) = value {\n\n let n: u32 = n.try_into()?;\n\n ctx.env.create_uint32(n + 100)\n\n } else {\n\n 
ctx.env.create_uint32(42)\n\n }\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/either.rs", "rank": 95, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn create_external_with_hint(ctx: CallContext) -> Result<JsExternal> {\n\n let count = ctx.get::<JsNumber>(0)?.try_into()?;\n\n let native = NativeObject { count };\n\n ctx.env.create_external(native, Some(5))\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/external.rs", "rank": 96, "score": 203925.9310151514 }, { "content": "#[js_function(1)]\n\npub fn test_object_is_date(ctx: CallContext) -> Result<JsBoolean> {\n\n let obj = ctx.get::<JsUnknown>(0)?;\n\n ctx.env.get_boolean(obj.is_date()?)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi5/date.rs", "rank": 97, "score": 202273.45651148114 }, { "content": "#[js_function(1)]\n\npub fn test_create_date(ctx: CallContext) -> Result<JsDate> {\n\n let timestamp: f64 = ctx.get::<JsNumber>(0)?.try_into()?;\n\n ctx.env.create_date(timestamp)\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/napi5/date.rs", "rank": 98, "score": 202273.45651148114 }, { "content": "#[js_function(1)]\n\npub fn call_function_with_ref_arguments(ctx: CallContext) -> Result<JsNull> {\n\n let js_func = ctx.get::<JsFunction>(0)?;\n\n let js_string_hello = ctx.env.create_string(\"hello\".as_ref())?;\n\n let js_string_world = ctx.env.create_string(\"world\".as_ref())?;\n\n\n\n js_func.call(None, &[&js_string_hello, &js_string_world])?;\n\n\n\n ctx.env.get_null()\n\n}\n\n\n", "file_path": "examples/napi-compat-mode/src/function.rs", "rank": 99, "score": 202273.45651148114 } ]
Rust
src/sys/component_manager/tests/test_utils.rs
mehulagg/fuchsia
3f56175ee594da6b287d5fb19f2f0eccea2897f0
use { breakpoint_system_client::BreakpointSystemClient, failure::{format_err, Error, ResultExt}, fidl_fuchsia_io::DirectoryProxy, fidl_fuchsia_sys::{ ComponentControllerEvent, EnvironmentControllerEvent, EnvironmentControllerProxy, EnvironmentMarker, EnvironmentOptions, FileDescriptor, LauncherProxy, }, fidl_fuchsia_test_breakpoints::*, files_async, fuchsia_component::client::*, fuchsia_runtime::HandleType, fuchsia_zircon as zx, futures::future, futures::stream::{StreamExt, TryStreamExt}, parking_lot::{Condvar, Mutex}, rand::random, std::fs::*, std::path::PathBuf, std::{fs::File, io::Read, sync::Arc, thread, time::Duration}, }; pub static COMPONENT_MANAGER_URL: &str = "fuchsia-pkg://fuchsia.com/component_manager#meta/component_manager.cmx"; pub struct BlackBoxTest { pub env: EnvironmentControllerProxy, pub component_manager_app: App, pub component_manager_url: String, pub root_component_url: String, pub label: String, } impl BlackBoxTest { pub async fn default(root_component_url: &str) -> Result<Self, Error> { Self::custom(COMPONENT_MANAGER_URL, root_component_url, vec![], None).await } pub async fn custom( component_manager_url: &str, root_component_url: &str, dir_handles: Vec<(String, zx::Handle)>, output_file_descriptor: Option<FileDescriptor>, ) -> Result<Self, Error> { let random_num = random::<u32>(); let label = format!("test_{}", random_num); let (env, launcher) = create_isolated_environment(&label).await?; let component_manager_app = launch_component_manager( launcher, component_manager_url, root_component_url, dir_handles, output_file_descriptor, ) .await?; let test = Self { env, component_manager_app, component_manager_url: component_manager_url.to_string(), root_component_url: root_component_url.to_string(), label, }; Ok(test) } pub fn get_component_manager_path(&self) -> PathBuf { find_component_manager_in_hub(&self.component_manager_url, &self.label) } pub fn get_hub_v2_path(&self) -> PathBuf { let path = self.get_component_manager_path(); 
path.join("out/hub") } pub async fn connect_to_breakpoint_system(&self) -> Result<BreakpointSystemClient, Error> { let path = self.get_component_manager_path(); connect_to_breakpoint_system(&path).await } } pub async fn launch_component_and_expect_output( root_component_url: &str, expected_output: String, ) -> Result<(), Error> { launch_component_and_expect_output_with_extra_dirs(root_component_url, vec![], expected_output) .await } pub async fn launch_component_and_expect_output_with_extra_dirs( root_component_url: &str, dir_handles: Vec<(String, zx::Handle)>, expected_output: String, ) -> Result<(), Error> { let (file, pipe_handle) = make_pipe(); let test = BlackBoxTest::custom( COMPONENT_MANAGER_URL, root_component_url, dir_handles, Some(pipe_handle), ) .await?; let breakpoint_system_client = &test.connect_to_breakpoint_system().await?; breakpoint_system_client.start_component_manager().await?; read_from_pipe(file, expected_output) } async fn create_isolated_environment( label: &str, ) -> Result<(EnvironmentControllerProxy, LauncherProxy), Error> { let env = connect_to_service::<EnvironmentMarker>() .context("could not connect to current environment")?; let (new_env, new_env_server_end) = fidl::endpoints::create_proxy().context("could not create proxy")?; let (controller, controller_server_end) = fidl::endpoints::create_proxy().context("could not create proxy")?; let (launcher, launcher_server_end) = fidl::endpoints::create_proxy().context("could not create proxy")?; let mut env_options = EnvironmentOptions { inherit_parent_services: true, use_parent_runners: true, kill_on_oom: false, delete_storage_on_death: true, }; env.create_nested_environment( new_env_server_end, controller_server_end, label, None, &mut env_options, ) .context("could not create isolated environment")?; let EnvironmentControllerEvent::OnCreated {} = controller.take_event_stream().next().await.unwrap().unwrap(); new_env .get_launcher(launcher_server_end) .context("could not get isolated 
environment launcher")?; Ok((controller, launcher)) } async fn launch_component_manager( launcher: LauncherProxy, component_manager_url: &str, root_component_url: &str, dir_handles: Vec<(String, zx::Handle)>, output_file_descriptor: Option<FileDescriptor>, ) -> Result<App, Error> { let mut options = LaunchOptions::new(); if let Some(output_file_descriptor) = output_file_descriptor { options.set_out(output_file_descriptor); } for dir in dir_handles { options.add_handle_to_namespace(dir.0, dir.1); } let component_manager_app = launch_with_options( &launcher, component_manager_url.to_string(), Some(vec![root_component_url.to_string(), "--debug".to_string()]), options, ) .context("could not launch component manager")?; let event_stream = component_manager_app.controller().take_event_stream(); event_stream .try_filter_map(|event| { let event = match event { ComponentControllerEvent::OnDirectoryReady {} => Some(event), _ => None, }; future::ready(Ok(event)) }) .next() .await; Ok(component_manager_app) } async fn connect_to_breakpoint_system( component_manager_path: &PathBuf, ) -> Result<BreakpointSystemClient, Error> { let path_to_svc = component_manager_path.join("out/svc"); let path_to_svc = path_to_svc.to_str().expect("found invalid chars"); let proxy = connect_to_service_at::<BreakpointSystemMarker>(path_to_svc) .context("could not connect to BreakpointSystem service")?; Ok(BreakpointSystemClient::from_proxy(proxy)) } fn find_component_manager_in_hub(component_manager_url: &str, label: &str) -> PathBuf { let path_to_env = format!("/hub/r/{}", label); let dir: Vec<DirEntry> = read_dir(path_to_env) .expect("could not open nested environment in the hub") .map(|x| x.expect("entry unreadable")) .collect(); assert_eq!(dir.len(), 1); let component_name = component_manager_url .split("/") .last() .expect("the URL for component manager must have at least one '/' character"); let path_to_cm = dir[0].path().join("c").join(component_name); let dir: Vec<DirEntry> = 
read_dir(path_to_cm) .expect("could not open component manager in the hub") .map(|x| x.expect("entry unreadable")) .collect(); assert_eq!(dir.len(), 1); dir[0].path() } const WAIT_TIMEOUT_SEC: u64 = 10; fn make_pipe() -> (std::fs::File, FileDescriptor) { match fdio::pipe_half() { Err(_) => panic!("failed to create pipe"), Ok((pipe, handle)) => { let pipe_handle = FileDescriptor { type0: HandleType::FileDescriptor as i32, type1: 0, type2: 0, handle0: Some(handle.into()), handle1: None, handle2: None, }; (pipe, pipe_handle) } } } fn read_from_pipe(mut f: File, expected_msg: String) -> Result<(), Error> { let pair = Arc::new((Mutex::new(Vec::new()), Condvar::new())); { let pair = pair.clone(); let expected_msg = expected_msg.clone(); thread::spawn(move || { let expected = expected_msg.as_bytes(); let mut buf = [0; 1024]; loop { let n = f.read(&mut buf).expect("failed to read pipe"); let (actual, cond) = &*pair; let mut actual = actual.lock(); actual.extend_from_slice(&buf[0..n]); if &**actual == expected { cond.notify_one(); return; } } }); } let (actual, cond) = &*pair; let mut actual = actual.lock(); if cond.wait_for(&mut actual, Duration::from_secs(WAIT_TIMEOUT_SEC)).timed_out() { let actual_msg = String::from_utf8(actual.clone()) .map(|v| format!("'{}'", v)) .unwrap_or(format!("{:?}", actual)); return Err(format_err!( "Timed out waiting for matching output\n\ Expected: '{}'\n\ Actual: {}", expected_msg, actual_msg, )); } Ok(()) } pub async fn list_directory(root_proxy: &DirectoryProxy) -> Result<Vec<String>, Error> { let entries = files_async::readdir(&root_proxy).await?; let mut items = entries.iter().map(|entry| entry.name.clone()).collect::<Vec<String>>(); items.sort(); Ok(items) }
use { breakpoint_system_client::BreakpointSystemClient, failure::{format_err, Error, ResultExt}, fidl_fuchsia_io::DirectoryProxy, fidl_fuchsia_sys::{ ComponentControllerEvent, EnvironmentControllerEvent, EnvironmentControllerProxy, EnvironmentMarker, EnvironmentOptions, FileDescriptor, LauncherProxy, }, fidl_fuchsia_test_breakpoints::*, files_async, fuchsia_component::client::*, fuchsia_runtime::HandleType, fuchsia_zircon as zx, futures::future, futures::stream::{StreamExt, TryStreamExt}, parking_lot::{Condvar, Mutex}, rand::random, std::fs::*, std::path::PathBuf, std::{fs::File, io::Read, sync::Arc, thread, time::Duration}, }; pub static COMPONENT_MANAGER_URL: &str = "fuchsia-pkg://fuchsia.com/component_manager#meta/component_manager.cmx"; pub struct BlackBoxTest { pub env: EnvironmentControllerProxy, pub component_manager_app: App, pub component_manager_url: String, pub root_component_url: String, pub label: String, } impl BlackBoxTest { pub async fn default(root_component_url: &str) -> Result<Self, Error> { Self::custom(COMPONENT_MANAGER_URL, root_component_url, vec![], None).await } pub async fn custom( component_manager_url: &str, root_component_url: &str, dir_handles: Vec<(String, zx::Handle)>, output_file_descriptor: Option<FileDescriptor>, ) -> Result<Self, Error> { let random_num = random::<u32>(); let label = format!("test_{}", random_num); let (env, launcher) = create_isolated_environment(&label).await?; let component_manager_app = launch_component_manager( launcher, component_manager_url, root_component_url, dir_handles, output_file_descriptor, ) .await?; let test = Self { env, component_manager_app, component_manager_url: component_manager_url.to_string(), root_component_url: root_component_url.to_string(), label, }; Ok(test) } pub fn get_component_manager_path(&self) -> PathBuf { find_component_manager_in_hub(&self.component_manager_url, &self.label) } pub fn get_hub_v2_path(&self) -> PathBuf { let path = self.get_component_manager_path(); 
path.join("out/hub") } pub async fn connect_to_breakpoint_system(&self) -> Result<BreakpointSystemClient, Error> { let path = self.get_component_manager_path(); connect_to_breakpoint_system(&path).await } } pub async fn launch_component_and_expect_output( root_component_url: &str, expected_output: String, ) -> Result<(), Error> { launch_component_and_expect_output_with_extra_dirs(root_component_url, vec![], expected_output) .await } pub async fn launch_component_and_expect_output_with_extra_dirs( root_component_url: &str, dir_handles: Vec<(String, zx::Handle)>, expected_output: String, ) -> Result<(), Error> { let (file, pipe_handle) = make_pipe(); let test = BlackBoxTest::custom( COMPONENT_MANAGER_URL, root_component_url, dir_handles, Some(pipe_handle), ) .await?; let breakpoint_system_client = &test.connect_to_breakpoint_system().await?; breakpoint_system_client.start_component_manager().await?; read_from_pipe(file, expected_output) } async fn create_isolated_environment( label: &str, ) -> Result<(EnvironmentControllerProxy, LauncherProxy), Error> { let env = connect_to_service::<EnvironmentMarker>() .context("could not connect to current environment")?; let (new_env, new_env_server_end) = fidl::endpoints::create_proxy().context("could not create proxy")?; let (controller, controller_server_end) = fidl::endpoints::create_proxy().context("could not create proxy")?; let (launcher, launcher_server_end) = fidl::endpoints::create_proxy().context("could not create proxy")?; let mut env_options = EnvironmentOptions { inherit_parent_services: true, use_parent_runners: true, kill_on_oom: false, delete_storage_on_death: true, }; env.create_nested_environment( new_env_server_end, controller_server_end, label, None, &mut env_options, ) .context("could not create isolated environment")?; let EnvironmentControllerEvent::OnCreated {} = controller.take_event_stream().next().await.unwrap().unwrap(); new_env .get_launcher(launcher_server_end) .context("could not get isolated 
environment launcher")?; Ok((controller, launcher)) } async fn launch_component_manager( launcher: LauncherProxy, component_manager_url: &str, root_component_url: &str, dir_handles: Vec<(String, zx::Handle)>, output_file_descriptor: Option<FileDescriptor>, ) -> Result<App, Error> { let mut options = LaunchOptions::new(); if let Some(output_file_descriptor) = output_file_descriptor { options.set_out(output_file_descriptor); } for dir in dir_handles { options.add_handle_to_namespace(dir.0, dir.1); } let component_manager_app = launch_with_options( &launcher, component_manager_url.to_string(), Some(vec![root_component_url.to_string(), "--debug".to_string()]), options, ) .context("could not launch component manager")?; let event_stream = component_manager_app.controller().take_event_stream(); event_stream .try_filter_map(|event| { let event = match event { ComponentControllerEvent::OnDirectoryReady {} => Some(event), _ => None, }; future::ready(Ok(event)) }) .next() .await; Ok(component_manager_app) } async fn connec
fn find_component_manager_in_hub(component_manager_url: &str, label: &str) -> PathBuf { let path_to_env = format!("/hub/r/{}", label); let dir: Vec<DirEntry> = read_dir(path_to_env) .expect("could not open nested environment in the hub") .map(|x| x.expect("entry unreadable")) .collect(); assert_eq!(dir.len(), 1); let component_name = component_manager_url .split("/") .last() .expect("the URL for component manager must have at least one '/' character"); let path_to_cm = dir[0].path().join("c").join(component_name); let dir: Vec<DirEntry> = read_dir(path_to_cm) .expect("could not open component manager in the hub") .map(|x| x.expect("entry unreadable")) .collect(); assert_eq!(dir.len(), 1); dir[0].path() } const WAIT_TIMEOUT_SEC: u64 = 10; fn make_pipe() -> (std::fs::File, FileDescriptor) { match fdio::pipe_half() { Err(_) => panic!("failed to create pipe"), Ok((pipe, handle)) => { let pipe_handle = FileDescriptor { type0: HandleType::FileDescriptor as i32, type1: 0, type2: 0, handle0: Some(handle.into()), handle1: None, handle2: None, }; (pipe, pipe_handle) } } } fn read_from_pipe(mut f: File, expected_msg: String) -> Result<(), Error> { let pair = Arc::new((Mutex::new(Vec::new()), Condvar::new())); { let pair = pair.clone(); let expected_msg = expected_msg.clone(); thread::spawn(move || { let expected = expected_msg.as_bytes(); let mut buf = [0; 1024]; loop { let n = f.read(&mut buf).expect("failed to read pipe"); let (actual, cond) = &*pair; let mut actual = actual.lock(); actual.extend_from_slice(&buf[0..n]); if &**actual == expected { cond.notify_one(); return; } } }); } let (actual, cond) = &*pair; let mut actual = actual.lock(); if cond.wait_for(&mut actual, Duration::from_secs(WAIT_TIMEOUT_SEC)).timed_out() { let actual_msg = String::from_utf8(actual.clone()) .map(|v| format!("'{}'", v)) .unwrap_or(format!("{:?}", actual)); return Err(format_err!( "Timed out waiting for matching output\n\ Expected: '{}'\n\ Actual: {}", expected_msg, actual_msg, )); } Ok(()) } 
pub async fn list_directory(root_proxy: &DirectoryProxy) -> Result<Vec<String>, Error> { let entries = files_async::readdir(&root_proxy).await?; let mut items = entries.iter().map(|entry| entry.name.clone()).collect::<Vec<String>>(); items.sort(); Ok(items) }
t_to_breakpoint_system( component_manager_path: &PathBuf, ) -> Result<BreakpointSystemClient, Error> { let path_to_svc = component_manager_path.join("out/svc"); let path_to_svc = path_to_svc.to_str().expect("found invalid chars"); let proxy = connect_to_service_at::<BreakpointSystemMarker>(path_to_svc) .context("could not connect to BreakpointSystem service")?; Ok(BreakpointSystemClient::from_proxy(proxy)) }
function_block-function_prefixed
[]
Rust
src/lib.rs
laurmaedje/symslice
5e650353099e46b35a2b614b64f4eee7d0307bac
use std::collections::HashMap; use std::fmt::{self, Display, Formatter}; use std::path::Path; use crate::elf::ElfFile; use crate::ir::{Microcode, MicroEncoder}; use crate::x86_64::Instruction; #[macro_use] mod helper { use std::fmt::{self, Formatter}; use crate::math::DataType; pub fn write_signed_hex(f: &mut Formatter, value: i64) -> fmt::Result { if value > 0 { write!(f, "+{:#x}", value) } else if value < 0 { write!(f, "-{:#x}", -value) } else { Ok(()) } } pub fn signed_name(s: bool) -> &'static str { if s { " signed" } else { "" } } pub fn boxed<T>(value: T) -> Box<T> { Box::new(value) } pub fn check_compatible(a: DataType, b: DataType, operation: &str) { assert_eq!(a, b, "incompatible data types for {}", operation); } macro_rules! debug_display { ($type:ty) => { impl std::fmt::Debug for $type { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(self, f) } } }; } } pub mod flow; pub mod math; pub mod sym; pub mod elf; pub mod ir; pub mod x86_64; #[cfg(feature = "timings")] pub mod timings; #[cfg(not(feature = "timings"))] mod timings { pub(crate) fn with<S: Into<String>, F, T>(_: S, f: F) -> T where F: FnOnce() -> T { f() } pub(crate) fn start<S>(_: S) {} pub(crate) fn stop() {} } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Program { pub base: u64, pub entry: u64, pub binary: Vec<u8>, pub code: Vec<(u64, u64, Instruction, Microcode)>, pub symbols: HashMap<u64, String>, } impl Program { pub fn new<P: AsRef<Path>>(filename: P) -> Program { crate::timings::start("program"); let mut file = ElfFile::new(filename).unwrap(); let text = file.get_section(".text").unwrap(); let base = text.header.addr; let binary = text.data; let mut index = 0; let mut code = Vec::new(); let mut encoder = MicroEncoder::new(); while index < binary.len() as u64 { let len = Instruction::length(&binary[index as usize ..]); let bytes = &binary[index as usize .. 
(index + len) as usize]; let instruction = Instruction::decode(bytes).unwrap(); let microcode = encoder.encode(&instruction).unwrap(); code.push((base + index, len, instruction, microcode)); index += len; } let mut symbols = HashMap::new(); if let Ok(symbol_entries) = file.get_symbols() { for entry in symbol_entries { if !entry.name.is_empty() { symbols.insert(entry.value, entry.name); } } } crate::timings::stop(); Program { base, entry: file.header.entry, binary, code, symbols } } pub fn get_instruction(&self, addr: u64) -> Option<&Instruction> { self.code.iter() .find(|entry| entry.0 == addr) .map(|entry| &entry.2) } } impl Display for Program { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "Program [")?; if !self.code.is_empty() { writeln!(f)?; } let mut first = true; for (addr, _, instruction, microcode) in &self.code { if f.alternate() && !first { writeln!(f)?; } first = false; writeln!(f, " {:x}: {}", addr, instruction)?; if f.alternate() { for op in &microcode.ops { writeln!(f, " | {}", op)?; } } } write!(f, "]") } } #[cfg(test)] mod tests { use super::*; fn test(filename: &str) { let path = format!("target/bin/{}", filename); Program::new(path); } #[test] fn program() { test("block-1"); test("block-2"); test("case"); test("twice"); test("loop"); test("recursive-1"); test("recursive-2"); test("func"); test("bufs"); test("paths"); test("deep"); test("overwrite"); test("min"); } }
use std::collections::HashMap; use std::fmt::{self, Display, Formatter}; use std::path::Path; use crate::elf::ElfFile; use crate::ir::{Microcode, MicroEncoder}; use crate::x86_64::Instruction; #[macro_use] mod helper { use std::fmt::{self, Formatter}; use crate::math::DataType; pub fn write_signed_hex(f: &mut Formatter, value: i64) -> fmt::Result { if value > 0 { write!(f, "+{:#x}", value) } else if value < 0 { write!(f, "-{:#x}", -value) } else {
" {:x}: {}", addr, instruction)?; if f.alternate() { for op in &microcode.ops { writeln!(f, " | {}", op)?; } } } write!(f, "]") } } #[cfg(test)] mod tests { use super::*; fn test(filename: &str) { let path = format!("target/bin/{}", filename); Program::new(path); } #[test] fn program() { test("block-1"); test("block-2"); test("case"); test("twice"); test("loop"); test("recursive-1"); test("recursive-2"); test("func"); test("bufs"); test("paths"); test("deep"); test("overwrite"); test("min"); } }
Ok(()) } } pub fn signed_name(s: bool) -> &'static str { if s { " signed" } else { "" } } pub fn boxed<T>(value: T) -> Box<T> { Box::new(value) } pub fn check_compatible(a: DataType, b: DataType, operation: &str) { assert_eq!(a, b, "incompatible data types for {}", operation); } macro_rules! debug_display { ($type:ty) => { impl std::fmt::Debug for $type { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(self, f) } } }; } } pub mod flow; pub mod math; pub mod sym; pub mod elf; pub mod ir; pub mod x86_64; #[cfg(feature = "timings")] pub mod timings; #[cfg(not(feature = "timings"))] mod timings { pub(crate) fn with<S: Into<String>, F, T>(_: S, f: F) -> T where F: FnOnce() -> T { f() } pub(crate) fn start<S>(_: S) {} pub(crate) fn stop() {} } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Program { pub base: u64, pub entry: u64, pub binary: Vec<u8>, pub code: Vec<(u64, u64, Instruction, Microcode)>, pub symbols: HashMap<u64, String>, } impl Program { pub fn new<P: AsRef<Path>>(filename: P) -> Program { crate::timings::start("program"); let mut file = ElfFile::new(filename).unwrap(); let text = file.get_section(".text").unwrap(); let base = text.header.addr; let binary = text.data; let mut index = 0; let mut code = Vec::new(); let mut encoder = MicroEncoder::new(); while index < binary.len() as u64 { let len = Instruction::length(&binary[index as usize ..]); let bytes = &binary[index as usize .. 
(index + len) as usize]; let instruction = Instruction::decode(bytes).unwrap(); let microcode = encoder.encode(&instruction).unwrap(); code.push((base + index, len, instruction, microcode)); index += len; } let mut symbols = HashMap::new(); if let Ok(symbol_entries) = file.get_symbols() { for entry in symbol_entries { if !entry.name.is_empty() { symbols.insert(entry.value, entry.name); } } } crate::timings::stop(); Program { base, entry: file.header.entry, binary, code, symbols } } pub fn get_instruction(&self, addr: u64) -> Option<&Instruction> { self.code.iter() .find(|entry| entry.0 == addr) .map(|entry| &entry.2) } } impl Display for Program { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "Program [")?; if !self.code.is_empty() { writeln!(f)?; } let mut first = true; for (addr, _, instruction, microcode) in &self.code { if f.alternate() && !first { writeln!(f)?; } first = false; writeln!(f,
random
[ { "content": "/// Write the closing of the file.\n\npub fn write_footer<W: Write>(mut f: W) -> Result<()> {\n\n writeln!(f, \"}}\")\n\n}\n\n\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use std::fs::{self, File};\n\n use std::process::Command;\n\n use super::*;\n\n\n\n /// Compile the file with graphviz.\n\n pub fn compile<F>(dir: &str, filename: &str, writer: F) where F: FnOnce(File) -> Result<()> {\n\n fs::create_dir(\"target/out\").ok();\n\n let dir = format!(\"target/out/{}\", dir);\n\n\n\n fs::create_dir(&dir).ok();\n\n let temp_path = \"target/graph.dot\";\n\n let temp_file = File::create(temp_path).unwrap();\n\n writer(temp_file).unwrap();\n", "file_path": "src/flow/visualize.rs", "rank": 0, "score": 106853.8054823231 }, { "content": "/// Reset all measurements.\n\npub fn reset() {\n\n timer!().reset();\n\n}\n\n\n", "file_path": "src/timings.rs", "rank": 1, "score": 104101.92618409928 }, { "content": "/// Retrieve the finished measurements.\n\npub fn get() -> Measurement {\n\n let timer = timer!(\"could not acquire timer\");\n\n\n\n let parts = timer.finished.clone();\n\n let mut measurement = Measurement {\n\n name: \"Measurement\".to_string(),\n\n duration: parts.iter().map(|m| m.duration).sum(),\n\n parts\n\n };\n\n\n\n measurement.resort();\n\n measurement\n\n}\n\n\n", "file_path": "src/timings.rs", "rank": 2, "score": 96817.19314003031 }, { "content": "/// Write the preamble of the graphviz file.\n\npub fn write_header<W: Write>(mut f: W, title: &str, fontsize: u32) -> Result<()> {\n\n writeln!(f, \"digraph Flow {{\")?;\n\n write!(f, \"graph [label=\\\"{}\\\", labelloc=\\\"t\\\", fontsize={}, \", title, fontsize)?;\n\n writeln!(f, \"fontname=\\\"Source Code Pro\\\"]\")?;\n\n writeln!(f, \"node [fontname=\\\"Source Code Pro\\\"]\")?;\n\n writeln!(f, \"edge [fontname=\\\"Source Code Pro\\\"]\")\n\n}\n\n\n", "file_path": "src/flow/visualize.rs", "rank": 3, "score": 90035.59333995567 }, { "content": "/// Write condition edges.\n\npub fn write_edges<W: Write, 
F, T>(\n\n mut f: W,\n\n edges: &HashMap<(usize, usize), T>,\n\n writer: F\n\n) -> Result<()> where F: Fn(&mut W, ((usize, usize), &T)) -> Result<()> {\n\n // Export the edges, but sort them first to make the graphviz output\n\n // deterministic eventhough the hash map cannot be traversed in order.\n\n let mut edges = edges.iter().collect::<Vec<_>>();\n\n edges.sort_by_key(|edge| edge.0);\n\n for (&(start, end), edge) in edges {\n\n write!(f, \"b{} -> b{} [\", start, end)?;\n\n writer(&mut f, ((start, end), edge))?;\n\n writeln!(f, \"]\")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/flow/visualize.rs", "rank": 4, "score": 76644.70575233898 }, { "content": "/// Parse the section headers of the file and return the string table with it.\n\nfn parse_section_headers<R>(header: Header, target: &mut R)\n\n -> ElfResult<Vec<SectionHeader>> where R: Read + Seek {\n\n // Read the section headers.\n\n target.seek(SeekFrom::Start(header.section_headers_offset))?;\n\n let mut headers = Vec::with_capacity(header.section_header_entries as usize);\n\n for _ in 0 .. 
header.section_header_entries {\n\n let header = SectionHeader {\n\n name: String::new(),\n\n name_offset: target.read_u32::<LE>()?,\n\n section_type: target.read_u32::<LE>()?,\n\n flags: target.read_u64::<LE>()?,\n\n addr: target.read_u64::<LE>()?,\n\n offset: target.read_u64::<LE>()?,\n\n size: target.read_u64::<LE>()?,\n\n link: target.read_u32::<LE>()?,\n\n info: target.read_u32::<LE>()?,\n\n addr_align: target.read_u64::<LE>()?,\n\n entry_size: target.read_u64::<LE>()?,\n\n };\n\n\n", "file_path": "src/elf.rs", "rank": 5, "score": 72915.56725942637 }, { "content": "/// Parse the header of the file.\n\nfn parse_header<R>(target: &mut R) -> ElfResult<Header> where R: Read + Seek {\n\n let header = Header {\n\n identification: {\n\n let mut buf = [0; 16];\n\n target.read_exact(&mut buf)?;\n\n buf\n\n },\n\n file_type: target.read_u16::<LE>()?,\n\n machine: target.read_u16::<LE>()?,\n\n version: target.read_u32::<LE>()?,\n\n entry: target.read_u64::<LE>()?,\n\n program_headers_offset: target.read_u64::<LE>()?,\n\n section_headers_offset: target.read_u64::<LE>()?,\n\n flags: target.read_u32::<LE>()?,\n\n header_size: target.read_u16::<LE>()?,\n\n program_header_size: target.read_u16::<LE>()?,\n\n program_header_entries: target.read_u16::<LE>()?,\n\n section_header_size: target.read_u16::<LE>()?,\n\n section_header_entries: target.read_u16::<LE>()?,\n\n section_name_string_table_index: target.read_u16::<LE>()?,\n", "file_path": "src/elf.rs", "rank": 6, "score": 62354.566907885324 }, { "content": "void helper() {}\n", "file_path": "test/loop.c", "rank": 7, "score": 57927.37469111332 }, { "content": "void helper() {}\n", "file_path": "test/twice.c", "rank": 8, "score": 57927.37469111332 }, { "content": "fn main() {\n\n bench(\"bufs\");\n\n bench(\"paths\");\n\n bench(\"deep\");\n\n bench(\"overwrite\");\n\n}\n\n\n", "file_path": "benches/times.rs", "rank": 9, "score": 56005.98115547891 }, { "content": "/// Addresses of things stored in memory (registers).\n\npub trait 
MemoryMapped {\n\n /// Address of the memory mapped thing.\n\n fn address(&self) -> u64;\n\n}\n\n\n\nimpl MemoryMapped for Register {\n\n /// Address of a register in the register memory space.\n\n fn address(&self) -> u64 {\n\n match self {\n\n AL | AX | EAX | RAX => 0x00,\n\n CL | CX | ECX | RCX => 0x08,\n\n DL | DX | EDX | RDX => 0x10,\n\n BL | BX | EBX | RBX => 0x18,\n\n AH | SP | ESP | RSP => 0x20,\n\n CH | BP | EBP | RBP => 0x28,\n\n DH | SI | ESI | RSI => 0x30,\n\n BH | DI | EDI | RDI => 0x38,\n\n R8 => 0x40,\n\n R9 => 0x48,\n\n R10 => 0x50,\n", "file_path": "src/ir.rs", "rank": 10, "score": 50970.37544216332 }, { "content": "fn bench(filename: &str) {\n\n let path = format!(\"target/bin/{}\", filename);\n\n\n\n timings::reset();\n\n\n\n let program = Program::new(path);\n\n let cfg = ControlFlowGraph::new(&program);\n\n let _ddg = DataDependencyGraph::new(&cfg);\n\n\n\n let measurements = timings::get();\n\n\n\n fs::create_dir(\"target/bench\").ok();\n\n let bench_path = format!(\"target/bench/{}.txt\", filename);\n\n let mut bench_file = File::create(bench_path).unwrap();\n\n\n\n writeln!(bench_file, \"Benchmark for {}\\n\", filename).unwrap();\n\n write!(bench_file, \"{}\", measurements).unwrap();\n\n}\n", "file_path": "benches/times.rs", "rank": 11, "score": 48065.23246480223 }, { "content": "/// Remove all cycles from a list of comparable items, where `cmp` determines\n\n/// if two items are equal. For example this turns 1 -> 2 -> 3 -> 2 -> 4 into\n\n/// 1 -> 2 -> 4.\n\nfn decycle<T: Clone, F>(sequence: &[T], cmp: F) -> Vec<T> where F: Fn(&T, &T) -> bool {\n\n let mut out = Vec::new();\n\n\n\n for item in sequence {\n\n if let Some(pos) = out.iter().position(|x| cmp(item, x)) {\n\n for _ in 0 .. 
out.len() - pos - 1 {\n\n out.pop();\n\n }\n\n } else {\n\n out.push(item.clone());\n\n }\n\n }\n\n\n\n out\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::flow::visualize::test::compile;\n", "file_path": "src/flow/control.rs", "rank": 12, "score": 41063.306925411714 }, { "content": "/// Parse a string from the string table.\n\nfn parse_string(strings: &[u8], offset: u32) -> String {\n\n let mut zero = offset as usize;\n\n while strings[zero] != 0 {\n\n zero += 1;\n\n }\n\n\n\n CStr::from_bytes_with_nul(&strings[offset as usize .. zero + 1])\n\n .expect(\"invalid C string in elf string table\")\n\n .to_string_lossy()\n\n .into_owned()\n\n}\n\n\n\n\n\n/// The error type for `ELF` loading.\n\npub enum ElfError {\n\n Invalid,\n\n MissingSection(String),\n\n Io(io::Error),\n\n}\n\n\n", "file_path": "src/elf.rs", "rank": 13, "score": 39008.2927232472 }, { "content": "/// Return the condition under which `ptr` is in the area spanned by the\n\n/// pointer of `area` and the following bytes based on the data type.\n\nfn contains_ptr(area: &TypedMemoryAccess, ptr: &SymExpr) -> SymCondition {\n\n let area_len = SymExpr::Int(Integer::from_ptr(area.1.bytes() as u64));\n\n let left = area.0.clone();\n\n ptr.clone().sub(left).less_than(area_len, false)\n\n}\n\n\n\nimpl Display for AliasMap {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"AliasMap [\")?;\n\n if !self.map.is_empty() { writeln!(f)?; }\n\n let mut map: Vec<_> = self.map.iter().collect();\n\n map.sort_by_key(|pair| pair.0);\n\n for (location, (condition, symbols)) in map {\n\n writeln!(f, \" {}: {}\", location, condition)?;\n\n if !symbols.is_empty() {\n\n let mut symbols: Vec<_> = symbols.iter().collect();\n\n symbols.sort_by_key(|pair| pair.0);\n\n writeln!(f, \" where \")?;\n\n for (symbol, location) in symbols {\n\n writeln!(f, \" {} is {}\", symbol, location)?;\n", "file_path": "src/flow/alias.rs", "rank": 14, "score": 34879.95259233021 }, { "content": "/// Fast way to make an 
error.\n\nfn err<T, S: Into<String>>(message: S) -> ParseResult<T> {\n\n Err(message.into())\n\n}\n\n\n\n\n\n/// The error type for decoding a Z3 Ast into a symbolic expression/condition.\n\npub struct FromAstError {\n\n ast: String,\n\n index: usize,\n\n message: String\n\n}\n\n\n\nimpl FromAstError {\n\n fn new(ast: &str, index: usize, message: String) -> FromAstError {\n\n FromAstError {\n\n ast: ast.to_string(),\n\n index,\n\n message: message.into(),\n\n }\n\n }\n", "file_path": "src/math/smt.rs", "rank": 15, "score": 33683.52244906183 }, { "content": "pub struct Symbol(pub DataType, pub &'static str, pub usize);\n\n\n\nimpl Display for Symbol {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}{}:{}\", self.1, self.2, self.0)\n\n }\n\n}\n\n\n\n/// A reference to an expression or condition node in the traversed tree.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]\n\npub enum Traversed<'a> {\n\n Expr(&'a SymExpr),\n\n Condition(&'a SymCondition),\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::helper::boxed;\n", "file_path": "src/math/mod.rs", "rank": 16, "score": 32737.26508723315 }, { "content": "//! Machine numbers and symbolic expressions.\n\n\n\n#![macro_use]\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\nmacro_rules! z3_binop {\n\n ($ctx:expr, $a:expr, $b:expr, $op:ident) => {\n\n $a.to_z3_ast($ctx).$op(&$b.to_z3_ast($ctx))\n\n };\n\n}\n\n\n\nmod num;\n\nmod expr;\n\nmod cond;\n\nmod smt;\n\n\n\npub use num::*;\n\npub use expr::*;\n\npub use cond::*;\n", "file_path": "src/math/mod.rs", "rank": 17, "score": 32735.143182192707 }, { "content": "//! 
Control and data flow models.\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\nuse crate::math::{Integer, DataType};\n\nuse crate::x86_64::{Register, Operand};\n\n\n\nmod control;\n\nmod alias;\n\nmod data;\n\nmod visualize;\n\n\n\npub use control::*;\n\npub use alias::*;\n\npub use data::*;\n\n\n\n\n\n/// A storage location within the context in which it is valid.\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct AbstractLocation {\n\n /// The address at which the location is valid.\n", "file_path": "src/flow/mod.rs", "rank": 18, "score": 32734.526663491295 }, { "content": "//! Symbolic microcode execution.\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\nuse crate::flow::{AbstractLocation, StorageLocation};\n\nuse crate::ir::{MicroOperation, Location, Temporary, MemoryMapped};\n\nuse crate::math::{SymExpr, SymCondition, Integer, DataType, Symbol, SharedSolver, Traversed};\n\nuse crate::x86_64::{Instruction, Mnemoic, Register};\n\nuse DataType::*;\n\n\n\nmod mem;\n\npub use mem::*;\n\n\n\n\n\n/// The symbolic execution state.\n\n#[derive(Debug, Clone)]\n\npub struct SymState {\n\n /// The values of the temporaries (T0, T1, ...).\n\n pub temporaries: HashMap<usize, SymExpr>,\n", "file_path": "src/sym/mod.rs", "rank": 19, "score": 32733.362459327793 }, { "content": "}\n\n\n\nimpl Display for ValueSource {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n match self {\n\n ValueSource::Storage(storage) => write!(f, \"{}\", storage),\n\n ValueSource::Const(int) => write!(f, \"{}\", int),\n\n }\n\n }\n\n}\n", "file_path": "src/flow/mod.rs", "rank": 20, "score": 32731.12778402448 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]\n\npub enum ValueSource {\n\n Storage(StorageLocation),\n\n Const(Integer),\n\n}\n\n\n\nimpl Display for AbstractLocation {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{} at {:x}\", self.storage, self.addr)?;\n\n if 
!self.trace.is_empty() {\n\n write!(f, \" by \")?;\n\n }\n\n let mut first = true;\n\n for &addr in &self.trace {\n\n if !first { write!(f, \" -> \")?; } first = false;\n\n write!(f, \"{:x}\", addr)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/flow/mod.rs", "rank": 21, "score": 32731.051820833614 }, { "content": "\n\nimpl Display for StorageLocation {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n use StorageLocation::*;\n\n use crate::helper::write_signed_hex;\n\n\n\n match *self {\n\n Direct(reg) => write!(f, \"{}\", reg),\n\n Indirect { data_type, base, scaled_offset, displacement } => {\n\n write!(f, \"[{}\", base)?;\n\n if let Some((index, scale)) = scaled_offset {\n\n write!(f, \"+{}*{}\", index, scale)?;\n\n }\n\n if let Some(disp) = displacement {\n\n write_signed_hex(f, disp)?;\n\n }\n\n write!(f, \":{}]\", data_type)\n\n },\n\n }\n\n }\n", "file_path": "src/flow/mod.rs", "rank": 22, "score": 32730.772689666443 }, { "content": " }\n\n\n\n let kind = if read { StdioKind::Stdin } else { StdioKind::Stdout };\n\n Some(Event::Stdio(kind, locs))\n\n },\n\n\n\n // System exit\n\n 60 => Some(Event::Exit),\n\n s => panic!(\"do_syscall: unimplemented syscall number {}\", s),\n\n }\n\n }\n\n}\n\n\n\n/// A typed symbolic memory access.\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub struct TypedMemoryAccess(pub SymExpr, pub DataType);\n\n\n\nimpl Display for TypedMemoryAccess {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"[{}]:{}\", self.0, self.1)\n\n }\n\n}\n", "file_path": "src/sym/mod.rs", "rank": 23, "score": 32728.16914319002 }, { "content": " let expr = self.temporaries[&temp.1].clone();\n\n assert_eq!(temp.0, expr.data_type(), \"get_temp: incompatible data types\");\n\n expr\n\n }\n\n\n\n /// Set the temporary to a new value.\n\n pub fn set_temp(&mut self, temp: Temporary, value: SymExpr) {\n\n assert_eq!(temp.0, value.data_type(), \"set_temp: incompatible data types\");\n\n self.temporaries.insert(temp.1, 
value);\n\n }\n\n\n\n /// Get a value from a register.\n\n pub fn get_reg(&self, reg: Register) -> SymExpr {\n\n self.memory[1].read_direct(reg.address(), reg.data_type())\n\n }\n\n\n\n /// Set a register to a value.\n\n pub fn set_reg(&mut self, reg: Register, value: SymExpr) {\n\n self.memory[1].write_direct(reg.address(), value);\n\n }\n", "file_path": "src/sym/mod.rs", "rank": 24, "score": 32727.776343342186 }, { "content": "\n\n /// Do a binary operation.\n\n fn do_binop<F>(&mut self, target: Temporary, a: Temporary, b: Temporary, binop: F)\n\n where F: FnOnce(SymExpr, SymExpr) -> SymExpr {\n\n self.set_temp(target, binop(self.get_temp(a), self.get_temp(b)));\n\n }\n\n\n\n /// Move a value from a location to another location.\n\n fn do_move(&mut self, dest: Location, src: Location) {\n\n assert_eq!(dest.data_type(), src.data_type(), \"do_move: incompatible data types for move\");\n\n let value = self.read_location(src);\n\n self.write_location(dest, value);\n\n }\n\n\n\n /// Emulate a Linux syscall.\n\n fn do_syscall(&mut self, num: u64) -> Option<Event> {\n\n match num {\n\n // Read from or write to a file descriptor.\n\n // We generate one symbol per byte read / written.\n\n 0 | 1 => {\n", "file_path": "src/sym/mod.rs", "rank": 25, "score": 32725.941741322782 }, { "content": " /// Write data to a location.\n\n pub fn write_location(&mut self, dest: Location, value: SymExpr) {\n\n assert_eq!(dest.data_type(), value.data_type(),\n\n \"write_location: incompatible data types for write\");\n\n\n\n match dest {\n\n Location::Temp(temp) => self.set_temp(temp, value),\n\n Location::Direct(_, space, addr) => {\n\n self.memory[space].write_direct(addr, value);\n\n },\n\n Location::Indirect(_, space, temp) => {\n\n let addr = self.get_temp(temp);\n\n assert_eq!(addr.data_type(), N64, \"write_location: address has to be 64-bit\");\n\n self.memory[space].write_expr(addr, value);\n\n }\n\n }\n\n }\n\n\n\n /// Return the integer stored in the temporary.\n\n pub fn 
get_temp(&self, temp: Temporary) -> SymExpr {\n", "file_path": "src/sym/mod.rs", "rank": 26, "score": 32725.938998689686 }, { "content": " /// The two memory spaces for main memory and registers.\n\n pub memory: [SymMemory; 2],\n\n /// A mapping from symbols to the abstract locations where they could be\n\n /// found in an actual execution.\n\n pub symbol_map: SymbolMap,\n\n /// The path of trace points which were set for this state. These are u sed\n\n /// for describing the context in symbol generation.\n\n pub trace: Vec<u64>,\n\n /// The current instruction pointer.\n\n pub ip: u64,\n\n /// The shared SMT solver.\n\n pub solver: SharedSolver,\n\n /// The number of used symbols.\n\n stdin_symbols: usize,\n\n stdout_symbols: usize,\n\n}\n\n\n\n/// When and where to find the symbolic values in memory in a real execution.\n\npub type SymbolMap = HashMap<Symbol, AbstractLocation>;\n\n\n", "file_path": "src/sym/mod.rs", "rank": 27, "score": 32725.529922650305 }, { "content": "pub use smt::{Solver, SharedSolver, FromAstError};\n\n\n\n\n\n/// A dynamically typed symbolic value.\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub enum SymDynamic {\n\n Expr(SymExpr),\n\n Condition(SymCondition),\n\n}\n\n\n\nimpl From<SymExpr> for SymDynamic {\n\n fn from(expr: SymExpr) -> SymDynamic { SymDynamic::Expr(expr) }\n\n}\n\n\n\nimpl From<SymCondition> for SymDynamic {\n\n fn from(cond: SymCondition) -> SymDynamic { SymDynamic::Condition(cond) }\n\n}\n\n\n\n/// A symbol value identified by an index.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n", "file_path": "src/math/mod.rs", "rank": 28, "score": 32725.33397647557 }, { "content": " }\n\n\n\n /// Whether this in an indirect access.\n\n pub fn accesses_memory(&self) -> bool {\n\n match self {\n\n StorageLocation::Direct(_) => false,\n\n StorageLocation::Indirect { .. 
} => true,\n\n }\n\n }\n\n\n\n /// The underlying data type of the value at the location.\n\n pub fn data_type(&self) -> DataType {\n\n match *self {\n\n StorageLocation::Direct(reg) => reg.data_type(),\n\n StorageLocation::Indirect { data_type, .. } => data_type,\n\n }\n\n }\n\n}\n\n\n\n/// Where a value comes from.\n", "file_path": "src/flow/mod.rs", "rank": 29, "score": 32724.15607181282 }, { "content": " temporaries: HashMap::new(),\n\n memory: [\n\n SymMemory::new(\"mem\", mem_strategy, solver.clone()),\n\n SymMemory::new(\"reg\", MemoryStrategy::PerfectMatches, solver.clone())\n\n ],\n\n symbol_map: SymbolMap::new(),\n\n trace: Vec::new(),\n\n ip: 0,\n\n stdin_symbols: 0,\n\n stdout_symbols: 0,\n\n solver\n\n }\n\n }\n\n\n\n /// Execute a micro operation.\n\n pub fn step(&mut self, addr: u64, operation: &MicroOperation) -> Option<Event> {\n\n use MicroOperation as Op;\n\n\n\n crate::timings::start(\"sym-step\");\n\n\n", "file_path": "src/sym/mod.rs", "rank": 30, "score": 32723.89631723326 }, { "content": "\n\n /// Return the address expression and data type of the storage location if\n\n /// it is a memory access.\n\n pub fn get_access_for_storage(&self, location: StorageLocation) -> Option<TypedMemoryAccess> {\n\n use StorageLocation::*;\n\n match location {\n\n Direct(_) => None,\n\n Indirect { data_type, base, scaled_offset, displacement } => Some({\n\n let mut addr = self.get_reg(base);\n\n\n\n if let Some((index, scale)) = scaled_offset {\n\n let scale = SymExpr::from_ptr(scale as u64);\n\n let offset = self.get_reg(index).mul(scale);\n\n addr = addr.add(offset);\n\n }\n\n\n\n if let Some(disp) = displacement {\n\n addr = addr.add(SymExpr::from_ptr(disp as u64));\n\n }\n\n\n", "file_path": "src/sym/mod.rs", "rank": 31, "score": 32723.46855654566 }, { "content": " pub addr: u64,\n\n /// The call trace in which the location is valid.\n\n pub trace: Vec<u64>,\n\n /// The storage location.\n\n pub storage: StorageLocation,\n\n}\n\n\n\nimpl AbstractLocation 
{\n\n pub fn new(addr: u64, trace: Vec<u64>, storage: StorageLocation) -> AbstractLocation {\n\n AbstractLocation { addr, trace, storage }\n\n }\n\n}\n\n\n\n/// A location in a real execution.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum StorageLocation {\n\n /// Directly the register.\n\n Direct(Register),\n\n /// The value at the address stored in base plus:\n\n /// - optionally a scaled offset of register * scale.\n", "file_path": "src/flow/mod.rs", "rank": 32, "score": 32723.343510670402 }, { "content": " let read = num == 0;\n\n\n\n let buf = self.get_reg(Register::RSI);\n\n let count = self.get_reg(Register::RDX);\n\n let byte_count = match count {\n\n SymExpr::Int(Integer(N64, bytes)) => bytes,\n\n _ => panic!(\"do_syscall: read: unknown byte count\"),\n\n };\n\n\n\n let mut locs = vec![];\n\n\n\n for i in 0 .. byte_count {\n\n let symbol_ptr = if read {\n\n &mut self.stdin_symbols\n\n } else {\n\n &mut self.stdout_symbols\n\n };\n\n\n\n let symbol = Symbol(N8, if read { \"stdin\" } else { \"stdout\" }, *symbol_ptr);\n\n let value = SymExpr::Sym(symbol);\n", "file_path": "src/sym/mod.rs", "rank": 33, "score": 32723.08898060968 }, { "content": " }\n\n },\n\n }\n\n\n\n crate::timings::stop();\n\n None\n\n }\n\n\n\n /// Adjust the trace based on the instruction.\n\n pub fn track(&mut self, instruction: &Instruction, addr: u64) {\n\n // Adjust the trace.\n\n match instruction.mnemoic {\n\n Mnemoic::Call => self.trace.push(addr),\n\n Mnemoic::Ret => { self.trace.pop(); },\n\n _ => {},\n\n };\n\n }\n\n\n\n /// Evaluate a symbolic expression with temporary symbols.\n\n pub fn evaluate_condition(&self, condition: &SymCondition) -> SymCondition {\n", "file_path": "src/sym/mod.rs", "rank": 34, "score": 32723.090270471486 }, { "content": " let mut evaluated = condition.clone();\n\n evaluated.replace_symbols(&|sym| match sym {\n\n Symbol(data_type, \"T\", index) => self.get_temp(Temporary(data_type, index)),\n\n sym => 
SymExpr::Sym(sym),\n\n });\n\n evaluated\n\n }\n\n\n\n /// Generate a symbol map with just the symbols needed for the condition.\n\n pub fn get_symbol_map_for(&self, condition: &SymCondition) -> SymbolMap {\n\n let mut symbols = HashMap::new();\n\n condition.traverse(&mut |node| {\n\n if let Traversed::Expr(&SymExpr::Sym(symbol)) = node {\n\n if let Some(loc) = self.symbol_map.get(&symbol) {\n\n symbols.insert(symbol, loc.clone());\n\n }\n\n }\n\n });\n\n symbols\n\n }\n", "file_path": "src/sym/mod.rs", "rank": 35, "score": 32722.58981402357 }, { "content": "/// Events occuring during symbolic execution.\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub enum Event {\n\n Jump { target: SymExpr, condition: SymCondition, relative: bool },\n\n Stdio(StdioKind, Vec<(Symbol, TypedMemoryAccess)>),\n\n Exit,\n\n}\n\n\n\n/// Kinds of standard interfaces (stdin or stdout).\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum StdioKind {\n\n Stdin,\n\n Stdout,\n\n}\n\n\n\nimpl SymState {\n\n /// Create a blank symbolic state that will use the given solver and strategy for\n\n /// main memory.\n\n pub fn new(mem_strategy: MemoryStrategy, solver: SharedSolver) -> SymState {\n\n SymState {\n", "file_path": "src/sym/mod.rs", "rank": 36, "score": 32722.563566130295 }, { "content": " /// Create a storage location from an operand if it is a memory operand.\n\n pub fn from_operand(operand: Operand) -> Option<StorageLocation> {\n\n match operand {\n\n Operand::Direct(reg) => Some(StorageLocation::Direct(reg)),\n\n Operand::Indirect { data_type, base, scaled_offset, displacement } => Some(\n\n StorageLocation::Indirect { data_type, base, scaled_offset, displacement }\n\n ),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Change to base 64-bit registers for direct storage.\n\n ///\n\n /// This can be useful for comparing storage locations with registers\n\n /// that are not equal but share the same memory (like RAX and EAX).\n\n pub fn normalized(self) -> 
StorageLocation {\n\n match self {\n\n StorageLocation::Direct(reg) => StorageLocation::Direct(reg.base()),\n\n s => s,\n\n }\n", "file_path": "src/flow/mod.rs", "rank": 37, "score": 32722.195476830704 }, { "content": " /// - optionally an immediate displacement.\n\n Indirect {\n\n data_type: DataType,\n\n base: Register,\n\n scaled_offset: Option<(Register, u8)>,\n\n displacement: Option<i64>,\n\n },\n\n}\n\n\n\nimpl StorageLocation {\n\n /// Create a storage location with just a simple indirect register.\n\n pub fn indirect_reg(data_type: DataType, base: Register) -> StorageLocation {\n\n StorageLocation::Indirect {\n\n data_type,\n\n base,\n\n scaled_offset: None,\n\n displacement: None,\n\n }\n\n }\n\n\n", "file_path": "src/flow/mod.rs", "rank": 38, "score": 32722.134238352828 }, { "content": " *symbol_ptr += 1;\n\n\n\n let target = buf.clone().add(SymExpr::from_ptr(i));\n\n if read {\n\n self.memory[0].write_expr(target.clone(), value);\n\n }\n\n\n\n let location = AbstractLocation {\n\n addr: self.ip,\n\n trace: self.trace.clone(),\n\n storage: StorageLocation::Indirect {\n\n data_type: N8,\n\n base: Register::RSI,\n\n scaled_offset: None,\n\n displacement: if i > 0 { Some(i as i64) } else { None },\n\n },\n\n };\n\n\n\n self.symbol_map.insert(symbol, location);\n\n locs.push((symbol, TypedMemoryAccess(target, N8)));\n", "file_path": "src/sym/mod.rs", "rank": 39, "score": 32722.101046886524 }, { "content": " use crate::math::Integer;\n\n use crate::math::DataType::*;\n\n use SymExpr::*;\n\n\n\n fn n(x: u64) -> SymExpr { Int(Integer(N64, x)) }\n\n fn x() -> SymExpr { Sym(Symbol(N64, \"stdin\", 0)) }\n\n fn y() -> SymExpr { Sym(Symbol(N8, \"stdin\", 1)) }\n\n\n\n #[test]\n\n fn calculations() {\n\n assert_eq!(x().add(n(0)), x());\n\n assert_eq!(n(10).add(n(0)), n(10));\n\n assert_eq!(x().add(n(5)).add(n(10)), Add(boxed(x()), boxed(n(15))));\n\n assert_eq!(x().sub(n(5)).add(n(10)), Add(boxed(x()), boxed(n(5))));\n\n assert_eq!(x().sub(n(10)).sub(n(5)), 
Sub(boxed(x()), boxed(n(15))));\n\n assert_eq!(x().add(n(10)).sub(n(5)), Add(boxed(x()), boxed(n(5))));\n\n assert_eq!(x().sub(n(8)).sub(n(8)).add(n(8)), Sub(boxed(x()), boxed(n(8))));\n\n\n\n assert_ne!(n(10).add(x()).add(x()).add(n(5)), n(10).add(x()).add(x()));\n\n\n", "file_path": "src/math/mod.rs", "rank": 40, "score": 32720.5847278544 }, { "content": " self.set_reg(Register::RIP, SymExpr::from_ptr(addr));\n\n self.ip = addr;\n\n\n\n match operation {\n\n Op::Mov { dest, src } => self.do_move(*dest, *src),\n\n\n\n Op::Const { dest, constant } => self.set_temp(*dest, SymExpr::Int(*constant)),\n\n Op::Cast { target, new, signed } => {\n\n let new_value = self.get_temp(*target).cast(*new, *signed);\n\n self.set_temp(Temporary(*new, target.1), new_value);\n\n },\n\n\n\n Op::Add { sum, a, b } => self.do_binop(*sum, *a, *b, SymExpr::add),\n\n Op::Sub { diff, a, b } => self.do_binop(*diff, *a, *b, SymExpr::sub),\n\n Op::Mul { prod, a, b } => self.do_binop(*prod, *a, *b, SymExpr::mul),\n\n\n\n Op::And { and, a, b } => self.do_binop(*and, *a, *b, SymExpr::bitand),\n\n Op::Or { or, a, b } => self.do_binop(*or, *a, *b, SymExpr::bitor),\n\n Op::Not { not, a } => self.set_temp(*not, self.get_temp(*a).bitnot()),\n\n\n", "file_path": "src/sym/mod.rs", "rank": 41, "score": 32719.929405929663 }, { "content": " TypedMemoryAccess(addr, data_type)\n\n }),\n\n }\n\n }\n\n\n\n /// Retrieve data from a location.\n\n pub fn read_location(&self, src: Location) -> SymExpr {\n\n match src {\n\n Location::Temp(temp) => self.get_temp(temp),\n\n Location::Direct(data_type, space, addr) => {\n\n self.memory[space].read_direct(addr, data_type)\n\n },\n\n Location::Indirect(data_type, space, temp) => {\n\n let addr = self.get_temp(temp);\n\n assert_eq!(addr.data_type(), N64, \"read_location: address has to be 64-bit\");\n\n self.memory[space].read_expr(addr, data_type)\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/sym/mod.rs", "rank": 42, "score": 32719.293003658597 }, { "content": " Op::Set { 
target, condition } => {\n\n self.set_temp(*target, self.evaluate_condition(&condition).as_expr(target.0));\n\n },\n\n Op::Jump { target, condition, relative } => {\n\n crate::timings::stop();\n\n return Some(Event::Jump {\n\n target: self.get_temp(*target),\n\n condition: condition.clone(),\n\n relative: *relative,\n\n });\n\n },\n\n\n\n Op::Syscall => {\n\n if let SymExpr::Int(int) = self.get_reg(Register::RAX) {\n\n if let Some(event) = self.do_syscall(int.1) {\n\n crate::timings::stop();\n\n return Some(event);\n\n }\n\n } else {\n\n panic!(\"step: unhandled symbolic syscall number\");\n", "file_path": "src/sym/mod.rs", "rank": 43, "score": 32717.127414131723 }, { "content": " assert_eq!(y().cast(N32, false).cast(N8, false), y());\n\n assert_eq!(y().cast(N32, false).cast(N64, false), y().cast(N64, false));\n\n assert_eq!(y().cast(N8, false), y());\n\n }\n\n\n\n #[test]\n\n fn ast() {\n\n let solver = Solver::new();\n\n\n\n let expr = n(10).add(x()).add(x()).add(n(5));\n\n assert_eq!(solver.simplify_expr(&expr), n(15).add(n(2).mul(x())));\n\n }\n\n}\n", "file_path": "src/math/mod.rs", "rank": 44, "score": 32717.127414131723 }, { "content": "/// Extend `right` by `bits` bits (ones if signed, zeros otherwise).\n\nfn bit_extend(bits: usize, right: SymExpr, signed: bool) -> ParseResult<SymExpr> {\n\n match bits + right.data_type().bits() {\n\n 16 => Ok(right.cast(N16, signed)),\n\n 32 => Ok(right.cast(N32, signed)),\n\n 64 => Ok(right.cast(N64, signed)),\n\n s => err(format!(\"unhandled bit extension: invalid target size {}\", s)),\n\n }\n\n}\n\n\n", "file_path": "src/math/smt.rs", "rank": 45, "score": 30908.762827186263 }, { "content": " }\n\n}\n\n\n\nimpl Display for EncodingError {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"Failed to encode instruction: {} [{}]\", self.1, self.0)\n\n }\n\n}\n\n\n\nimpl std::error::Error for EncodingError {}\n\ndebug_display!(EncodingError);\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
crate::x86_64::*;\n\n use super::*;\n\n\n\n fn test(bytes: &[u8], display: &str) {\n\n test_with_encoder(&mut MicroEncoder::new(), bytes, display);\n", "file_path": "src/ir.rs", "rank": 48, "score": 16.50454328986327 }, { "content": " }\n\n}\n\n\n\nimpl Display for Mnemoic {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", format!(\"{:?}\", self).to_lowercase())\n\n }\n\n}\n\n\n\nimpl Display for Operand {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n use Operand::*;\n\n use crate::helper::write_signed_hex;\n\n\n\n match *self {\n\n Direct(reg) => write!(f, \"{}\", reg),\n\n Indirect { data_type, base, scaled_offset, displacement } => {\n\n write!(f, \"{} ptr [{}\", data_type.name(), base)?;\n\n if let Some((index, scale)) = scaled_offset {\n\n write!(f, \"+{}*{}\", index, scale)?;\n", "file_path": "src/x86_64.rs", "rank": 49, "score": 16.000453767839915 }, { "content": "//! Integers with machine semantics.\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\nuse std::ops::{BitAnd, BitOr};\n\nuse byteorder::{ByteOrder, LittleEndian};\n\n\n\nuse crate::helper::check_compatible;\n\nuse DataType::*;\n\n\n\n\n\n/// Variable data type integer with machine semantics.\n\n#[derive(Debug, Copy, Clone, Hash)]\n\npub struct Integer(pub DataType, pub u64);\n\n\n\n/// Different width numeric types.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum DataType {\n\n N8,\n\n N16,\n\n N32,\n", "file_path": "src/math/num.rs", "rank": 50, "score": 14.993139512681578 }, { "content": "//! 
Decoding of `x86_64` instructions.\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\nuse byteorder::{ByteOrder, LittleEndian};\n\n\n\nuse crate::flow::{ValueSource, StorageLocation};\n\nuse crate::math::{Integer, DataType};\n\nuse DataType::*;\n\n\n\n\n\n/// A decoded machine code instruction.\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Instruction {\n\n pub bytes: Vec<u8>,\n\n pub mnemoic: Mnemoic,\n\n pub operands: Vec<Operand>,\n\n}\n\n\n\n/// Identifies an instruction.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n", "file_path": "src/x86_64.rs", "rank": 51, "score": 14.761655697561876 }, { "content": "//! Microcode encoding of instructions.\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\nuse crate::math::{SymExpr, SymCondition, Integer, DataType, Symbol};\n\nuse crate::x86_64::{Instruction, Mnemoic, Operand, Register};\n\nuse Register::*;\n\nuse SymCondition::*;\n\n\n\n\n\n/// A sequence of micro operations.\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Microcode {\n\n pub ops: Vec<MicroOperation>,\n\n}\n\n\n\n/// A minimal executable action.\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub enum MicroOperation {\n\n /// Store the value at location `src` in location `dest`.\n", "file_path": "src/ir.rs", "rank": 52, "score": 14.521772070316809 }, { "content": " writeln!(f, \" {}\", operation)?;\n\n }\n\n write!(f, \"]\")\n\n }\n\n}\n\n\n\nimpl Display for MicroOperation {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n use MicroOperation::*;\n\n use crate::helper::signed_name;\n\n\n\n fn show_condition(condition: &SymCondition) -> String {\n\n match condition {\n\n &SymCondition::TRUE => \"\".to_string(),\n\n cond => format!(\" if {}\", cond),\n\n }\n\n }\n\n\n\n match self {\n\n Mov { dest, src } => write!(f, \"mov {} = {}\", dest, src),\n", "file_path": "src/ir.rs", "rank": 53, "score": 14.178013366584835 }, { "content": " for part in other.parts {\n\n if let Some(same) = 
self.parts.iter_mut().find(|m| m.name == part.name) {\n\n same.merge_with(part);\n\n } else {\n\n self.parts.push(part);\n\n }\n\n }\n\n self.resort();\n\n }\n\n\n\n /// Resort the parts by duration high to low.\n\n pub fn resort(&mut self) {\n\n self.parts.sort_by_key(|m| std::cmp::Reverse(m.duration));\n\n }\n\n\n\n fn display_with_indent(&self, f: &mut Formatter, indent: usize) -> fmt::Result {\n\n let ind = \" \".repeat(indent);\n\n writeln!(f, \"{}{}: {:?}\", ind, self.name, self.duration)?;\n\n let mut first = true;\n\n let mut used_newline = false;\n", "file_path": "src/timings.rs", "rank": 54, "score": 13.940591659319612 }, { "content": "impl Display for SymCondition {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n use crate::helper::signed_name;\n\n match self {\n\n Bool(b) => write!(f, \"{}\", b),\n\n Equal(a, b) => write!(f, \"({} == {})\", a, b),\n\n LessThan(a, b, s) => write!(f, \"({} < {}{})\", a, b, signed_name(*s)),\n\n LessEqual(a, b, s) => write!(f, \"({} <= {}{})\", a, b, signed_name(*s)),\n\n GreaterThan(a, b, s) => write!(f, \"({} > {}{})\", a, b, signed_name(*s)),\n\n GreaterEqual(a, b, s) => write!(f, \"({} >= {}{})\", a, b, signed_name(*s)),\n\n And(a, b) => write!(f, \"({} and {})\", a, b),\n\n Or(a, b) => write!(f, \"({} or {})\", a, b),\n\n Not(a) => write!(f, \"(not {})\", a),\n\n }\n\n }\n\n}\n", "file_path": "src/math/cond.rs", "rank": 55, "score": 13.613178457704885 }, { "content": " /// Convert this into a symbol expression.\n\n pub fn to_expr(self) -> SymExpr {\n\n SymExpr::Sym(self.to_symbol())\n\n }\n\n}\n\n\n\nimpl Display for Temporary {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"T{}:{}\", self.1, self.0)\n\n }\n\n}\n\n\n", "file_path": "src/ir.rs", "rank": 56, "score": 12.824978355262651 }, { "content": "//! 
Symbolic memory models.\n\n\n\nuse std::cell::RefCell;\n\nuse std::collections::BTreeMap;\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\nuse crate::math::{SymExpr, SymCondition, DataType, Symbol, SharedSolver};\n\n\n\n\n\n/// Symbolic memory handling writes and reads involving symbolic\n\n/// values and addresses.\n\n#[derive(Debug, Clone)]\n\npub struct SymMemory {\n\n data: RefCell<MemoryData>,\n\n solver: SharedSolver,\n\n strategy: MemoryStrategy,\n\n}\n\n\n\n/// How the memory handled complex symbolic queries.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n", "file_path": "src/sym/mem.rs", "rank": 57, "score": 12.607882799612812 }, { "content": "//! Symbolic boolean expressions.\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\nuse z3::Context as Z3Context;\n\nuse z3::ast::{Ast, Bool as Z3Bool};\n\n\n\nuse crate::helper::{check_compatible, boxed};\n\nuse super::{SymExpr, Symbol, Integer, DataType, Traversed};\n\nuse super::smt::{Z3Parser, FromAstError};\n\nuse SymCondition::*;\n\nuse SymExpr::*;\n\n\n\n\n\n/// A possibly nested symbolic boolean expression.\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub enum SymCondition {\n\n Bool(bool),\n\n And(Box<SymCondition>, Box<SymCondition>),\n\n Or(Box<SymCondition>, Box<SymCondition>),\n\n Not(Box<SymCondition>),\n", "file_path": "src/math/cond.rs", "rank": 58, "score": 12.603447856485682 }, { "content": " }\n\n }\n\n\n\n if used_default_symbol {\n\n data.generate_default_symbol(addr, data_type);\n\n }\n\n\n\n tree\n\n }\n\n\n\n /// Write a value to a symbolic address.\n\n pub fn write_expr(&mut self, addr: SymExpr, value: SymExpr) {\n\n crate::timings::with(\"sym-mem\", || {\n\n let mut data = self.data.borrow_mut();\n\n\n\n let new_write = MemoryWrite { addr, value };\n\n\n\n for (_, write) in data.writes.iter_mut().rev() {\n\n if write.addr == new_write.addr {\n\n *write = new_write;\n", "file_path": "src/sym/mem.rs", "rank": 59, "score": 12.487013831754624 }, { "content": "//! 
Symbolic integer expressions.\n\n\n\nuse std::fmt::{self, Display, Formatter};\n\nuse z3::Context as Z3Context;\n\nuse z3::ast::{BV as Z3BitVec};\n\n\n\nuse crate::helper::{check_compatible, boxed};\n\nuse super::{Symbol, SymCondition, Integer, DataType, Traversed};\n\nuse super::smt::{Z3Parser, FromAstError};\n\nuse SymExpr::*;\n\nuse SymCondition::*;\n\n\n\n\n\n/// A possibly nested symbolic machine integer expression.\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub enum SymExpr {\n\n Int(Integer),\n\n Sym(Symbol),\n\n Add(Box<SymExpr>, Box<SymExpr>),\n\n Sub(Box<SymExpr>, Box<SymExpr>),\n", "file_path": "src/math/expr.rs", "rank": 60, "score": 12.483519391276065 }, { "content": " self.writes.insert(0, MemoryWrite {\n\n addr,\n\n value: value.clone(),\n\n });\n\n self.symbols += 1;\n\n value\n\n }\n\n}\n\n\n\nimpl Display for SymMemory {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"SymMemory [\")?;\n\n let data = self.data.borrow();\n\n if !data.writes.is_empty() { writeln!(f)?; }\n\n for (epoch, write) in &data.writes {\n\n writeln!(f, \" [{}] {} => {}\", epoch, write.addr, write.value)?;\n\n }\n\n writeln!(f, \"]\")\n\n }\n\n}\n", "file_path": "src/sym/mem.rs", "rank": 61, "score": 12.416471535463621 }, { "content": "}\n\n\n\nimpl Display for FromAstError {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n writeln!(f, \"Failed to parse Z3 value at index {}: {} [\", self.index, self.message)?;\n\n for line in self.ast.lines() {\n\n writeln!(f, \" {}\", line)?;\n\n }\n\n write!(f, \"]\")\n\n }\n\n}\n\n\n\nimpl std::error::Error for FromAstError {}\n\ndebug_display!(FromAstError);\n", "file_path": "src/math/smt.rs", "rank": 62, "score": 12.387333071836695 }, { "content": "//! 
Parsing of the 64-bit `ELF` file format.\n\n\n\nuse std::ffi::CStr;\n\nuse std::fmt::{self, Display, Formatter};\n\nuse std::fs::File;\n\nuse std::io::{self, Cursor, Read, Seek, SeekFrom};\n\nuse std::path::Path;\n\nuse byteorder::{ReadBytesExt, LE};\n\n\n\n\n\n/// Handle for an `ELF` file.\n\n#[derive(Debug)]\n\npub struct ElfFile<R> where R: Read + Seek {\n\n target: R,\n\n pub header: Header,\n\n pub section_headers: Vec<SectionHeader>,\n\n}\n\n\n\n/// Header of a file.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n", "file_path": "src/elf.rs", "rank": 63, "score": 12.28983098062417 }, { "content": "//! Functionality for using the Z3 SMT solver.\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display, Debug, Formatter};\n\nuse z3::Context as Z3Context;\n\nuse z3::ast::Ast;\n\n\n\nuse super::{SymExpr, SymCondition, SymDynamic, Symbol, Integer, DataType};\n\nuse SymExpr::*;\n\nuse SymCondition::*;\n\nuse DataType::*;\n\n\n\n\n\n/// Solves and simplifies conditions and expressions using Z3.\n\npub struct Solver {\n\n ctx: Z3Context,\n\n}\n\n\n\n/// A reference-counted condition solver.\n\npub type SharedSolver = std::rc::Rc<Solver>;\n", "file_path": "src/math/smt.rs", "rank": 64, "score": 12.168041892888606 }, { "content": " s => forward!(s, replace_symbols, symbols),\n\n }\n\n }\n\n}\n\n\n\nimpl Display for SymExpr {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n use crate::helper::signed_name;\n\n\n\n match self {\n\n Int(int) => write!(f, \"{}\", int),\n\n Sym(sym) => write!(f, \"{}\", sym),\n\n Add(a, b) => write!(f, \"({} + {})\", a, b),\n\n Sub(a, b) => write!(f, \"({} - {})\", a, b),\n\n Mul(a, b) => write!(f, \"({} * {})\", a, b),\n\n BitAnd(a, b) => write!(f, \"({} & {})\", a, b),\n\n BitOr(a, b) => write!(f, \"({} | {})\", a, b),\n\n BitNot(a) => write!(f, \"(!{})\", a),\n\n Cast(x, new, signed) => write!(f, \"({} as {}{})\", x, new, signed_name(*signed)),\n\n AsExpr(c, data_type) => write!(f, \"({} as {})\", c, data_type),\n\n 
IfThenElse(c, a, b) => write!(f, \"if {} then {} else {}\", c, a, b),\n\n }\n\n }\n\n}\n", "file_path": "src/math/expr.rs", "rank": 65, "score": 12.03534356908726 }, { "content": "/// Run and measure `what` by executing the closure.\n\npub(crate) fn with<S: Into<String>, F, T>(what: S, f: F) -> T where F: FnOnce() -> T {\n\n start(what);\n\n let value = f();\n\n stop();\n\n value\n\n}\n\n\n\n/// Tell the timer, that `what` is now being executed.\n\npub(crate) fn start<S: Into<String>>(what: S) {\n\n let started = Instant::now();\n\n\n\n let mut timer = timer!();\n\n timer.stack.push(Timing {\n\n name: what.into(),\n\n started,\n\n children: HashMap::new(),\n\n });\n\n}\n\n\n", "file_path": "src/timings.rs", "rank": 66, "score": 11.976415613505006 }, { "content": " }\n\n }\n\n\n\n /// Number of bits this data types needs to be stored.\n\n pub fn bits(&self) -> usize {\n\n self.bytes() * 8\n\n }\n\n}\n\n\n\nimpl Display for DataType {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", match self {\n\n N8 => \"n8\",\n\n N16 => \"n16\",\n\n N32 => \"n32\",\n\n N64 => \"n64\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/math/num.rs", "rank": 67, "score": 11.948411052620735 }, { "content": "//! 
Alias analysis.\n\n\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::fmt::{self, Display, Formatter};\n\nuse std::rc::Rc;\n\n\n\nuse crate::math::{SymExpr, SymCondition, Integer, DataType, SharedSolver, Solver};\n\nuse crate::sym::{SymState, MemoryStrategy, SymbolMap, TypedMemoryAccess};\n\nuse super::{ControlFlowGraph, ValueSource, AbstractLocation};\n\nuse DataType::*;\n\n\n\n\n\n/// Maps all abstract locations to the conditions under which there is a\n\n/// memory access happening at them aliasing with the main access.\n\n#[derive(Debug, Clone)]\n\npub struct AliasMap {\n\n pub map: HashMap<AbstractLocation, (SymCondition, SymbolMap)>,\n\n}\n\n\n\nimpl AliasMap {\n\n /// Create a new alias map for a target abstract location.\n\n pub fn new(cfg: &ControlFlowGraph, target: &AbstractLocation) -> AliasMap {\n\n crate::timings::with(\"alias-map\", || AliasExplorer::new(cfg, target).run())\n\n }\n\n}\n\n\n\n/// Analyzes the aliasing conditions from a writing memory access with all reading ones.\n", "file_path": "src/flow/alias.rs", "rank": 68, "score": 11.805549421948285 }, { "content": " };\n\n self.index += off;\n\n value\n\n }\n\n\n\n /// Decode a variable width signed value.\n\n fn decode_signed_value(&mut self, width: DataType) -> i64 {\n\n let bytes = &self.bytes[self.index ..];\n\n let (value, off) = match width {\n\n N8 => (bytes[0] as i8 as i64, 1),\n\n N16 => (LittleEndian::read_i16(bytes) as i64, 2),\n\n N32 => (LittleEndian::read_i32(bytes) as i64, 4),\n\n N64 => (LittleEndian::read_i64(bytes), 8),\n\n };\n\n self.index += off;\n\n value\n\n }\n\n}\n\n\n\n/// A REX prefix.\n", "file_path": "src/x86_64.rs", "rank": 69, "score": 11.360044865340315 }, { "content": " }\n\n}\n\n\n\n\n\n/// The error type for instruction decoding.\n\npub struct DecodingError(Vec<u8>);\n\npub(in super) type DecodeResult<T> = Result<T, DecodingError>;\n\n\n\nimpl DecodingError {\n\n /// Create a new decoding error from bytes.\n\n fn new(bytes: Vec<u8>) -> DecodingError 
{\n\n DecodingError(bytes)\n\n }\n\n}\n\n\n\nimpl Display for DecodingError {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"Failed to decode instruction: {:02x?}\", self.0)\n\n }\n\n}\n", "file_path": "src/x86_64.rs", "rank": 70, "score": 10.965420555789924 }, { "content": " fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n use Location::*;\n\n match self {\n\n Temp(temp) => write!(f, \"{}\", temp),\n\n Direct(data, space, offset) => write!(f, \"[m{}][{:#x}:{}]\", space, offset, data),\n\n Indirect(data, space, temp) => write!(f, \"[m{}][({}):{}]\", space, temp, data),\n\n }\n\n }\n\n}\n\n\n\n/// Temporary variable identified by an index.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Temporary(pub DataType, pub usize);\n\n\n\nimpl Temporary {\n\n /// Create a symbol matching this temporary.\n\n pub fn to_symbol(self) -> Symbol {\n\n Symbol(self.0, \"T\", self.1)\n\n }\n\n\n", "file_path": "src/ir.rs", "rank": 72, "score": 10.694827498860816 }, { "content": " IP, EIP, RIP,\n\n}\n\n\n\nimpl Display for Register {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", format!(\"{:?}\", self).to_lowercase())\n\n }\n\n}\n\n\n\nimpl Register {\n\n /// The data type (bit width) of the register.\n\n pub fn data_type(&self) -> DataType {\n\n use Register::*;\n\n match self {\n\n RAX | RCX | RDX | RBX | RSP | RBP | RSI | RDI |\n\n R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15 | RIP => N64,\n\n EAX | ECX | EDX | EBX | ESP | EBP | ESI | EDI | EIP => N32,\n\n AX | CX | DX | BX | SP | BP | SI | DI | IP => N16,\n\n AL | CL | DL | BL | AH | CH | DH | BH => N8,\n\n }\n", "file_path": "src/x86_64.rs", "rank": 74, "score": 10.27169567416137 }, { "content": " /// This will go through the write map from newest to oldest and build\n\n /// an if-then-else chain with all values that possibly match and the\n\n /// conditions under which they match.\n\n fn read_conditional(&self, addr: SymExpr, data_type: 
DataType) -> SymExpr {\n\n let mut data = self.data.borrow_mut();\n\n\n\n let default = data.get_default_value(data_type);\n\n let mut tree = default.clone();\n\n let mut active = &mut tree;\n\n let mut used_default_symbol = true;\n\n\n\n // We traverse the memory writes from latest to oldest. If we find\n\n // a write that perfectly matches our read, we can quit early\n\n // because anything before would have been overwritten for sure.\n\n for (_, write) in data.writes.iter().rev() {\n\n // If it matches perfectly, we can stop here.\n\n if write.addr == addr {\n\n *active = write.value.clone();\n\n used_default_symbol = false;\n\n break;\n", "file_path": "src/sym/mem.rs", "rank": 75, "score": 10.173532484158494 }, { "content": " pub fn length(bytes: &[u8]) -> u64 {\n\n lde::X64.ld(bytes) as u64\n\n }\n\n\n\n /// Pairs of (source, sink) describing data dependencies in the instruction.\n\n pub fn flows(&self) -> Vec<(ValueSource, StorageLocation)> {\n\n use Mnemoic::*;\n\n use Register::*;\n\n\n\n let loc = StorageLocation::from_operand;\n\n let reg = StorageLocation::Direct;\n\n let stg = ValueSource::Storage;\n\n let src = |op| match op {\n\n Operand::Immediate(int) => Some(ValueSource::Const(int)),\n\n op => loc(op).map(|s| ValueSource::Storage(s)),\n\n };\n\n\n\n macro_rules! 
get {\n\n ($op:expr) => { if let Some(s) = loc($op) { s } else { return vec![] } };\n\n }\n", "file_path": "src/x86_64.rs", "rank": 76, "score": 10.159783973961032 }, { "content": " GreaterThan(a, b, s) => a.evaluate(symbols).greater_than(b.evaluate(symbols), *s),\n\n GreaterEqual(a, b, s) => a.evaluate(symbols).greater_equal(b.evaluate(symbols), *s),\n\n And(a, b) => a.evaluate(symbols) && b.evaluate(symbols),\n\n Or(a, b) => a.evaluate(symbols) && b.evaluate(symbols),\n\n Not(a) => !a.evaluate(symbols),\n\n }\n\n }\n\n\n\n /// Call a function for every node in the expression/condition tree.\n\n pub fn traverse<F>(&self, f: &mut F) where F: FnMut(Traversed) {\n\n f(Traversed::Condition(self));\n\n forward!(self, traverse, f);\n\n }\n\n\n\n /// Replace the symbols with new expressions.\n\n pub fn replace_symbols<S>(&mut self, symbols: &S) where S: Fn(Symbol) -> SymExpr {\n\n forward!(self, replace_symbols, symbols);\n\n }\n\n}\n\n\n", "file_path": "src/math/cond.rs", "rank": 77, "score": 9.828350619107432 }, { "content": " Cast(a, data_type, signed) => a.evaluate(symbols).cast(*data_type, *signed),\n\n AsExpr(a, data_type) => Integer::from_bool(a.evaluate(symbols), *data_type),\n\n IfThenElse(c, a, b) => if c.evaluate(symbols) {\n\n a.evaluate(symbols)\n\n } else {\n\n b.evaluate(symbols)\n\n }\n\n }\n\n }\n\n\n\n /// Call a function for every node in the expression/condition tree.\n\n pub fn traverse<F>(&self, f: &mut F) where F: FnMut(Traversed) {\n\n f(Traversed::Expr(self));\n\n forward!(self, traverse, f);\n\n }\n\n\n\n /// Replace the symbols with new expressions.\n\n pub fn replace_symbols<S>(&mut self, symbols: &S) where S: Fn(Symbol) -> SymExpr {\n\n match self {\n\n Sym(sym) => *self = symbols(*sym),\n", "file_path": "src/math/expr.rs", "rank": 78, "score": 9.691642769315372 }, { "content": " }\n\n\n\n /// Read from memory using the perfect matches strategy.\n\n ///\n\n /// This will only return the value if the address expression match\n\n /// perfectly. 
This is not sound but way faster than `read_conditional`.\n\n fn read_perfect(&self, addr: SymExpr, data_type: DataType) -> SymExpr {\n\n let mut data = self.data.borrow_mut();\n\n\n\n for (_, write) in data.writes.iter().rev() {\n\n if write.addr == addr {\n\n return write.value.clone();\n\n }\n\n }\n\n\n\n data.generate_default_symbol(addr, data_type)\n\n }\n\n\n\n /// Read from memory using the conditional trees strategy.\n\n ///\n", "file_path": "src/sym/mem.rs", "rank": 79, "score": 9.688064267797515 }, { "content": " fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n self.display_with_indent(f, 0)?;\n\n writeln!(f)?;\n\n let mut total: Vec<_> = self.total().into_iter().collect();\n\n total.sort_by_key(|p| std::cmp::Reverse(p.1));\n\n writeln!(f, \"Total:\")?;\n\n for (name, time) in &total {\n\n let ratio = (time.as_nanos() as f64) / (self.duration.as_nanos() as f64);\n\n writeln!(f, \" {}: {:?} ({:.2} %)\", name, time, 100.0 * ratio)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/timings.rs", "rank": 80, "score": 9.135464128485724 }, { "content": "//! Measurements of which parts of analysis take how long.\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display, Formatter};\n\nuse std::sync::Mutex;\n\nuse std::time::{Instant, Duration};\n\nuse lazy_static::lazy_static;\n\n\n\n\n\nlazy_static! {\n\n /// Manages all measurements.\n\n static ref TIMER: Mutex<Timer> = Mutex::new(Timer::new());\n\n}\n\n\n\n/// Try to get the timer or return early.\n\nmacro_rules! 
timer {\n\n () => { if let Ok(timer) = TIMER.lock() { timer } else { return; } };\n\n ($message:expr) => { TIMER.lock().expect($message) }\n\n}\n\n\n\n/// Retrieve the finished measurements.\n", "file_path": "src/timings.rs", "rank": 81, "score": 8.841559214583578 }, { "content": "pub(in super) type ElfResult<T> = Result<T, ElfError>;\n\n\n\nimpl Display for ElfError {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n match self {\n\n ElfError::Invalid => write!(f, \"Invalid ELF file\"),\n\n ElfError::MissingSection(name) => write!(f, \"Missing section: {}\", name),\n\n ElfError::Io(err) => write!(f, \"I/O error: {}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for ElfError {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match self {\n\n ElfError::Io(err) => Some(err),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\ndebug_display!(ElfError);\n\n\n\nimpl From<io::Error> for ElfError {\n\n fn from(err: io::Error) -> ElfError {\n\n ElfError::Io(err)\n\n }\n\n}\n", "file_path": "src/elf.rs", "rank": 82, "score": 8.810561129423775 }, { "content": "\n\nimpl std::error::Error for DecodingError {}\n\ndebug_display!(DecodingError);\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn test(binary: &[u8], display: &str) {\n\n let inst = Instruction::decode(binary).unwrap();\n\n assert_eq!(inst.to_string(), display);\n\n }\n\n\n\n #[test]\n\n fn decode() {\n\n // Calculations\n\n test(&[0x01, 0xd0], \"add eax, edx\");\n\n test(&[0x4c, 0x03, 0x47, 0x0a], \"add r8, qword ptr [rdi+0xa]\");\n\n test(&[0x83, 0xc0, 0x01], \"add eax, 0x1\");\n", "file_path": "src/x86_64.rs", "rank": 83, "score": 8.60918733623554 }, { "content": "\n\nimpl ControlFlowGraph {\n\n /// Generate a control flow graph of a program.\n\n pub fn new(program: &Program) -> ControlFlowGraph {\n\n crate::timings::with(\"control-flow-graph\", || ControlFlowExplorer::new(program).run())\n\n }\n\n\n\n /// Visualize this flow graph in a graphviz DOT file.\n\n pub fn 
visualize<W: Write>(\n\n &self,\n\n target: W,\n\n program: &Program,\n\n title: &str,\n\n style: VisualizationStyle\n\n ) -> io::Result<()> {\n\n use super::visualize::*;\n\n let mut f = target;\n\n\n\n write_header(&mut f, &format!(\"Control flow graph for {}\", title), 20)?;\n\n\n", "file_path": "src/flow/control.rs", "rank": 84, "score": 8.442886982227918 }, { "content": " /// - optionally an immediate displacement.\n\n Indirect {\n\n data_type: DataType,\n\n base: Register,\n\n scaled_offset: Option<(Register, u8)>,\n\n displacement: Option<i64>,\n\n },\n\n /// A direct immediate value.\n\n Immediate(Integer),\n\n /// A direct offset.\n\n Offset(i64),\n\n}\n\n\n\nimpl Instruction {\n\n /// Tries to decode an instruction from raw bytes.\n\n pub fn decode(bytes: &[u8]) -> DecodeResult<Instruction> {\n\n Decoder::new(bytes).decode()\n\n }\n\n\n\n /// The byte length of the first instruction in the given slice.\n", "file_path": "src/x86_64.rs", "rank": 85, "score": 8.404748267181477 }, { "content": "/// Arithemtic operation flags returned by some functions on integers.\n\n#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]\n\npub struct Flags {\n\n pub zero: bool,\n\n pub sign: bool,\n\n pub overflow: bool,\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn flags() {\n\n assert_eq!(Integer(N8, 150).flagged_add(Integer(N8, 100)).1,\n\n Flags { zero: false, sign: true, overflow: false });\n\n\n\n assert_eq!(Integer(N8, -56i8 as u64).flagged_add(Integer(N8, 56)).1,\n\n Flags { zero: true, sign: false, overflow: false });\n", "file_path": "src/math/num.rs", "rank": 86, "score": 8.390845966728362 }, { "content": " }\n\n }\n\n\n\n /// Encode an instruction into microcode.\n\n pub fn encode(&mut self, inst: &Instruction) -> EncodeResult<Microcode> {\n\n self.encode_internal(inst)\n\n .map_err(|msg| EncodingError::new(inst.clone(), msg))\n\n }\n\n\n\n /// The actual encoding but with a different result type that the public 
interface.\n\n fn encode_internal(&mut self, inst: &Instruction) -> EncoderResult<Microcode> {\n\n use MicroOperation as Op;\n\n use Mnemoic::*;\n\n\n\n match inst.mnemoic {\n\n // Load both operands, perform an operation and write the result back.\n\n Add => {\n\n let (a, b) = self.encode_binop(inst, |sum, a, b| Op::Add { sum, a, b });\n\n self.last_flag_op = Some(a.to_expr().add(b.to_expr()));\n\n },\n", "file_path": "src/ir.rs", "rank": 87, "score": 8.285826196260658 }, { "content": " /// Read from a direct address.\n\n pub fn read_direct(&self, addr: u64, data_type: DataType) -> SymExpr {\n\n self.read_expr(SymExpr::from_ptr(addr), data_type)\n\n }\n\n\n\n /// Write a value to a direct address.\n\n pub fn write_direct(&mut self, addr: u64, value: SymExpr) {\n\n self.write_expr(SymExpr::from_ptr(addr), value)\n\n }\n\n\n\n /// Read from a symbolic address.\n\n pub fn read_expr(&self, addr: SymExpr, data_type: DataType) -> SymExpr {\n\n crate::timings::with(\"sym-mem\", || {\n\n let expr = match self.strategy {\n\n MemoryStrategy::PerfectMatches => self.read_perfect(addr, data_type),\n\n MemoryStrategy::ConditionalTrees => self.read_conditional(addr, data_type),\n\n };\n\n\n\n if expr.data_type() == data_type { expr } else { expr.cast(data_type, false) }\n\n })\n", "file_path": "src/sym/mem.rs", "rank": 88, "score": 8.24060160381898 }, { "content": "/// Pinpoints a target in memory or temporaries.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum Location {\n\n Temp(Temporary),\n\n Direct(DataType, usize, u64),\n\n Indirect(DataType, usize, Temporary),\n\n}\n\n\n\nimpl Location {\n\n /// The underlying data type of the value at the location.\n\n pub fn data_type(&self) -> DataType {\n\n match *self {\n\n Location::Temp(temp) => temp.0,\n\n Location::Direct(data, _, _) => data,\n\n Location::Indirect(data, _, _) => data,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Location {\n", "file_path": "src/ir.rs", "rank": 89, "score": 8.215502569555973 }, { 
"content": " self.skip_white();\n\n self.expect(')')?;\n\n Ok(expr)\n\n }\n\n\n\n /// Parse a bitvector function with variable number of arguments.\n\n fn parse_bv_varop<F>(&mut self, op: F) -> ParseResult<SymExpr>\n\n where F: Fn(SymExpr, SymExpr) -> SymExpr {\n\n let mut expr = op(self.parse_bitvec()?, self.parse_bitvec()?);\n\n\n\n self.skip_white();\n\n while self.peek() != Some(')') {\n\n expr = op(expr, self.parse_bitvec()?);\n\n self.skip_white();\n\n }\n\n\n\n Ok(expr)\n\n }\n\n\n\n /// Parse a bitvector immediate value.\n", "file_path": "src/math/smt.rs", "rank": 90, "score": 8.134452791922854 }, { "content": " typed!(cast => self.0, signed, {\n\n cast(self.1).$op(&cast(other.1))\n\n })\n\n }\n\n };\n\n}\n\n\n\nimpl Integer {\n\n /// Create a pointer-sized integer.\n\n pub fn from_ptr(value: u64) -> Integer {\n\n Integer(N64, value)\n\n }\n\n\n\n /// Create a boolean-based integer.\n\n pub fn from_bool(value: bool, data_type: DataType) -> Integer {\n\n Integer(data_type, value as u64)\n\n }\n\n\n\n /// Read an integer of a specific type from bytes.\n\n pub fn from_bytes(bytes: &[u8], data_type: DataType) -> Integer {\n", "file_path": "src/math/num.rs", "rank": 91, "score": 7.958586757778363 }, { "content": " write_edges(&mut f, &self.edges, |f, (_, condition)| {\n\n if condition != &SymCondition::TRUE {\n\n write!(f, \"label=\\\"{}\\\", \", condition)?;\n\n }\n\n Ok(())\n\n })?;\n\n\n\n write_footer(&mut f)\n\n }\n\n}\n\n\n\n/// How to visualize the control flow graph.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum VisualizationStyle {\n\n /// Only display the addresses and call traces of blocks.\n\n Addresses,\n\n /// Show the assembly instructions.\n\n Instructions,\n\n /// Show the whole microcode representation of the instructions.\n\n Microcode,\n\n}\n\n\n\n/// Constructs a control flow graph representation of a program.\n\n#[derive(Clone)]\n", "file_path": "src/flow/control.rs", "rank": 92, "score": 7.938720684664483 }, { "content": " 
params.set_bool(\"elim_sign_ext\", false);\n\n params\n\n }\n\n}\n\n\n\nimpl Debug for Solver {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"Solver\")\n\n }\n\n}\n\n\n\n/// Parses Z3 string representations.\n\n#[derive(Debug, Clone)]\n\npub struct Z3Parser<'a> {\n\n ast: &'a str,\n\n active: &'a str,\n\n bindings: HashMap<String, SymDynamic>,\n\n}\n\n\n\nimpl<'a> Z3Parser<'a> {\n", "file_path": "src/math/smt.rs", "rank": 93, "score": 7.870064824921778 }, { "content": "//! Control flow graph calculation.\n\n\n\nuse std::collections::HashMap;\n\nuse std::io::{self, Write};\n\nuse std::rc::Rc;\n\n\n\nuse crate::Program;\n\nuse crate::ir::{Microcode, MicroEncoder};\n\nuse crate::math::{SymExpr, SymCondition, Integer, DataType, Solver};\n\nuse crate::sym::{SymState, MemoryStrategy, Event};\n\nuse crate::x86_64::{Instruction, Mnemoic};\n\n\n\n\n\n/// The control flow graph representation of a program.\n\n#[derive(Debug, Clone)]\n\npub struct ControlFlowGraph {\n\n /// The basic blocks in their respective contexts.\n\n pub nodes: Vec<ControlFlowNode>,\n\n /// The basic blocks without context.\n\n pub blocks: HashMap<u64, BasicBlock>,\n", "file_path": "src/flow/control.rs", "rank": 94, "score": 7.765768694541766 }, { "content": " for part in &self.parts {\n\n if !first && (!part.parts.is_empty() || used_newline) {\n\n writeln!(f)?;\n\n used_newline = true;\n\n }\n\n first = false;\n\n part.display_with_indent(f, indent + 4)?;\n\n }\n\n if !self.parts.is_empty() {\n\n let remaining = self.duration - self.parts.iter().map(|m| m.duration).sum();\n\n if remaining > Duration::from_nanos(0) {\n\n if used_newline { writeln!(f)?; }\n\n writeln!(f, \"{} other: {:?}\", ind, remaining)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Display for Measurement {\n", "file_path": "src/timings.rs", "rank": 95, "score": 7.751032754576891 }, { "content": " }\n\n\n\n if self.solver.check_equal_sat(&write.addr, &addr) {\n\n let condition = 
write.addr.clone().equal(addr.clone());\n\n let simplified = self.solver.simplify_condition(&condition);\n\n\n\n // If it didn't match perfectly but still always is the same thing\n\n // we can also stop here.\n\n if simplified == SymCondition::TRUE {\n\n *active = write.value.clone();\n\n used_default_symbol = false;\n\n break;\n\n\n\n } else {\n\n *active = simplified.if_then_else(write.value.clone(), default.clone());\n\n active = match active {\n\n SymExpr::IfThenElse(_, _, ref mut b) => b,\n\n _ => panic!(\"read_conditional: expected if-then-else\"),\n\n };\n\n }\n", "file_path": "src/sym/mem.rs", "rank": 96, "score": 7.6791256686299265 }, { "content": " }\n\n\n\n /// Decodes an immediate value with given bit width.\n\n fn decode_immediate(&mut self, width: DataType) -> Operand {\n\n Operand::Immediate(Integer(width, self.decode_unsigned_value(width)))\n\n }\n\n\n\n /// Decodes an offset value similar to [`decode_immediate`].\n\n fn decode_offset(&mut self, width: DataType) -> Operand {\n\n Operand::Offset(self.decode_signed_value(width))\n\n }\n\n\n\n /// Decode a variable width unsigned value.\n\n fn decode_unsigned_value(&mut self, width: DataType) -> u64 {\n\n let bytes = &self.bytes[self.index ..];\n\n let (value, off) = match width {\n\n N8 => (bytes[0] as u64, 1),\n\n N16 => (LittleEndian::read_u16(bytes) as u64, 2),\n\n N32 => (LittleEndian::read_u32(bytes) as u64, 4),\n\n N64 => (LittleEndian::read_u64(bytes), 8),\n", "file_path": "src/x86_64.rs", "rank": 97, "score": 7.5854561309033075 }, { "content": "//! 
Data flow analysis.\n\n\n\nuse std::collections::HashMap;\n\nuse std::io::{self, Write};\n\nuse std::rc::Rc;\n\n\n\nuse crate::x86_64::Register;\n\nuse crate::math::{SymCondition, Integer, Symbol, SharedSolver, Solver};\n\nuse crate::sym::{SymState, Event, MemoryStrategy, TypedMemoryAccess, SymbolMap, StdioKind};\n\nuse super::*;\n\n\n\n\n\n/// Contains the conditions under which data flows between abstract locations.\n\n#[derive(Debug, Clone)]\n\npub struct DataDependencyGraph {\n\n /// The nodes in the graph, i.e. all abstract locations in the executable.\n\n pub nodes: Vec<DependencyNode>,\n\n /// The conditions for data flow between the abstract locations.\n\n /// The key pairs are indices into the `nodes` vector.\n\n pub edges: HashMap<(usize, usize), (SymCondition, SymbolMap)>,\n", "file_path": "src/flow/data.rs", "rank": 98, "score": 7.511387278098276 }, { "content": " }\n\n }\n\n }\n\n writeln!(f, \"]\")\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::fs::{self, File};\n\n use std::io::Write;\n\n use crate::Program;\n\n use crate::x86_64::Register;\n\n use crate::math::Symbol;\n\n use crate::flow::StorageLocation;\n\n use super::*;\n\n\n\n fn test(filename: &str, location: AbstractLocation) -> AliasMap {\n\n let path = format!(\"target/bin/{}\", filename);\n", "file_path": "src/flow/alias.rs", "rank": 99, "score": 7.429332342038366 } ]
Rust
native-windows-gui/examples/treeview_d.rs
gyk/native-windows-gui
163d0fcb5c2af8a859f603ce23a7ec218e9b6813
/*! An application that show how to use the TreeView control. Requires the following features: `cargo run --example treeview_d --features "tree-view tree-view-iterator listbox image-list frame"` */ extern crate native_windows_derive as nwd; extern crate native_windows_gui as nwg; use nwd::NwgUi; use nwg::NativeUi; #[derive(Default, NwgUi)] pub struct TreeViewApp { #[nwg_control(size: (600, 350), position: (300, 300), title: "TreeView - Musteloidea")] #[nwg_events( OnWindowClose: [TreeViewApp::exit], OnInit: [TreeViewApp::load_data] )] window: nwg::Window, #[nwg_resource(initial: 5, size: (16, 16))] view_icons: nwg::ImageList, #[nwg_layout(parent: window)] layout: nwg::GridLayout, #[nwg_control(focus: true)] #[nwg_layout_item(layout: layout, col: 0, col_span: 3, row: 0, row_span: 6)] #[nwg_events( OnTreeViewClick: [TreeViewApp::log_events(SELF, EVT)], OnTreeViewDoubleClick: [TreeViewApp::log_events(SELF, EVT)], OnTreeViewRightClick: [TreeViewApp::log_events(SELF, EVT)], OnTreeFocusLost: [TreeViewApp::log_events(SELF, EVT)], OnTreeFocus: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemDelete: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemExpanded: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemChanged: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemSelectionChanged: [TreeViewApp::log_events(SELF, EVT)], )] tree_view: nwg::TreeView, #[nwg_control(flags: "VISIBLE")] #[nwg_layout_item(layout: layout, col: 3, col_span: 2, row: 0, row_span: 3,)] control_frame: nwg::Frame, #[nwg_layout(parent: control_frame, spacing: 3, margin: [0,0,0,0])] control_layout: nwg::GridLayout, #[nwg_control(parent: control_frame, text: "Options:")] #[nwg_layout_item(layout: control_layout, col: 0, row: 0)] label1: nwg::Label, #[nwg_control(parent: control_frame, text: "New item name")] #[nwg_layout_item(layout: control_layout, col: 0, col_span: 2, row: 1)] new_item: nwg::TextInput, #[nwg_control(parent: control_frame, text: "Add")] #[nwg_layout_item(layout: control_layout, col: 0, row: 2)] 
#[nwg_events(OnButtonClick: [TreeViewApp::button_actions(SELF, CTRL)])] add_btn: nwg::Button, #[nwg_control(parent: control_frame, text: "Del")] #[nwg_layout_item(layout: control_layout, col: 1, row: 2)] #[nwg_events(OnButtonClick: [TreeViewApp::button_actions(SELF, CTRL)])] remove_btn: nwg::Button, #[nwg_control(text: "Events:")] #[nwg_layout_item(layout: layout, col: 3, col_span: 2, row: 3)] label2: nwg::Label, #[nwg_control] #[nwg_layout_item(layout: layout, col: 3, col_span: 2, row: 4, row_span: 2)] events_log: nwg::ListBox<String>, } impl TreeViewApp { fn load_data(&self) { let tv = &self.tree_view; let icons = &self.view_icons; icons.add_icon_from_filename("./test_rc/cog.ico").unwrap(); icons.add_icon_from_filename("./test_rc/love.ico").unwrap(); tv.set_image_list(Some(icons)); let root = tv.insert_item("Caniformia", None, nwg::TreeInsert::Root); tv.insert_item( "Canidae (dogs and other canines)", Some(&root), nwg::TreeInsert::Last, ); let arc = tv.insert_item("Arctoidea", Some(&root), nwg::TreeInsert::Last); tv.insert_item("Ursidae (bears)", Some(&arc), nwg::TreeInsert::Last); let mus = tv.insert_item("Musteloidea (weasel)", Some(&arc), nwg::TreeInsert::Last); tv.insert_item("Mephitidae (skunks)", Some(&mus), nwg::TreeInsert::Last); tv.insert_item("Ailuridae (red panda)", Some(&mus), nwg::TreeInsert::Last); tv.insert_item( "Procyonidae (raccoons and allies)", Some(&mus), nwg::TreeInsert::Last, ); tv.insert_item( "Mustelidae (weasels and allies)", Some(&mus), nwg::TreeInsert::Last, ); tv.set_text_color(50, 50, 200); for item in tv.iter() { tv.set_expand_state(&item, nwg::ExpandState::Expand); tv.set_item_image(&item, 1, true); } } fn button_actions(&self, btn: &nwg::Button) { let tv = &self.tree_view; if btn == &self.add_btn { let text = self.new_item.text(); let item = match tv.selected_item() { Some(i) => tv.insert_item(&text, Some(&i), nwg::TreeInsert::Last), None => tv.insert_item(&text, None, nwg::TreeInsert::Root), }; tv.set_item_image(&item, 1, true); 
} else if btn == &self.remove_btn { if let Some(item) = tv.selected_item() { tv.remove_item(&item); } } } fn log_events(&self, evt: nwg::Event) { self.events_log.insert(0, format!("{:?}", evt)); } fn exit(&self) { nwg::stop_thread_dispatch(); } } fn main() { nwg::init().expect("Failed to init Native Windows GUI"); nwg::Font::set_global_family("Segoe UI").expect("Failed to set default font"); let _app = TreeViewApp::build_ui(Default::default()).expect("Failed to build UI"); nwg::dispatch_thread_events(); }
/*! An application that show how to use the TreeView control. Requires the following features: `cargo run --example treeview_d --features "tree-view tree-view-iterator listbox image-list frame"` */ extern crate native_windows_derive as nwd; extern crate native_windows_gui as nwg; use nwd::NwgUi; use nwg::NativeUi; #[derive(Default, NwgUi)] pub struct TreeViewApp { #[nwg_control(size: (600, 350), position: (300, 300), title: "TreeView - Musteloidea")] #[nwg_events( OnWindowClose: [TreeViewApp::exit], OnInit: [TreeViewApp::load_data] )] window: nwg::Window, #[nwg_resource(initial: 5, size: (16, 16))] view_icons: nwg::ImageList, #[nwg_layout(parent: window)] layout: nwg::GridLayout, #[nwg_control(focus: true)] #[nwg_layout_item(layout: layout, col: 0, col_span: 3, row: 0, row_span: 6)] #[nwg_events( OnTreeViewClick: [TreeViewApp::log_events(SELF, EVT)], OnTreeViewDoubleClick: [TreeViewApp::log_events(SELF, EVT)], OnTreeViewRightClick: [TreeViewApp::log_events(SELF, EVT)], OnTreeFocusLost: [TreeViewApp::log_events(SELF, EVT)], OnTreeFocus: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemDelete: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemExpanded: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemChanged: [TreeViewApp::log_events(SELF, EVT)], OnTreeItemSelectionChanged: [TreeViewApp::log_events(SELF, EVT)], )] tree_view: nwg::TreeView, #[nwg_control(flags: "VISIBLE")] #[nwg_layout_item(layout: layout, col: 3, col_span: 2, row: 0, row_span: 3,)] control_frame: nwg::Frame, #[nwg_layout(parent: control_frame, spacing: 3, margin: [0,0,0,0])] control_layout: nwg::GridLayout, #[nwg_control(parent: control_frame, text: "Options:")] #[nwg_layout_item(layout: control_layout, col: 0, row: 0)] label1: nwg::Label, #[nwg_control(parent: control_frame, text: "New item name")] #[nwg_layout_item(layout: control_layout, col: 0, col_span: 2, row: 1)] new_item: nwg::TextInput, #[nwg_control(parent: control_frame, text: "Add")] #[nwg_layout_item(layout: control_layout, col: 0, row: 2)] 
#[nwg_events(OnButtonClick: [TreeViewApp::button_actions(SELF, CTRL)])] add_btn: nwg::Button, #[nwg_control(parent: control_frame, text: "Del")] #[nwg_layout_item(layout: control_layout, col: 1, row: 2)] #[nwg_events(OnButtonClick: [TreeViewApp::button_actions(SELF, CTRL)])] remove_btn: nwg::Button, #[nwg_control(text: "Events:")] #[nwg_layout_item(layout: layout, col: 3, col_span: 2, row: 3)] label2: nwg::Label, #[nwg_control] #[nwg_layout_item(layout: layout, col: 3, col_span: 2, row: 4, row_span: 2)] events_log: nwg::ListBox<String>, } impl TreeViewApp { fn load_data(&self) { let tv = &self.tree_view; let icons = &self.view_icons; icons.add_icon_from_filename("./test_rc/cog.ico").unwrap(); icons.add_icon_from_filename("./test_rc/love.ico").unwrap(); tv.set_image_list(Some(icons)); let root = tv.insert_item("Caniformia", None, nwg::TreeInsert::Root); tv.insert_item( "Canidae (dogs and other canines)", Some(&root), nwg::TreeInsert::Last, ); let arc = tv.insert_item("Arctoidea", Some(&root), nwg::TreeInsert::Last); tv.insert_item("Ursidae (bears)", Some(&arc), nwg::TreeInsert::Last); let mus = tv.insert_item("Musteloidea (weasel)", Some(&arc), nwg::TreeInsert::Last); tv.insert_item("Mephitidae (skunks)", Some(&mus), nwg::TreeInsert::Last); tv.insert_item("Ailuridae (red panda)", Some(&mus), nwg::TreeInsert::Last); tv.insert_item( "Procyonidae (raccoons and allies)", Some(&mus), nwg::TreeInsert::Last, ); tv.insert_item( "Mustelidae (weasels and allies)", Some(&mus), nwg::TreeInsert::Last, ); tv.set_text_color(50, 50, 200); for item in tv.iter() { tv.set_expand_state(&item, nwg::ExpandState::Expand); tv.set_item_image(&item, 1, true); } } fn button_actions(&self, btn: &nwg::Button) { let tv = &self.tree_view; if btn == &self.add_btn { let text = self.new_item.text(); let item = match tv.selected_item() { Some(i) => tv.insert_item(&text, Some(&i), nwg::TreeInsert::Last), None => tv.insert_item(&text, None, nwg::TreeInsert::Root), };
fn log_events(&self, evt: nwg::Event) { self.events_log.insert(0, format!("{:?}", evt)); } fn exit(&self) { nwg::stop_thread_dispatch(); } } fn main() { nwg::init().expect("Failed to init Native Windows GUI"); nwg::Font::set_global_family("Segoe UI").expect("Failed to set default font"); let _app = TreeViewApp::build_ui(Default::default()).expect("Failed to build UI"); nwg::dispatch_thread_events(); }
tv.set_item_image(&item, 1, true); } else if btn == &self.remove_btn { if let Some(item) = tv.selected_item() { tv.remove_item(&item); } } }
function_block-function_prefix_line
[ { "content": "fn build_frame(button: &mut nwg::ImageFrame, window: &nwg::Window, ico: &nwg::Icon) {\n\n nwg::ImageFrame::builder()\n\n .parent(window)\n\n .build(button);\n\n}\n\n```\n\n*/\n\n#[derive(Default)]\n\npub struct ImageFrame {\n\n pub handle: ControlHandle,\n\n background_brush: Option<HBRUSH>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl ImageFrame {\n\n pub fn builder<'a>() -> ImageFrameBuilder<'a> {\n\n ImageFrameBuilder {\n\n size: (100, 100),\n\n position: (0, 0),\n\n flags: None,\n", "file_path": "native-windows-gui/src/controls/image_frame.rs", "rank": 0, "score": 312083.36072363466 }, { "content": "fn run_text_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().text {\n\n app.test_text_input.set_text(\"New Text\");\n\n assert_eq!(&app.test_text_input.text(), \"New Text\");\n\n\n\n app.test_text_input.set_limit(32);\n\n assert_eq!(app.test_text_input.limit(), 32);\n\n\n\n assert_eq!(app.test_text_input.password_char(), None);\n\n app.test_text_input.set_password_char(Some('X'));\n\n assert_eq!(app.test_text_input.password_char(), Some('X'));\n\n\n\n app.test_text_input.set_modified(true);\n\n assert_eq!(app.test_text_input.modified(), true);\n\n\n\n app.test_text_input.set_selection(0..4);\n\n assert_eq!(app.test_text_input.selection(), 0..4);\n\n\n\n assert_eq!(app.test_text_input.len(), 8);\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 1, "score": 293003.5523079793 }, { "content": "/// Adding/Updating a tooltip after the initial tooltip creation\n\nfn add_tooltip(btn: &nwg::Button, tt: &nwg::Tooltip) {\n\n tt.register(btn, \"This is a button!\");\n\n}\n\n\n", "file_path": "native-windows-gui/src/controls/tooltip.rs", "rank": 2, "score": 285359.1141400137 }, { "content": "fn next_treeview_item(handle: &ControlHandle, action: usize, item: HTREEITEM) -> Option<TreeItem> {\n\n use winapi::um::commctrl::TVM_GETNEXTITEM;\n\n\n\n if handle.blank() {\n\n panic!(\"{}\", NOT_BOUND);\n\n 
}\n\n let handle = handle.hwnd().expect(BAD_HANDLE);\n\n\n\n let handle = wh::send_message(handle, TVM_GETNEXTITEM, action as _, item as _) as HTREEITEM;\n\n if handle.is_null() {\n\n None\n\n } else {\n\n Some(TreeItem { handle })\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/controls/treeview.rs", "rank": 3, "score": 282351.1698548105 }, { "content": "/// Dynamic tooltip callback setup\n\nfn add_dynamic_tooltip(tt: &nwg::Tooltip, btn: &nwg::Button) {\n\n tt.register_callback(btn);\n\n}\n\n\n\n\n", "file_path": "native-windows-gui/src/controls/tooltip.rs", "rank": 4, "score": 281434.455163646 }, { "content": "fn print_char(data: &nwg::EventData) {\n\n println!(\"{:?}\", data.on_char());\n\n}\n\n\n", "file_path": "native-windows-gui/examples/dyn_layout_d.rs", "rank": 5, "score": 259384.38260625757 }, { "content": "fn run_window_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().window {\n\n assert_eq!(&app.window.text(), \"Controls\");\n\n app.window.set_text(\"Controls New title\");\n\n assert_eq!(&app.window.text(), \"Controls New title\");\n\n\n\n assert_eq!(app.window.visible(), true);\n\n app.window.set_visible(false);\n\n assert_eq!(app.window.visible(), false);\n\n app.window.set_visible(true);\n\n\n\n assert_eq!(app.window.enabled(), true);\n\n app.window.set_enabled(false);\n\n assert_eq!(app.window.enabled(), false);\n\n app.window.set_enabled(true);\n\n\n\n app.window.set_position(100, 100);\n\n assert_eq!(app.window.position(), (100, 100));\n\n\n\n app.window.set_size(500, 420);\n\n // The actual size return here might be less because it does not take account of the menubar\n\n // assert_eq!(app.window.size(), (500, 400));\n\n\n\n app.runs.borrow_mut().window = true;\n\n } else {\n\n app.window.set_text(\"Controls\");\n\n app.runs.borrow_mut().window = false;\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 6, "score": 255272.77417423448 }, { "content": "fn show_pop_menu(app: 
&ControlsTest, _evt: Event) {\n\n let (x, y) = GlobalCursor::position();\n\n app.pop_menu.popup(x, y);\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 7, "score": 254144.19959690125 }, { "content": "fn run_list_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().list {\n\n app.test_list_box2.unselect_all();\n\n\n\n {\n\n let col = app.test_list_box1.collection();\n\n assert_eq!(\n\n &col as &[&'static str],\n\n &[\"Red\", \"White\", \"Green\", \"Yellow\"]\n\n );\n\n }\n\n\n\n {\n\n let mut col = app.test_list_box1.collection_mut();\n\n col.push(\"Blue\");\n\n }\n\n\n\n app.test_list_box1.sync();\n\n app.test_list_box1.push(\"Hello!\");\n\n assert_eq!(app.test_list_box1.len(), 6);\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 8, "score": 254121.83336967535 }, { "content": "fn run_combo_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().combo {\n\n {\n\n let col = app.test_combo.collection();\n\n assert_eq!(\n\n &col as &[&'static str],\n\n &[\"Chocolate\", \"Strawberry\", \"Blueberry\"]\n\n );\n\n }\n\n\n\n {\n\n let mut col = app.test_combo.collection_mut();\n\n col.push(\"Hello\");\n\n }\n\n\n\n app.test_combo.sync();\n\n app.test_combo.push(\"World!\");\n\n assert_eq!(app.test_combo.len(), 5);\n\n\n\n app.test_combo.set_selection(None);\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 9, "score": 254121.8333696753 }, { "content": "fn run_track_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().track {\n\n app.test_track1.set_range_min(0);\n\n app.test_track1.set_range_max(10);\n\n\n\n assert_eq!(app.test_track1.range_min(), 0);\n\n assert_eq!(app.test_track1.range_max(), 10);\n\n\n\n app.test_track1.set_pos(3);\n\n assert_eq!(app.test_track1.pos(), 3);\n\n\n\n app.test_track2.set_range_min(0);\n\n app.test_track2.set_range_max(5);\n\n app.test_track2.set_selection_range_pos(0..3);\n\n assert_eq!(app.test_track2.selection_range_pos(), 
0..3);\n\n\n\n app.runs.borrow_mut().track = true;\n\n } else {\n\n app.runs.borrow_mut().track = false;\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 10, "score": 254121.83336967535 }, { "content": "fn run_menu_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().menu {\n\n app.window_menu_item1.set_enabled(false);\n\n assert_eq!(app.window_menu_item1.enabled(), false);\n\n\n\n app.window_submenu1.set_enabled(false);\n\n assert_eq!(app.window_submenu1.enabled(), false);\n\n\n\n app.pop_menu_item1.set_enabled(false);\n\n assert_eq!(app.pop_menu_item1.enabled(), false);\n\n\n\n app.pop_menu.set_enabled(false);\n\n\n\n app.runs.borrow_mut().menu = true;\n\n } else {\n\n app.pop_menu_item1.set_enabled(true);\n\n app.window_submenu1.set_enabled(true);\n\n app.window_menu_item1.set_enabled(true);\n\n app.runs.borrow_mut().menu = false;\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 11, "score": 254121.83336967535 }, { "content": "fn run_progress_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().progress {\n\n app.test_progress1.set_range(0..1000);\n\n\n\n let r = app.test_progress1.range();\n\n assert!(r.start == 0 && r.end == 1000);\n\n\n\n app.test_progress1.set_pos(500);\n\n assert!(app.test_progress1.pos() == 500);\n\n\n\n app.test_progress1.set_step(100);\n\n assert!(app.test_progress1.step() == 100);\n\n\n\n app.test_progress1.set_state(ProgressBarState::Paused);\n\n assert!(app.test_progress1.state() == ProgressBarState::Paused);\n\n\n\n app.test_progress1.advance();\n\n assert!(app.test_progress1.pos() == 600);\n\n\n\n app.test_progress1.advance_delta(50);\n\n assert!(app.test_progress1.pos() == 650);\n\n\n\n app.runs.borrow_mut().progress = true;\n\n } else {\n\n app.test_progress1.set_pos(0);\n\n app.test_progress1.set_state(ProgressBarState::Normal);\n\n app.runs.borrow_mut().progress = false;\n\n }\n\n}\n\n\n", "file_path": 
"native-windows-gui/src/tests/control_test.rs", "rank": 12, "score": 254121.83336967535 }, { "content": "fn run_radio_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().radio {\n\n app.test_radio1.set_check_state(RadioButtonState::Checked);\n\n assert_eq!(app.test_radio1.check_state(), RadioButtonState::Checked);\n\n\n\n app.test_radio2.set_check_state(RadioButtonState::Checked);\n\n assert_eq!(app.test_radio2.check_state(), RadioButtonState::Checked);\n\n\n\n app.test_radio2.set_check_state(RadioButtonState::Unchecked);\n\n assert_eq!(app.test_radio2.check_state(), RadioButtonState::Unchecked);\n\n\n\n app.runs.borrow_mut().radio = true;\n\n } else {\n\n app.runs.borrow_mut().radio = false;\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 13, "score": 254121.83336967535 }, { "content": "fn run_status_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().status {\n\n app.status.set_text(0, \"Status changed!\");\n\n assert_eq!(&app.status.text(0), \"Status changed!\");\n\n\n\n app.status.set_font(Some(&app.arial_font));\n\n assert_eq!(app.status.font().as_ref(), Some(&app.arial_font));\n\n\n\n app.status.set_min_height(55);\n\n\n\n app.runs.borrow_mut().status = true;\n\n } else {\n\n app.status.set_font(None);\n\n app.status.set_min_height(25);\n\n\n\n app.runs.borrow_mut().status = false;\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 14, "score": 254121.83336967535 }, { "content": "fn run_date_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().date {\n\n let v = DatePickerValue {\n\n year: 2000,\n\n month: 10,\n\n day: 5,\n\n };\n\n app.test_date.set_value(Some(v));\n\n assert_eq!(app.test_date.value(), Some(v));\n\n assert_eq!(app.test_date.checked(), true);\n\n\n\n app.test_date.set_value(None);\n\n assert_eq!(app.test_date.value(), None);\n\n assert_eq!(app.test_date.checked(), false);\n\n\n\n app.test_date.set_format(Some(\"'YEAR: 
'yyyy\"));\n\n\n\n let up = DatePickerValue {\n\n year: 2000,\n\n month: 1,\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 15, "score": 254121.83336967535 }, { "content": "fn run_button_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().button {\n\n assert_eq!(&app.test_button.text(), \"A simple button\");\n\n app.test_button.set_text(\"New Text\");\n\n assert_eq!(&app.test_button.text(), \"New Text\");\n\n\n\n assert_eq!(app.test_button.position(), (10, 10));\n\n app.test_button.set_position(5, 5);\n\n assert_eq!(app.test_button.position(), (5, 5));\n\n\n\n assert_eq!(app.test_button.size(), (130, 30));\n\n app.test_button.set_size(120, 35);\n\n assert_eq!(app.test_button.size(), (120, 35));\n\n\n\n if app.basics_control_tab.visible() {\n\n assert_eq!(app.test_button.visible(), true);\n\n app.test_button.set_visible(false);\n\n assert_eq!(app.test_button.visible(), false);\n\n app.test_button.set_visible(true);\n\n }\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 16, "score": 254121.83336967535 }, { "content": "fn run_font_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().font {\n\n app.test_label.set_font(Some(&app.arial_font));\n\n app.test_button.set_font(Some(&app.arial_font));\n\n app.test_checkbox1.set_font(Some(&app.arial_font));\n\n app.test_checkbox2.set_font(Some(&app.arial_font));\n\n app.test_combo.set_font(Some(&app.arial_font));\n\n app.test_date.set_font(Some(&app.arial_font));\n\n app.test_date.set_font(Some(&app.arial_font));\n\n app.test_list_box1.set_font(Some(&app.arial_font));\n\n app.test_list_box2.set_font(Some(&app.arial_font));\n\n app.controls_holder.set_font(Some(&app.arial_font));\n\n app.test_text_input.set_font(Some(&app.arial_font));\n\n app.test_text_box.set_font(Some(&app.arial_font));\n\n app.test_tree.set_font(Some(&app.arial_font));\n\n\n\n assert_eq!(app.test_label.font().as_ref(), Some(&app.arial_font));\n\n\n\n app.runs.borrow_mut().font = 
true;\n\n } else {\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 17, "score": 254121.83336967535 }, { "content": "fn run_tooltip_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().tooltip {\n\n app.test_ttp2.set_enabled(false);\n\n\n\n app.test_ttp1.set_delay_time(Some(100));\n\n assert_eq!(app.test_ttp1.delay_time(), 100);\n\n\n\n app.test_ttp1\n\n .register(&app.test_checkbox1, \"A simple checkbox\");\n\n app.test_ttp1\n\n .register(&app.test_checkbox2, \"A checkbox with 3 states!\");\n\n\n\n app.test_ttp3\n\n .set_default_decoration(\"Changed!\", TooltipIcon::None);\n\n\n\n app.test_ttp1\n\n .set_text(&app.test_button.handle, \"New tool tip!\");\n\n assert_eq!(\n\n &app.test_ttp1.text(&app.test_button.handle, None),\n\n \"New tool tip!\"\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 18, "score": 254121.83336967535 }, { "content": "fn build_box(tbox: &mut nwg::TextInput, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::TextInput::builder()\n\n .text(\"Hello\")\n\n .font(Some(font))\n\n .parent(window)\n\n .build(tbox);\n\n}\n\n```\n\n*/\n\n#[derive(Default)]\n\npub struct TextInput {\n\n pub handle: ControlHandle,\n\n background_brush: Option<HBRUSH>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl TextInput {\n\n pub fn builder<'a>() -> TextInputBuilder<'a> {\n\n TextInputBuilder {\n\n text: \"\",\n", "file_path": "native-windows-gui/src/controls/text_input.rs", "rank": 19, "score": 251712.05409424193 }, { "content": "fn build_box(tbox: &mut nwg::TextBox, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::TextBox::builder()\n\n .text(\"Hello\")\n\n .font(Some(font))\n\n .parent(window)\n\n .build(tbox);\n\n}\n\n```\n\n*/\n\n#[derive(Default, PartialEq, Eq)]\n\npub struct TextBox {\n\n pub handle: ControlHandle,\n\n}\n\n\n\nimpl TextBox {\n\n pub fn builder<'a>() -> TextBoxBuilder<'a> {\n\n TextBoxBuilder {\n\n text: \"\",\n\n size: (100, 25),\n\n position: (0, 0),\n", 
"file_path": "native-windows-gui/src/controls/text_box.rs", "rank": 20, "score": 251712.05409424193 }, { "content": "fn run_check_box_tests(app: &ControlsTest, _evt: Event) {\n\n if !app.runs.borrow().check {\n\n assert_eq!(app.test_checkbox2.tristate(), true);\n\n assert_eq!(app.test_checkbox1.tristate(), false);\n\n\n\n app.test_checkbox1.set_tristate(true);\n\n assert_eq!(app.test_checkbox1.tristate(), true);\n\n\n\n app.test_checkbox1.set_check_state(CheckBoxState::Checked);\n\n assert_eq!(app.test_checkbox1.check_state(), CheckBoxState::Checked);\n\n\n\n app.test_checkbox1.set_check_state(CheckBoxState::Unchecked);\n\n assert_eq!(app.test_checkbox1.check_state(), CheckBoxState::Unchecked);\n\n\n\n app.test_checkbox1\n\n .set_check_state(CheckBoxState::Indeterminate);\n\n assert_eq!(\n\n app.test_checkbox1.check_state(),\n\n CheckBoxState::Indeterminate\n\n );\n\n\n\n app.runs.borrow_mut().check = true;\n\n } else {\n\n app.test_checkbox1.set_tristate(false);\n\n app.runs.borrow_mut().check = false;\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 21, "score": 250341.09136256034 }, { "content": "fn build_listbox(listb: &mut nwg::ListBox<&'static str>, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::ListBox::builder()\n\n .flags(nwg::ListBoxFlags::VISIBLE | nwg::ListBoxFlags::MULTI_SELECT)\n\n .collection(vec![\"Hello\", \"World\", \"!!!!\"])\n\n .multi_selection(vec![0, 1, 2])\n\n .font(Some(font))\n\n .parent(window)\n\n .build(listb);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct ListBox<D: Display + Default> {\n\n pub handle: ControlHandle,\n\n collection: RefCell<Vec<D>>,\n\n}\n\n\n\nimpl<D: Display + Default> ListBox<D> {\n\n pub fn builder<'a>() -> ListBoxBuilder<'a, D> {\n", "file_path": "native-windows-gui/src/controls/list_box.rs", "rank": 22, "score": 244941.8689252503 }, { "content": "fn load_icon() -> nwg::Icon {\n\n nwg::Icon::from_file(\"hello.ico\", true).unwrap()\n\n}\n\n\n", "file_path": 
"native-windows-gui/src/resources/icon.rs", "rank": 23, "score": 236092.32922289864 }, { "content": "fn load_icon_builder() -> nwg::Icon {\n\n let mut icon = nwg::Icon::default();\n\n\n\n nwg::Icon::builder()\n\n .source_file(Some(\"hello.ico\"))\n\n .strict(true)\n\n .build(&mut icon);\n\n\n\n icon\n\n}\n\n\n\n*/\n\n#[allow(unused)]\n\npub struct Icon {\n\n pub handle: HANDLE,\n\n pub(crate) owned: bool,\n\n}\n\n\n\nimpl Icon {\n\n pub fn builder<'a>() -> IconBuilder<'a> {\n", "file_path": "native-windows-gui/src/resources/icon.rs", "rank": 24, "score": 233477.0973389047 }, { "content": "fn basic_stuff(window: &nwg::Window) -> Result<(), nwg::NwgError> {\n\n nwg::ControlBase::build_hwnd()\n\n .class_name(\"BUTTON\")\n\n .forced_flags(0)\n\n .flags(0)\n\n .size((100, 100))\n\n .position((100, 100))\n\n .text(\"HELLO\")\n\n .parent(Some(window.handle))\n\n .build()?;\n\n\n\n #[cfg(feature = \"menu\")]\n\n nwg::ControlBase::build_hmenu()\n\n .text(\"Item\")\n\n .item(true)\n\n .parent(window.handle)\n\n .build()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "native-windows-gui/src/controls/control_base.rs", "rank": 25, "score": 231061.54374075308 }, { "content": "fn next_item(tree: HWND, action: NextAction, handle: HTREEITEM) -> Option<TreeItem> {\n\n use winapi::shared::minwindef::{LPARAM, WPARAM};\n\n use winapi::um::commctrl::TVM_GETNEXTITEM;\n\n\n\n let handle =\n\n wh::send_message(tree, TVM_GETNEXTITEM, action as WPARAM, handle as LPARAM) as HTREEITEM;\n\n if handle.is_null() {\n\n None\n\n } else {\n\n Some(TreeItem { handle })\n\n }\n\n}\n", "file_path": "native-windows-gui/src/controls/treeview_iterator.rs", "rank": 26, "score": 230415.58331640868 }, { "content": "fn iter_tree_view(tree: &mut nwg::TreeView) {\n\n for item in tree.iter() {\n\n println!(\"{:?}\", tree.item_text(&item));\n\n }\n\n}\n\n```\n\n*/\n\n#[allow(unused)]\n\npub struct TreeViewIterator<'a> {\n\n tree_view: &'a TreeView,\n\n tree_view_handle: HWND,\n\n base_item: HTREEITEM,\n\n current_item: 
HTREEITEM,\n\n action: NextAction,\n\n}\n\n\n\nimpl<'a> TreeViewIterator<'a> {\n\n /// Use `TreeView.iter` to create a `TreeViewIterator`\n\n pub(crate) fn new(tree_view: &'a TreeView, current_item: HTREEITEM) -> TreeViewIterator {\n\n let tree_view_handle = tree_view.handle.hwnd().unwrap();\n", "file_path": "native-windows-gui/src/controls/treeview_iterator.rs", "rank": 27, "score": 228618.45623021023 }, { "content": "fn read_custom_data(window: &nwg::Window) -> Option<Hello> {\n\n unsafe {\n\n nwg::Clipboard::open(window);\n\n let data = nwg::Clipboard::data(nwg::ClipboardFormat::Global(\"Hello\"));\n\n nwg::Clipboard::close();\n\n data\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/win32/clipboard.rs", "rank": 28, "score": 225362.9728755046 }, { "content": "fn read_custom_data_handle(window: &nwg::Window) -> Option<Hello> {\n\n unsafe {\n\n nwg::Clipboard::open(window);\n\n let handle = nwg::Clipboard::data_handle(nwg::ClipboardFormat::Global(\"Hello\"));\n\n let data = match handle {\n\n Some(h) => {\n\n let data_ptr: *const Hello = h.cast();\n\n let data = *data_ptr;\n\n h.release();\n\n Some(data)\n\n },\n\n None => None\n\n };\n\n\n\n nwg::Clipboard::close();\n\n data\n\n }\n\n}\n\n\n\n```\n", "file_path": "native-windows-gui/src/win32/clipboard.rs", "rank": 29, "score": 222911.87265143346 }, { "content": "/// Create the NWG tab classes\n\npub fn create_extern_canvas_classes() -> Result<(), NwgError> {\n\n use winapi::shared::windef::HBRUSH;\n\n use winapi::um::libloaderapi::GetModuleHandleW;\n\n use winapi::um::winuser::{CS_HREDRAW, CS_OWNDC, CS_VREDRAW};\n\n\n\n let hmod = unsafe { GetModuleHandleW(ptr::null_mut()) };\n\n if hmod.is_null() {\n\n return Err(NwgError::initialization(\"GetModuleHandleW failed\"));\n\n }\n\n\n\n unsafe {\n\n build_sysclass(\n\n hmod,\n\n EXT_CANVAS_CLASS_ID,\n\n Some(extern_canvas_proc),\n\n Some(0 as HBRUSH),\n\n Some(CS_OWNDC | CS_VREDRAW | CS_HREDRAW),\n\n )?;\n\n }\n\n\n", "file_path": 
"native-windows-gui/src/win32/extern_canvas.rs", "rank": 30, "score": 221829.1654413445 }, { "content": "fn print_char(data: &nwg::EventData) {\n\n println!(\"{:?}\", data.on_char());\n\n}\n\n\n", "file_path": "native-windows-gui/examples/partials_d.rs", "rank": 31, "score": 221769.08293920182 }, { "content": "fn clipboard_text(window: &nwg::Window) {\n\n nwg::Clipboard::set_data_text(window, \"Hello!\");\n\n\n\n let text = nwg::Clipboard::data_text(window);\n\n assert!(text.is_some());\n\n assert!(&text.unwrap() == &\"Hello!\");\n\n}\n\n```\n\n\n\n\n\nWriting / Reading custom data\n\n\n\n```rust\n\nuse native_windows_gui as nwg;\n\n\n", "file_path": "native-windows-gui/src/win32/clipboard.rs", "rank": 32, "score": 219750.7689533607 }, { "content": "fn bind_raw_handler(window: &nwg::Window) -> nwg::RawEventHandler {\n\n const WM_MOVE: u32 = 3287542; // Not the actual value, but who cares?\n\n let handler_id = 0x10000; // handler ids equal or smaller than 0xFFFF are reserved by NWG\n\n\n\n nwg::bind_raw_event_handler(&window.handle, handler_id, move |_hwnd, msg, _w, _l| {\n\n if msg == WM_MOVE {\n\n println!(\"MOVING!\");\n\n }\n\n None\n\n }).unwrap()\n\n}\n\n\n\n```\n\n*/\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 33, "score": 219702.91282429697 }, { "content": "#[cfg(feature = \"file-dialog\")]\n\nfn open_file(app: &ControlsTest, _evt: Event) {\n\n if app.open_file_dialog.run(Some(&app.window)) {\n\n app.file_dialog_result.clear();\n\n if let Ok(file_names) = app.open_file_dialog.get_selected_items() {\n\n let mut names = String::new();\n\n for name in file_names {\n\n names.push_str(&name.into_string().unwrap());\n\n names.push_str(\"\\r\\n\")\n\n }\n\n\n\n app.file_dialog_result.set_text(&names);\n\n }\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 34, "score": 218178.45183941277 }, { "content": "#[cfg(feature = \"file-dialog\")]\n\nfn save_file(app: &ControlsTest, _evt: Event) {\n\n if 
app.save_file_dialog.run(Some(&app.window)) {\n\n app.file_dialog_result.clear();\n\n if let Ok(file) = app.save_file_dialog.get_selected_item() {\n\n app.file_dialog_result\n\n .set_text(&file.into_string().unwrap());\n\n }\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 35, "score": 218178.45183941277 }, { "content": "#[cfg(not(feature = \"file-dialog\"))]\n\nfn open_directory(_app: &ControlsTest, _evt: Event) {}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 36, "score": 218178.45183941277 }, { "content": "#[cfg(not(feature = \"file-dialog\"))]\n\nfn open_file(_app: &ControlsTest, _evt: Event) {}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 37, "score": 218178.45183941277 }, { "content": "#[cfg(feature = \"file-dialog\")]\n\nfn open_directory(app: &ControlsTest, _evt: Event) {\n\n if app.open_directory_dialog.run(Some(&app.window)) {\n\n app.file_dialog_result.clear();\n\n if let Ok(directory) = app.open_directory_dialog.get_selected_item() {\n\n app.file_dialog_result\n\n .set_text(&directory.into_string().unwrap());\n\n }\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 38, "score": 218178.45183941277 }, { "content": "#[cfg(not(feature = \"file-dialog\"))]\n\nfn save_file(_app: &ControlsTest, _evt: Event) {}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 39, "score": 218178.45183941277 }, { "content": "pub fn destroy_icon(icon: HANDLE) {\n\n unsafe {\n\n winapi::um::winuser::DestroyIcon(icon as _);\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/win32/resources_helper.rs", "rank": 40, "score": 217965.28335141856 }, { "content": "pub fn init_common_controls() -> Result<(), NwgError> {\n\n use winapi::shared::winerror::{S_FALSE, S_OK};\n\n use winapi::um::commctrl::{InitCommonControlsEx, INITCOMMONCONTROLSEX};\n\n use winapi::um::commctrl::{\n\n ICC_BAR_CLASSES, ICC_DATE_CLASSES, 
ICC_LISTVIEW_CLASSES, ICC_PROGRESS_CLASS,\n\n ICC_STANDARD_CLASSES, ICC_TAB_CLASSES, ICC_TREEVIEW_CLASSES,\n\n };\n\n use winapi::um::libloaderapi::LoadLibraryW;\n\n use winapi::um::objbase::CoInitialize;\n\n\n\n unsafe {\n\n let mut classes = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES;\n\n\n\n if cfg!(feature = \"datetime-picker\") {\n\n classes |= ICC_DATE_CLASSES;\n\n }\n\n\n\n if cfg!(feature = \"progress-bar\") {\n\n classes |= ICC_PROGRESS_CLASS;\n\n }\n", "file_path": "native-windows-gui/src/win32/mod.rs", "rank": 41, "score": 212928.1696066709 }, { "content": "fn build_notice(notice: &mut nwg::Notice, window: &nwg::Window) {\n\n nwg::Notice::builder()\n\n .parent(window)\n\n .build(notice);\n\n}\n\n```\n\n\n\n```rust\n\nuse native_windows_gui as nwg;\n\nuse std::thread;\n\nuse std::time;\n\n\n", "file_path": "native-windows-gui/src/controls/notice.rs", "rank": 42, "score": 210804.01035039724 }, { "content": "pub fn unbind_raw_event_handler(handler: &RawEventHandler) -> Result<(), NwgError> {\n\n let subclass_proc = handler.subclass_proc;\n\n let handler_id = handler.handler_id;\n\n let handle = handler.handle;\n\n\n\n unsafe {\n\n let mut callback_value: UINT_PTR = 0;\n\n let result = GetWindowSubclass(handle, subclass_proc, handler_id, &mut callback_value);\n\n if result == 0 {\n\n let err = format!(\n\n concat!(\n\n \"Could not fetch raw event handler #{:?}.\",\n\n \"This can happen if the control ({:?}) was freed or\",\n\n \"if this raw event handler was already unbound\"\n\n ),\n\n handler_id, handle\n\n );\n\n return Err(NwgError::EventsBinding(err));\n\n }\n\n\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 43, "score": 207076.63903133772 }, { "content": "fn build_button(button: &mut nwg::Button, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::Button::builder()\n\n .text(\"Hello\")\n\n .flags(nwg::ButtonFlags::VISIBLE)\n\n .font(Some(font))\n\n .parent(window)\n\n .build(button);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default, Eq, 
PartialEq)]\n\npub struct Button {\n\n pub handle: ControlHandle,\n\n}\n\n\n\nimpl Button {\n\n pub fn builder<'a>() -> ButtonBuilder<'a> {\n\n ButtonBuilder {\n\n text: \"Button\",\n", "file_path": "native-windows-gui/src/controls/button.rs", "rank": 44, "score": 207066.29408282213 }, { "content": "fn build_label(label: &mut nwg::Label, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::Label::builder()\n\n .text(\"Hello\")\n\n .font(Some(font))\n\n .parent(window)\n\n .build(label);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct Label {\n\n pub handle: ControlHandle,\n\n background_brush: Option<HBRUSH>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n handler1: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl Label {\n\n pub fn builder<'a>() -> LabelBuilder<'a> {\n", "file_path": "native-windows-gui/src/controls/label.rs", "rank": 45, "score": 207066.29408282213 }, { "content": "fn build_timer(parent: &nwg::Window) {\n\n let mut timer = Default::default();\n\n nwg::Timer::builder()\n\n .parent(parent)\n\n .interval(100)\n\n .stopped(false)\n\n .build(&mut timer);\n\n}\n\n```\n\n*/\n\n#[deprecated(\n\n since = \"1.0.11\",\n\n note = \"Use AnimationTimer instead. 
The winapi timer does not have a constant tick and will call your single threaded from another thread.\"\n\n)]\n\n#[derive(Default)]\n\npub struct Timer {\n\n pub handle: ControlHandle,\n\n interval: RefCell<u32>,\n\n}\n\n\n", "file_path": "native-windows-gui/src/controls/timer.rs", "rank": 46, "score": 206520.27021262125 }, { "content": "fn build_scrollbar(button: &mut nwg::ScrollBar, window: &nwg::Window) {\n\n nwg::ScrollBar::builder()\n\n .range(Some(0..100))\n\n .pos(Some(10))\n\n .parent(window)\n\n .build(button);\n\n}\n\n```\n\n*/\n\n#[derive(Default)]\n\npub struct ScrollBar {\n\n pub handle: ControlHandle,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n handler1: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl ScrollBar {\n\n pub fn builder<'a>() -> ScrollBarBuilder {\n\n ScrollBarBuilder {\n\n size: (25, 100),\n", "file_path": "native-windows-gui/src/controls/scroll_bar.rs", "rank": 47, "score": 206447.66951673766 }, { "content": "fn build_dtp(date: &mut nwg::DatePicker, window: &nwg::Window) {\n\n let v = nwg::DatePickerValue { year: 2000, month: 10, day: 5 };\n\n let v1 = nwg::DatePickerValue { year: 2000, month: 10, day: 5 };\n\n let v2 = nwg::DatePickerValue { year: 2012, month: 10, day: 5 };\n\n\n\n nwg::DatePicker::builder()\n\n .size((200, 300))\n\n .position((0, 0))\n\n .date(Some(v))\n\n .format(Some(\"'YEAR: 'yyyy\"))\n\n .range(Some([v1, v2]))\n\n .parent(window)\n\n .build(date);\n\n}\n\n```\n\n*/\n\n#[derive(Default, PartialEq, Eq)]\n\npub struct DatePicker {\n\n pub handle: ControlHandle,\n\n}\n", "file_path": "native-windows-gui/src/controls/date_picker.rs", "rank": 48, "score": 206447.66951673766 }, { "content": "fn build_trackbar(track: &mut nwg::TrackBar, window: &nwg::Window) {\n\n nwg::TrackBar::builder()\n\n .range(Some(0..100))\n\n .pos(Some(10))\n\n .parent(window)\n\n .build(track);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct TrackBar {\n\n pub handle: ControlHandle,\n\n background_brush: Option<HBRUSH>,\n\n 
handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl TrackBar {\n\n pub fn builder() -> TrackBarBuilder {\n\n TrackBarBuilder {\n", "file_path": "native-windows-gui/src/controls/track_bar.rs", "rank": 49, "score": 206447.66951673766 }, { "content": "fn blank_item() -> TVITEMW {\n\n TVITEMW {\n\n mask: 0,\n\n hItem: ptr::null_mut(),\n\n state: 0,\n\n stateMask: 0,\n\n pszText: ptr::null_mut(),\n\n cchTextMax: 0,\n\n iImage: 0,\n\n iSelectedImage: 0,\n\n cChildren: 0,\n\n lParam: 0,\n\n }\n\n}\n", "file_path": "native-windows-gui/src/controls/treeview.rs", "rank": 50, "score": 205362.17084250195 }, { "content": "fn build_progress_bar(bar: &mut nwg::ProgressBar, window: &nwg::Window) {\n\n nwg::ProgressBar::builder()\n\n .state(nwg::ProgressBarState::Paused)\n\n .step(10)\n\n .range(0..100)\n\n .parent(window)\n\n .build(bar);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default, PartialEq, Eq)]\n\npub struct ProgressBar {\n\n pub handle: ControlHandle,\n\n}\n\n\n\nimpl ProgressBar {\n\n pub fn builder() -> ProgressBarBuilder {\n\n ProgressBarBuilder {\n\n size: (100, 40),\n", "file_path": "native-windows-gui/src/controls/progress_bar.rs", "rank": 51, "score": 204379.84175207868 }, { "content": "/// Builds a timer that will animation something at 60fps for 3 sec\n\nfn build_timer(parent: &nwg::Window) {\n\n let mut timer = Default::default();\n\n nwg::AnimationTimer::builder()\n\n .parent(parent)\n\n .interval(Duration::from_millis(1000/60))\n\n .lifetime(Some(Duration::from_millis(3000)))\n\n .build(&mut timer);\n\n}\n\n```\n\n*/\n\n#[derive(Default, PartialEq, Eq)]\n\npub struct AnimationTimer {\n\n pub handle: ControlHandle,\n\n}\n\n\n\nimpl AnimationTimer {\n\n pub fn builder() -> AnimationTimerBuilder {\n\n AnimationTimerBuilder {\n\n parent: None,\n\n interval: Duration::from_millis(1000 / 60),\n", "file_path": "native-windows-gui/src/controls/animation_timer.rs", "rank": 52, "score": 203793.6794626072 }, { "content": "fn build_label(label: &mut nwg::RichLabel, 
window: &nwg::Window, font: &nwg::Font) {\n\n nwg::RichLabel::builder()\n\n .text(\"Hello\")\n\n .font(Some(font))\n\n .parent(window)\n\n .build(label);\n\n}\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct RichLabel {\n\n pub handle: ControlHandle,\n\n line_height: Rc<RefCell<Option<i32>>>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl RichLabel {\n\n pub fn builder<'a>() -> RichLabelBuilder<'a> {\n\n RichLabelBuilder {\n\n text: \"A rich label\",\n", "file_path": "native-windows-gui/src/controls/rich_label.rs", "rank": 53, "score": 203304.5674714442 }, { "content": "fn build_radio(radio: &mut nwg::RadioButton, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::RadioButton::builder()\n\n .text(\"Hello\")\n\n .flags(nwg::RadioButtonFlags::VISIBLE)\n\n .font(Some(font))\n\n .parent(window)\n\n .build(radio);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct RadioButton {\n\n pub handle: ControlHandle,\n\n background_brush: Option<HBRUSH>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl RadioButton {\n\n pub fn builder<'a>() -> RadioButtonBuilder<'a> {\n", "file_path": "native-windows-gui/src/controls/radio_button.rs", "rank": 54, "score": 203304.5674714442 }, { "content": "fn build_checkbox(button: &mut nwg::CheckBox, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::CheckBox::builder()\n\n .text(\"Hello\")\n\n .flags(nwg::CheckBoxFlags::VISIBLE)\n\n .font(Some(font))\n\n .parent(window)\n\n .build(button);\n\n}\n\n```\n\n*/\n\n#[derive(Default)]\n\npub struct CheckBox {\n\n pub handle: ControlHandle,\n\n background_brush: Option<HBRUSH>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl CheckBox {\n\n pub fn builder<'a>() -> CheckBoxBuilder<'a> {\n\n CheckBoxBuilder {\n", "file_path": "native-windows-gui/src/controls/check_box.rs", "rank": 55, "score": 203304.5674714442 }, { "content": "fn build_status(status: &mut nwg::StatusBar, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::StatusBar::builder()\n\n 
.text(\"Hello\")\n\n .font(Some(font))\n\n .parent(window)\n\n .build(status);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct StatusBar {\n\n pub handle: ControlHandle,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl StatusBar {\n\n pub fn builder<'a>() -> StatusBarBuilder<'a> {\n\n StatusBarBuilder {\n\n text: \"\",\n", "file_path": "native-windows-gui/src/controls/status_bar.rs", "rank": 56, "score": 203304.5674714442 }, { "content": "fn build_number_select(num_select: &mut nwg::NumberSelect, window: &nwg::Window, font: &nwg::Font) {\n\n nwg::NumberSelect::builder()\n\n .font(Some(font))\n\n .parent(window)\n\n .build(num_select);\n\n}\n\n```\n\n\n\n*/\n\n#[derive(Default)]\n\npub struct NumberSelect {\n\n pub handle: ControlHandle,\n\n data: Rc<RefCell<NumberSelectData>>,\n\n edit: TextInput,\n\n btn_up: Button,\n\n btn_down: Button,\n\n handler: Option<RawEventHandler>,\n\n}\n\n\n\nimpl NumberSelect {\n", "file_path": "native-windows-gui/src/controls/number_select.rs", "rank": 57, "score": 199769.65000684775 }, { "content": "pub fn full_bind_event_handler<F>(handle: &ControlHandle, f: F) -> EventHandler\n\nwhere\n\n F: Fn(Event, EventData, ControlHandle) + 'static,\n\n{\n\n use winapi::um::winuser::EnumChildWindows;\n\n\n\n struct SetSubclassParam {\n\n callback_ptr: *mut *const Callback,\n\n subclass_id: UINT_PTR,\n\n }\n\n\n\n /**\n\n Function that iters over a top level window and bind the events dispatch callback\n\n */\n\n unsafe extern \"system\" fn set_children_subclass(h: HWND, p: LPARAM) -> i32 {\n\n let params_ptr = p as *mut SetSubclassParam;\n\n let params = &*params_ptr;\n\n\n\n let cb: Rc<Callback> = Rc::from_raw(*params.callback_ptr);\n\n\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 58, "score": 199477.4466720561 }, { "content": "fn build_combobox(combo: &mut nwg::ComboBox<&'static str>, window: &nwg::Window) {\n\n let data = vec![\"one\", \"two\"];\n\n\n\n nwg::ComboBox::builder()\n\n .size((200, 
300))\n\n .collection(data)\n\n .selected_index(Some(0))\n\n .parent(window)\n\n .build(combo);\n\n}\n\n```\n\n*/\n\n#[derive(Default)]\n\npub struct ComboBox<D: Display + Default> {\n\n pub handle: ControlHandle,\n\n collection: RefCell<Vec<D>>,\n\n handler0: RefCell<Option<RawEventHandler>>,\n\n}\n\n\n\nimpl<D: Display + Default> ComboBox<D> {\n", "file_path": "native-windows-gui/src/controls/combo_box.rs", "rank": 59, "score": 195499.88138102874 }, { "content": "/// Build two group of checkboxes on the same parent with the GROUP flags\n\nfn build_radio_groups(radios: &mut [nwg::RadioButton], parent: &nwg::Window) {\n\n use nwg::RadioButtonFlags as RadioF;\n\n\n\n // Group 1\n\n nwg::RadioButton::builder()\n\n .flags(RadioF::VISIBLE | RadioF::GROUP)\n\n .parent(parent)\n\n .build(&mut radios[0]);\n\n\n\n nwg::RadioButton::builder()\n\n .parent(parent)\n\n .build(&mut radios[1]);\n\n\n\n // Group 2\n\n nwg::RadioButton::builder()\n\n .flags(RadioF::VISIBLE | RadioF::GROUP)\n\n .parent(parent)\n\n .build(&mut radios[2]);\n\n\n\n nwg::RadioButton::builder()\n\n .parent(parent)\n\n .build(&mut radios[3]);\n\n}\n\n\n\n\n\n```\n\n\n\n```rust\n\nuse native_windows_gui as nwg;\n", "file_path": "native-windows-gui/src/controls/radio_button.rs", "rank": 60, "score": 193819.02503376477 }, { "content": "pub fn dispatch_thread_events() {\n\n use winapi::um::winuser::GetMessageW;\n\n use winapi::um::winuser::MSG;\n\n\n\n unsafe {\n\n let mut msg: MSG = mem::zeroed();\n\n while GetMessageW(&mut msg, ptr::null_mut(), 0, 0) != 0 {\n\n if IsDialogMessageW(GetAncestor(msg.hwnd, GA_ROOT), &mut msg) == 0 {\n\n TranslateMessage(&msg);\n\n DispatchMessageW(&msg);\n\n }\n\n }\n\n }\n\n}\n\n\n\n/**\n\n Dispatch system evetns in the current thread AND execute a callback after each peeking attempt.\n\n Unlike `dispath_thread_events`, this method will not pause the thread while waiting for events.\n\n*/\n", "file_path": "native-windows-gui/src/win32/mod.rs", "rank": 61, "score": 
192651.5725214475 }, { "content": "pub fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n\n\n let app = ExternCanvas::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n\n\n // Make sure to render everything at least once before showing the window to remove weird artifacts.\n\n app.canvas.create_context();\n\n app.canvas.render();\n\n\n\n // Here we use the `with_callback` version of dispatch_thread_events\n\n // Internally the callback will be executed almost as fast as `loop { callback() }`\n\n nwg::dispatch_thread_events_with_callback(move || {\n\n app.canvas.render();\n\n });\n\n}\n\n\n\n\n\nconst VS_SRC: &'static [u8] = b\"#version 330\n\nlayout (location=0) in vec2 a_position;\n\nlayout (location=1) in vec4 a_color;\n", "file_path": "native-windows-gui/examples/opengl_canvas/src/main.rs", "rank": 62, "score": 191499.24684381863 }, { "content": "pub fn unbind_event_handler(handler: &EventHandler) {\n\n let id = handler.id;\n\n let subclass_id = handler.subclass_id;\n\n let mut callback_ptr: *mut *const Callback = ptr::null_mut();\n\n\n\n for &handle in handler.handles.iter() {\n\n unsafe {\n\n let mut callback_value: UINT_PTR = 0;\n\n let result = GetWindowSubclass(handle, id, subclass_id, &mut callback_value);\n\n if result == 0 {\n\n panic!(\"Parent of hander was either freed or is already unbound\");\n\n }\n\n\n\n callback_ptr = callback_value as *mut *const Callback;\n\n let callback: Rc<Callback> = Rc::from_raw(*callback_ptr);\n\n mem::drop(callback);\n\n\n\n RemoveWindowSubclass(handle, id, subclass_id);\n\n };\n\n }\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 63, "score": 188543.54000574944 }, { "content": "pub fn bind_event_handler<F>(\n\n handle: &ControlHandle,\n\n parent_handle: &ControlHandle,\n\n f: F,\n\n) -> EventHandler\n\nwhere\n\n F: Fn(Event, EventData, ControlHandle) + 'static,\n\n{\n\n let hwnd = handle\n\n .hwnd()\n\n .expect(\"Cannot bind control with an handle of 
type\");\n\n let parent_hwnd = parent_handle\n\n .hwnd()\n\n .expect(\"Cannot bind control with an handle of type\");\n\n\n\n let callback: Rc<Callback> = Rc::new(f);\n\n let parent_callback = callback.clone();\n\n\n\n let callback_box: Box<*const Callback> = Box::new(Rc::into_raw(callback));\n\n let callback_box_parent: Box<*const Callback> = Box::new(Rc::into_raw(parent_callback));\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 64, "score": 185739.52884678298 }, { "content": "fn show(app: &TestControlPanel) {\n\n let text = \"Hello World from Native windows GUI!\";\n\n Clipboard::set_data_text(&app.window, text);\n\n assert!(\n\n Some(text)\n\n == Clipboard::data_text(&app.window)\n\n .as_ref()\n\n .map(|s| s as &str)\n\n );\n\n\n\n app.window.set_visible(true);\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/mod.rs", "rank": 65, "score": 183011.21359432713 }, { "content": "pub fn bind_raw_event_handler<F>(\n\n handle: &ControlHandle,\n\n handler_id: UINT_PTR,\n\n f: F,\n\n) -> Result<RawEventHandler, NwgError>\n\nwhere\n\n F: Fn(HWND, UINT, WPARAM, LPARAM) -> Option<LRESULT> + 'static,\n\n{\n\n if handler_id <= 0xFFFF {\n\n panic!(\"handler_id <= 0xFFFF are reserved by NWG\");\n\n }\n\n\n\n bind_raw_event_handler_inner(handle, handler_id, f)\n\n}\n\n\n\n/**\n\n Check if a raw handler with the specified handler_id is currently bound on the control.\n\n This function will panic if the handle parameter is not a window control.\n\n*/\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 66, "score": 182956.14142884294 }, { "content": "fn no_class_name_commands(m: usize) -> Event {\n\n match m as i32 {\n\n IDOK => Event::OnKeyEnter,\n\n IDCANCEL => Event::OnKeyEsc,\n\n _ => Event::Unknown,\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 67, "score": 182028.00524654338 }, { "content": "fn notice(noticer: &nwg::Notice) {\n\n let sender = noticer.sender();\n\n\n\n thread::spawn(move || {\n\n 
thread::sleep(time::Duration::new(5, 0));\n\n sender.notice();\n\n });\n\n}\n\n\n\n```\n\n\n\n*/\n\n#[derive(Default, PartialEq, Eq)]\n\npub struct Notice {\n\n pub handle: ControlHandle,\n\n}\n\n\n\nimpl Notice {\n\n pub fn builder() -> NoticeBuilder {\n\n NoticeBuilder { parent: None }\n", "file_path": "native-windows-gui/src/controls/notice.rs", "rank": 68, "score": 180832.71894192728 }, { "content": "/// Just a wrapper to implement ToTokens over Vec<&'a [EventCallback]>\n\nstruct EventCallbackCol<'a> (&'a [EventCallback]);\n\n\n\nimpl<'a> ToTokens for EventCallbackCol<'a> {\n\n\n\n fn to_tokens(&self, tokens: &mut pm2::TokenStream) {\n\n let cb = &self.0;\n\n\n\n let tk = match cb.len() {\n\n 0 => quote!{ {} },\n\n 1 => {\n\n let member = &cb[0].member;\n\n let path = &cb[0].path;\n\n let args = &cb[0].args;\n\n quote!{ if &_handle == &#member { #path(#args) } }\n\n }\n\n _ => {\n\n\n\n // Group callbacks by members\n\n let mut members_callbacks: HashMap<&syn::Expr, Vec<(&syn::Path, &Args)>> = HashMap::new();\n\n for c in cb.iter() {\n", "file_path": "native-windows-derive/src/events.rs", "rank": 69, "score": 179055.72355998782 }, { "content": "/// Building a tooltip and add tooltips at the same time\n\nfn build_tooltip(tt: &mut nwg::Tooltip, btn1: &nwg::Button, btn2: &nwg::Button) {\n\n nwg::Tooltip::builder()\n\n .register(btn1, \"A test button\")\n\n .register_callback(btn2)\n\n .build(tt);\n\n}\n\n\n", "file_path": "native-windows-gui/src/controls/tooltip.rs", "rank": 70, "score": 178110.8645525518 }, { "content": "type Callback = dyn Fn(Event, EventData, ControlHandle);\n\n\n\n/**\n\n An opaque structure that represent a window subclass hook.\n\n*/\n\npub struct EventHandler {\n\n handles: Vec<HWND>,\n\n id: SUBCLASSPROC,\n\n subclass_id: UINT_PTR,\n\n}\n\n\n\n/**\n\n An opaque structure that represent a window subclass hook.\n\n*/\n\npub struct RawEventHandler {\n\n handle: HWND,\n\n subclass_proc: SUBCLASSPROC,\n\n handler_id: 
UINT_PTR,\n\n}\n\n\n\n/**\n\n Note. While there might be a race condition here, it does not matter because\n\n All controls are thread local and the true id is (HANDLE + NOTICE_ID)\n\n The same apply to timers\n\n*/\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 71, "score": 177058.44961948018 }, { "content": "fn find_events_attr(attrs: &[syn::Attribute]) -> Option<&syn::Attribute> {\n\n let mut index = None;\n\n for (i, attr) in attrs.iter().enumerate() {\n\n if let Some(ident) = attr.path.get_ident() {\n\n if ident == \"nwg_events\" {\n\n index = Some(i);\n\n break;\n\n }\n\n }\n\n }\n\n\n\n index.map(|i| &attrs[i])\n\n}\n\n\n\n\n", "file_path": "native-windows-derive/src/events.rs", "rank": 72, "score": 176097.01837657404 }, { "content": "#[derive(Debug)]\n\nstruct NwgLayout<'a> {\n\n id: &'a syn::Ident,\n\n ty: &'a syn::Ident,\n\n names: Vec<syn::Ident>,\n\n values: Vec<syn::Expr>,\n\n}\n\n\n\nimpl<'a> NwgLayout<'a> {\n\n\n\n fn valid(field: &syn::Field) -> bool {\n\n field.attrs.iter().any(|attr|\n\n attr.path.get_ident()\n\n .map(|ident| ident == \"nwg_layout\" )\n\n .unwrap_or(false)\n\n )\n\n }\n\n\n\n fn parse_type(field: &syn::Field) -> &syn::Ident {\n\n // TODO: extract type from nwg_layout first\n\n\n", "file_path": "native-windows-derive/src/ui.rs", "rank": 73, "score": 175797.70918958177 }, { "content": "fn write_custom_data(window: &nwg::Window) {\n\n let data = Hello {\n\n foo: 6529,\n\n bar: [0, 100, 20]\n\n };\n\n\n\n nwg::Clipboard::open(window);\n\n nwg::Clipboard::empty();\n\n unsafe {\n\n nwg::Clipboard::set_data(\n\n nwg::ClipboardFormat::Global(\"Hello\"),\n\n &data as *const Hello,\n\n 1\n\n );\n\n }\n\n\n\n nwg::Clipboard::close();\n\n}\n\n\n", "file_path": "native-windows-gui/src/win32/clipboard.rs", "rank": 74, "score": 175594.80859214708 }, { "content": "/// Create the NWG tab classes\n\npub fn create_tab_classes() -> Result<(), NwgError> {\n\n use winapi::shared::windef::HBRUSH;\n\n use 
winapi::um::libloaderapi::GetModuleHandleW;\n\n use winapi::um::winuser::COLOR_BTNFACE;\n\n\n\n let hmod = unsafe { GetModuleHandleW(ptr::null_mut()) };\n\n if hmod.is_null() {\n\n return Err(NwgError::initialization(\"GetModuleHandleW failed\"));\n\n }\n\n\n\n unsafe {\n\n build_sysclass(\n\n hmod,\n\n TAB_CLASS_ID,\n\n Some(tab_proc),\n\n Some(COLOR_BTNFACE as HBRUSH),\n\n None,\n\n )?;\n\n }\n\n\n", "file_path": "native-windows-gui/src/win32/tabs.rs", "rank": 76, "score": 174990.92105073953 }, { "content": "pub fn destroy_menu_item(parent: HMENU, item_id: u32) {\n\n use winapi::um::winuser::{DeleteMenu, GetMenuItemCount, GetMenuItemID, MF_BYPOSITION};\n\n\n\n unsafe {\n\n let count = GetMenuItemCount(parent);\n\n let mut index = 0;\n\n\n\n while index < count {\n\n let id = GetMenuItemID(parent, index);\n\n match id == item_id {\n\n true => {\n\n DeleteMenu(parent, index as u32, MF_BYPOSITION);\n\n index = count;\n\n }\n\n false => {\n\n index += 1;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/win32/window_helper.rs", "rank": 77, "score": 174940.0413535341 }, { "content": "struct NwgControl<'a> {\n\n id: &'a syn::Ident,\n\n parent_id: Option<String>,\n\n\n\n ty: syn::Ident,\n\n\n\n layout: Option<LayoutChild>,\n\n layout_index: usize,\n\n\n\n names: Vec<syn::Ident>,\n\n values: Vec<syn::Expr>,\n\n\n\n // First value if the parent order, second value is the insert order\n\n weight: [u16; 2],\n\n}\n\n\n\nimpl<'a> NwgControl<'a> {\n\n\n\n fn valid(field: &syn::Field) -> bool {\n\n field.attrs.iter().any(|attr|\n", "file_path": "native-windows-derive/src/ui.rs", "rank": 78, "score": 172954.66857152677 }, { "content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n\n\n let _app = CalendarNames::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n\n\n nwg::dispatch_thread_events();\n\n}\n", "file_path": 
"native-windows-gui/examples/month_name_d.rs", "rank": 79, "score": 172017.42439294845 }, { "content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n\n\n let tree_view_in_edit = RefCell::new(false);\n\n\n\n let app: TreeViewApp = TreeViewApp {\n\n tree_view_in_edit,\n\n ..Default::default()\n\n };\n\n let _app = TreeViewApp::build_ui(app).expect(\"Failed to build UI\");\n\n\n\n nwg::dispatch_thread_events();\n\n}\n", "file_path": "native-windows-gui/examples/treeview_d2.rs", "rank": 80, "score": 171834.28893680798 }, { "content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n let _app = BasicApp::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n nwg::dispatch_thread_events();\n\n}\n", "file_path": "native-windows-gui/examples/basic_layout_d.rs", "rank": 81, "score": 171676.46207670882 }, { "content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n //nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n let mut font = nwg::Font::default();\n\n nwg::Font::builder()\n\n .family(\"MS Shell Dlg\")\n\n .size(15)\n\n .build(&mut font)\n\n .expect(\"Failed to build font\");\n\n nwg::Font::set_global_default(Some(font));\n\n\n\n let _ui = ConfigDlg::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n\n\n nwg::dispatch_thread_events();\n\n}\n", "file_path": "native-windows-gui/examples/dyn_layout_d.rs", "rank": 82, "score": 171676.4620767088 }, { "content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n let _ui = BasicApp::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n nwg::dispatch_thread_events();\n\n}\n", 
"file_path": "native-windows-gui/examples/basic_layout.rs", "rank": 83, "score": 171676.46207670882 }, { "content": "pub fn expand_flags(member_name: &syn::Ident, ty: &syn::Ident, flags: syn::Expr) -> syn::Expr {\n\n let flags_type = format!(\"{}Flags\", ty);\n\n\n\n let flags_value = match &flags {\n\n syn::Expr::Lit(expr_lit) => match &expr_lit.lit {\n\n syn::Lit::Str(value) => value,\n\n other => panic!(\"Compressed flags must str, got {:?} for control {}\", other, member_name)\n\n },\n\n other => panic!(\"Compressed flags must str, got {:?} for control {}\", other, member_name)\n\n };\n\n\n\n let flags = flags_value.value();\n\n let splitted: Vec<&str> = flags.split('|').collect();\n\n\n\n let flags_count = splitted.len() - 1;\n\n let mut final_flags: String = String::with_capacity(100);\n\n for (i, value) in splitted.into_iter().enumerate() {\n\n final_flags.push_str(&flags_type);\n\n final_flags.push_str(\"::\");\n\n final_flags.push_str(value);\n", "file_path": "native-windows-derive/src/controls.rs", "rank": 84, "score": 171363.05347197136 }, { "content": "pub fn build_notice(parent: HWND) -> ControlHandle {\n\n let id = NOTICE_ID.fetch_add(1, Ordering::SeqCst);\n\n ControlHandle::Notice(parent, id)\n\n}\n\n\n\npub unsafe fn build_timer(parent: HWND, interval: u32, stopped: bool) -> ControlHandle {\n\n use winapi::um::winuser::SetTimer;\n\n\n\n let id = TIMER_ID.fetch_add(1, Ordering::SeqCst);\n\n\n\n if !stopped {\n\n SetTimer(parent, id as UINT_PTR, interval as UINT, None);\n\n }\n\n\n\n ControlHandle::Timer(parent, id)\n\n}\n\n\n\n/**\n\n Hook the window subclass with the default event dispatcher.\n\n The hook is applied to the window and all it's children (recursively).\n\n\n\n Returns a `EventHandler` that can be passed to `unbind_event_handler` to remove the callbacks.\n\n\n\n This function will panic if `handle` is not a window handle.\n\n*/\n", "file_path": "native-windows-gui/src/win32/window.rs", "rank": 85, "score": 169787.14570047273 }, { 
"content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n let _app = ResizeApp::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n nwg::dispatch_thread_events();\n\n}\n", "file_path": "native-windows-gui/examples/min_max_size_d.rs", "rank": 86, "score": 168684.69327005796 }, { "content": "fn main() {\n\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n\n nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n\n\n\n let _ui = FlexBoxApp::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n\n\n nwg::dispatch_thread_events();\n\n}\n", "file_path": "native-windows-gui/examples/flexbox_sub_layout.rs", "rank": 87, "score": 168356.93194314046 }, { "content": "fn parse_ui_data(d: &DeriveInput) -> Option<&syn::DataStruct> {\n\n match &d.data {\n\n syn::Data::Struct(ds) => Some(ds),\n\n _ => None\n\n }\n\n}\n\n\n", "file_path": "native-windows-derive/src/lib.rs", "rank": 88, "score": 166395.33606369852 }, { "content": "pub fn modal_message<'a, P: Into<ControlHandle>>(\n\n parent: P,\n\n params: &MessageParams,\n\n) -> MessageChoice {\n\n let control_handle = parent.into();\n\n let hwnd = control_handle.hwnd().expect(\"expected window like control\");\n\n inner_message(hwnd, params)\n\n}\n\n\n\n/**\n\n Display a message box and then panic. 
The message box has for style `MessageButtons::Ok` and `MessageIcons::Error` .\n\n It is recommended to use `modal_fatal_message` because it locks the window that creates the message box.\n\n This method may be deprecated in the future\n\n\n\n Parameters:\n\n * title: The message box title\n\n * content: The message box message\n\n*/\n", "file_path": "native-windows-gui/src/win32/message_box.rs", "rank": 89, "score": 165493.10377367257 }, { "content": "/// Initializes some application wide GUI settings.\n\n/// This includes default styling and common controls resources.\n\npub fn init() -> std::result::Result<(), errors::NwgError> {\n\n if cfg!(not(feature = \"no-styling\")) {\n\n enable_visual_styles();\n\n }\n\n\n\n init_common_controls()\n\n}\n", "file_path": "native-windows-gui/src/lib.rs", "rank": 90, "score": 165413.11951121458 }, { "content": "pub fn dispatch_thread_events_with_callback<F>(mut cb: F)\n\nwhere\n\n F: FnMut() + 'static,\n\n{\n\n use winapi::um::winuser::MSG;\n\n use winapi::um::winuser::{PeekMessageW, PM_REMOVE, WM_QUIT};\n\n\n\n unsafe {\n\n let mut msg: MSG = mem::zeroed();\n\n while msg.message != WM_QUIT {\n\n let has_message = PeekMessageW(&mut msg, ptr::null_mut(), 0, 0, PM_REMOVE) != 0;\n\n if has_message && IsDialogMessageW(GetAncestor(msg.hwnd, GA_ROOT), &mut msg) == 0 {\n\n TranslateMessage(&msg);\n\n DispatchMessageW(&msg);\n\n }\n\n\n\n cb();\n\n }\n\n }\n\n}\n\n\n\n/**\n\n Break the events loop running on the current thread\n\n*/\n", "file_path": "native-windows-gui/src/win32/mod.rs", "rank": 91, "score": 163007.04387719478 }, { "content": "pub fn modal_fatal_message<'a, P: Into<ControlHandle>>(\n\n parent: P,\n\n title: &'a str,\n\n content: &'a str,\n\n) -> ! {\n\n modal_error_message(parent, title, content);\n\n panic!(\"{} - {}\", title, content);\n\n}\n\n\n\n/**\n\n Display a simple error message box. 
The message box has for style `MessageButtons::Ok` and `MessageIcons::Error`.\n\n It is recommended to use `modal_error_message` because it locks the window that creates the message box.\n\n This method may be deprecated in the future\n\n\n\n Parameters:\n\n * title: The message box title\n\n * content: The message box message\n\n*/\n", "file_path": "native-windows-gui/src/win32/message_box.rs", "rank": 92, "score": 162976.64088264533 }, { "content": "pub fn modal_info_message<'a, P: Into<ControlHandle>>(\n\n parent: P,\n\n title: &'a str,\n\n content: &'a str,\n\n) -> MessageChoice {\n\n let params = MessageParams {\n\n title,\n\n content,\n\n buttons: MessageButtons::Ok,\n\n icons: MessageIcons::Info,\n\n };\n\n\n\n modal_message(parent, &params)\n\n}\n", "file_path": "native-windows-gui/src/win32/message_box.rs", "rank": 93, "score": 162976.64088264533 }, { "content": "pub fn modal_error_message<'a, P: Into<ControlHandle>>(\n\n parent: P,\n\n title: &'a str,\n\n content: &'a str,\n\n) -> MessageChoice {\n\n let params = MessageParams {\n\n title,\n\n content,\n\n buttons: MessageButtons::Ok,\n\n icons: MessageIcons::Error,\n\n };\n\n\n\n modal_message(parent, &params)\n\n}\n\n\n\n/**\n\n Display a simple message box. 
The message box has for style `MessageButtons::Ok` and `MessageIcons::Info`.\n\n It is recommended to use `modal_info_message` because it locks the window that creates the message box.\n\n This method may be deprecated in the future\n\n\n\n Parameters:\n\n * title: The message box title\n\n * content: The message box message\n\n*/\n", "file_path": "native-windows-gui/src/win32/message_box.rs", "rank": 94, "score": 162976.64088264533 }, { "content": "#[cfg(feature = \"image-list\")]\n\nfn builder_set_image_list(builder: &TreeViewBuilder, out: &TreeView) {\n\n if builder.image_list.is_some() {\n\n out.set_image_list(builder.image_list);\n\n }\n\n}\n\n\n", "file_path": "native-windows-gui/src/controls/treeview.rs", "rank": 95, "score": 162117.26277034328 }, { "content": "#[cfg(not(feature = \"image-list\"))]\n\nfn builder_set_image_list(_builder: &TreeViewBuilder, _out: &TreeView) {}\n\n\n", "file_path": "native-windows-gui/src/controls/treeview.rs", "rank": 96, "score": 162117.26277034328 }, { "content": "pub fn layout_parameters(field: &syn::Field) -> (Vec<syn::Ident>, Vec<syn::Expr>) {\n\n let member = match field.ident.as_ref() {\n\n Some(m) => m,\n\n None => unreachable!()\n\n };\n\n\n\n let nwg_layout = |attr: &&syn::Attribute| {\n\n attr.path.get_ident()\n\n .map(|id| id == \"nwg_layout\" )\n\n .unwrap_or(false)\n\n };\n\n\n\n let attr = match field.attrs.iter().find(nwg_layout) {\n\n Some(attr) => attr,\n\n None => unreachable!()\n\n };\n\n\n\n let layout: Parameters = match syn::parse2(attr.tokens.clone()) {\n\n Ok(a) => a,\n\n Err(e) => panic!(\"Failed to parse field #{}: {}\", member, e)\n", "file_path": "native-windows-derive/src/layouts.rs", "rank": 97, "score": 160219.44424580337 }, { "content": "fn show_control_test(app: &TestControlPanel) {\n\n app.controls_tests.window.set_visible(true);\n\n app.controls_tests.panel.set_visible(true);\n\n app.controls_tests.window.set_focus();\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/mod.rs", "rank": 
98, "score": 159259.7926611681 }, { "content": "fn run_tray_tests(app: &ControlsTest) {\n\n app.tray_icon.set_visibility(false);\n\n app.tray_icon_2.set_icon(&app.window_icon);\n\n app.tray_icon_2.set_tip(\"Changed the toolip and the icon!\");\n\n\n\n let icon = Some(&app.love_icon);\n\n let flags = Some(\n\n TrayNotificationFlags::USER_ICON\n\n | TrayNotificationFlags::SILENT\n\n | TrayNotificationFlags::LARGE_ICON,\n\n );\n\n\n\n app.tray_icon_2\n\n .show(\"OH NO!\", Some(\"Just a title\"), flags, icon);\n\n app.tray_icon_2.show(\n\n \"I'm spamming the system tray popup!\",\n\n Some(\"Just a title\"),\n\n flags,\n\n icon,\n\n );\n\n app.tray_icon_2.show(\n\n \"You can't stop me!!!!!\",\n\n Some(\"Just a title (really)\"),\n\n flags,\n\n Some(&app.window_icon),\n\n );\n\n}\n\n\n", "file_path": "native-windows-gui/src/tests/control_test.rs", "rank": 99, "score": 159236.48694430728 } ]
Rust
io-engine/tests/lock_lba_range.rs
openebs/MayaStor
a6424b565ea4023acc7896e591e9c71f832eba3f
#![allow(clippy::await_holding_refcell_ref)] #[macro_use] extern crate tracing; use std::{ cell::{Ref, RefCell, RefMut}, ops::{Deref, DerefMut}, rc::Rc, }; use crossbeam::channel::unbounded; use io_engine::{ bdev::nexus::{nexus_create, nexus_lookup_mut}, core::{ IoChannel, MayastorCliArgs, MayastorEnvironment, RangeContext, Reactor, Reactors, UntypedBdev, }, }; use spdk_rs::DmaBuf; pub mod common; const NEXUS_NAME: &str = "lba_range_nexus"; const NEXUS_SIZE: u64 = 10 * 1024 * 1024; const NUM_NEXUS_CHILDREN: u64 = 2; #[derive(Clone)] struct ShareableContext { ctx: Rc<RefCell<RangeContext>>, ch: Rc<RefCell<IoChannel>>, } impl ShareableContext { pub fn new(offset: u64, len: u64) -> ShareableContext { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); Self { ctx: Rc::new(RefCell::new(RangeContext::new(offset, len))), ch: Rc::new(RefCell::new(nexus.get_channel().unwrap())), } } pub fn borrow_mut_ctx(&self) -> RefMut<RangeContext> { self.ctx.borrow_mut() } pub fn borrow_ch(&self) -> Ref<IoChannel> { self.ch.borrow() } } fn test_ini() { test_init!(); for i in 0 .. NUM_NEXUS_CHILDREN { common::delete_file(&[get_disk(i)]); common::truncate_file_bytes(&get_disk(i), NEXUS_SIZE); } Reactor::block_on(async { create_nexus().await; }); } fn test_fini() { for i in 0 .. NUM_NEXUS_CHILDREN { common::delete_file(&[get_disk(i)]); } Reactor::block_on(async { let nexus = nexus_lookup_mut(NEXUS_NAME).unwrap(); nexus.destroy().await.unwrap(); }); } fn get_disk(number: u64) -> String { format!("/tmp/disk{}.img", number) } fn get_dev(number: u64) -> String { format!("aio://{}?blk_size=512", get_disk(number)) } async fn create_nexus() { let mut ch = Vec::new(); for i in 0 .. 
NUM_NEXUS_CHILDREN { ch.push(get_dev(i)); } nexus_create(NEXUS_NAME, NEXUS_SIZE, None, &ch) .await .unwrap(); } async fn lock_range( ctx: &mut RangeContext, ch: &IoChannel, ) -> Result<(), nix::errno::Errno> { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); nexus.lock_lba_range(ctx, ch).await } async fn unlock_range( ctx: &mut RangeContext, ch: &IoChannel, ) -> Result<(), nix::errno::Errno> { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); nexus.unlock_lba_range(ctx, ch).await } #[test] fn lock_unlock() { test_ini(); Reactor::block_on(async { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); let mut ctx = RangeContext::new(1, 5); let ch = nexus.get_channel().unwrap(); nexus .lock_lba_range(&mut ctx, &ch) .await .expect("Failed to acquire lock"); nexus .unlock_lba_range(&mut ctx, &ch) .await .expect("Failed to release lock"); }); test_fini(); } #[test] fn lock_unlock_different_context() { test_ini(); Reactor::block_on(async { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); let mut ctx = RangeContext::new(1, 5); let ch = nexus.get_channel().unwrap(); nexus .lock_lba_range(&mut ctx, &ch) .await .expect("Failed to acquire lock"); let mut ctx1 = RangeContext::new(1, 5); let ch1 = nexus.get_channel().unwrap(); nexus .unlock_lba_range(&mut ctx1, &ch1) .await .expect_err("Shouldn't be able to unlock with a different context"); }); test_fini(); } #[test] fn multiple_locks() { test_ini(); let reactor = Reactors::current(); let (s, r) = unbounded::<()>(); let ctx1 = ShareableContext::new(1, 10); let ctx_clone1 = ctx1.clone(); reactor.send_future(async move { lock_range( ctx_clone1.borrow_mut_ctx().deref_mut(), ctx_clone1.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); let (lock_sender, lock_receiver) = unbounded::<()>(); let ctx2 = ShareableContext::new(1, 5); let ctx_clone2 = ctx2.clone(); reactor.send_future(async move { lock_range( 
ctx_clone2.borrow_mut_ctx().deref_mut(), ctx_clone2.borrow_ch().deref(), ) .await .unwrap(); lock_sender.send(()).unwrap(); }); reactor_poll!(100); assert!(lock_receiver.try_recv().is_err()); let (s, r) = unbounded::<()>(); reactor.send_future(async move { unlock_range( ctx1.borrow_mut_ctx().deref_mut(), ctx1.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); reactor_poll!(100); assert!(lock_receiver.try_recv().is_ok()); let (s, r) = unbounded::<()>(); reactor.send_future(async move { unlock_range( ctx2.borrow_mut_ctx().deref_mut(), ctx2.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); test_fini(); } #[test] fn lock_then_fe_io() { test_ini(); let reactor = Reactors::current(); let (s, r) = unbounded::<()>(); let ctx = ShareableContext::new(1, 10); let ctx_clone = ctx.clone(); reactor.send_future(async move { lock_range( ctx_clone.borrow_mut_ctx().deref_mut(), ctx_clone.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); let (io_sender, io_receiver) = unbounded::<()>(); reactor.send_future(async move { let nexus_desc = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); let h = nexus_desc.into_handle().unwrap(); let blk = 2; let blk_size = 512; let buf = DmaBuf::new(blk * blk_size, 9).unwrap(); match h.write_at((blk * blk_size) as u64, &buf).await { Ok(_) => trace!("Successfully wrote to nexus"), Err(e) => trace!("Failed to write to nexus: {}", e), } io_sender.send(()).unwrap(); }); reactor_poll!(1000); assert!(io_receiver.try_recv().is_err()); let (s, r) = unbounded::<()>(); reactor.send_future(async move { unlock_range(ctx.borrow_mut_ctx().deref_mut(), ctx.borrow_ch().deref()) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); assert!(io_receiver.try_recv().is_ok()); test_fini(); }
#![allow(clippy::await_holding_refcell_ref)] #[macro_use] extern crate tracing; use std::{ cell::{Ref, RefCell, RefMut}, ops::{Deref, DerefMut}, rc::Rc, }; use crossbea
lock_sender.send(()).unwrap(); }); reactor_poll!(100); assert!(lock_receiver.try_recv().is_err()); let (s, r) = unbounded::<()>(); reactor.send_future(async move { unlock_range( ctx1.borrow_mut_ctx().deref_mut(), ctx1.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); reactor_poll!(100); assert!(lock_receiver.try_recv().is_ok()); let (s, r) = unbounded::<()>(); reactor.send_future(async move { unlock_range( ctx2.borrow_mut_ctx().deref_mut(), ctx2.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); test_fini(); } #[test] fn lock_then_fe_io() { test_ini(); let reactor = Reactors::current(); let (s, r) = unbounded::<()>(); let ctx = ShareableContext::new(1, 10); let ctx_clone = ctx.clone(); reactor.send_future(async move { lock_range( ctx_clone.borrow_mut_ctx().deref_mut(), ctx_clone.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); let (io_sender, io_receiver) = unbounded::<()>(); reactor.send_future(async move { let nexus_desc = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); let h = nexus_desc.into_handle().unwrap(); let blk = 2; let blk_size = 512; let buf = DmaBuf::new(blk * blk_size, 9).unwrap(); match h.write_at((blk * blk_size) as u64, &buf).await { Ok(_) => trace!("Successfully wrote to nexus"), Err(e) => trace!("Failed to write to nexus: {}", e), } io_sender.send(()).unwrap(); }); reactor_poll!(1000); assert!(io_receiver.try_recv().is_err()); let (s, r) = unbounded::<()>(); reactor.send_future(async move { unlock_range(ctx.borrow_mut_ctx().deref_mut(), ctx.borrow_ch().deref()) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); assert!(io_receiver.try_recv().is_ok()); test_fini(); }
m::channel::unbounded; use io_engine::{ bdev::nexus::{nexus_create, nexus_lookup_mut}, core::{ IoChannel, MayastorCliArgs, MayastorEnvironment, RangeContext, Reactor, Reactors, UntypedBdev, }, }; use spdk_rs::DmaBuf; pub mod common; const NEXUS_NAME: &str = "lba_range_nexus"; const NEXUS_SIZE: u64 = 10 * 1024 * 1024; const NUM_NEXUS_CHILDREN: u64 = 2; #[derive(Clone)] struct ShareableContext { ctx: Rc<RefCell<RangeContext>>, ch: Rc<RefCell<IoChannel>>, } impl ShareableContext { pub fn new(offset: u64, len: u64) -> ShareableContext { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); Self { ctx: Rc::new(RefCell::new(RangeContext::new(offset, len))), ch: Rc::new(RefCell::new(nexus.get_channel().unwrap())), } } pub fn borrow_mut_ctx(&self) -> RefMut<RangeContext> { self.ctx.borrow_mut() } pub fn borrow_ch(&self) -> Ref<IoChannel> { self.ch.borrow() } } fn test_ini() { test_init!(); for i in 0 .. NUM_NEXUS_CHILDREN { common::delete_file(&[get_disk(i)]); common::truncate_file_bytes(&get_disk(i), NEXUS_SIZE); } Reactor::block_on(async { create_nexus().await; }); } fn test_fini() { for i in 0 .. NUM_NEXUS_CHILDREN { common::delete_file(&[get_disk(i)]); } Reactor::block_on(async { let nexus = nexus_lookup_mut(NEXUS_NAME).unwrap(); nexus.destroy().await.unwrap(); }); } fn get_disk(number: u64) -> String { format!("/tmp/disk{}.img", number) } fn get_dev(number: u64) -> String { format!("aio://{}?blk_size=512", get_disk(number)) } async fn create_nexus() { let mut ch = Vec::new(); for i in 0 .. 
NUM_NEXUS_CHILDREN { ch.push(get_dev(i)); } nexus_create(NEXUS_NAME, NEXUS_SIZE, None, &ch) .await .unwrap(); } async fn lock_range( ctx: &mut RangeContext, ch: &IoChannel, ) -> Result<(), nix::errno::Errno> { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); nexus.lock_lba_range(ctx, ch).await } async fn unlock_range( ctx: &mut RangeContext, ch: &IoChannel, ) -> Result<(), nix::errno::Errno> { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); nexus.unlock_lba_range(ctx, ch).await } #[test] fn lock_unlock() { test_ini(); Reactor::block_on(async { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); let mut ctx = RangeContext::new(1, 5); let ch = nexus.get_channel().unwrap(); nexus .lock_lba_range(&mut ctx, &ch) .await .expect("Failed to acquire lock"); nexus .unlock_lba_range(&mut ctx, &ch) .await .expect("Failed to release lock"); }); test_fini(); } #[test] fn lock_unlock_different_context() { test_ini(); Reactor::block_on(async { let nexus = UntypedBdev::open_by_name(NEXUS_NAME, true).unwrap(); let mut ctx = RangeContext::new(1, 5); let ch = nexus.get_channel().unwrap(); nexus .lock_lba_range(&mut ctx, &ch) .await .expect("Failed to acquire lock"); let mut ctx1 = RangeContext::new(1, 5); let ch1 = nexus.get_channel().unwrap(); nexus .unlock_lba_range(&mut ctx1, &ch1) .await .expect_err("Shouldn't be able to unlock with a different context"); }); test_fini(); } #[test] fn multiple_locks() { test_ini(); let reactor = Reactors::current(); let (s, r) = unbounded::<()>(); let ctx1 = ShareableContext::new(1, 10); let ctx_clone1 = ctx1.clone(); reactor.send_future(async move { lock_range( ctx_clone1.borrow_mut_ctx().deref_mut(), ctx_clone1.borrow_ch().deref(), ) .await .unwrap(); s.send(()).unwrap(); }); reactor_poll!(r); let (lock_sender, lock_receiver) = unbounded::<()>(); let ctx2 = ShareableContext::new(1, 5); let ctx_clone2 = ctx2.clone(); reactor.send_future(async move { lock_range( 
ctx_clone2.borrow_mut_ctx().deref_mut(), ctx_clone2.borrow_ch().deref(), ) .await .unwrap();
random
[ { "content": "#[async_trait(? Send)]\n\npub trait Share: std::fmt::Debug {\n\n type Error;\n\n type Output: std::fmt::Display + std::fmt::Debug;\n\n async fn share_nvmf(\n\n self: Pin<&mut Self>,\n\n cntlid_range: Option<(u16, u16)>,\n\n ) -> Result<Self::Output, Self::Error>;\n\n\n\n /// TODO\n\n async fn unshare(self: Pin<&mut Self>)\n\n -> Result<Self::Output, Self::Error>;\n\n\n\n /// TODO\n\n fn shared(&self) -> Option<Protocol>;\n\n\n\n /// TODO\n\n fn share_uri(&self) -> Option<String>;\n\n\n\n /// TODO\n\n fn bdev_uri(&self) -> Option<String>;\n\n\n\n /// TODO\n\n fn bdev_uri_original(&self) -> Option<String>;\n\n}\n", "file_path": "io-engine/src/core/share.rs", "rank": 0, "score": 71121.6324997395 }, { "content": "/// Default endpoint - ip:port\n\npub fn default_endpoint() -> std::net::SocketAddr {\n\n default_endpoint_str()\n\n .parse()\n\n .expect(\"Expected a valid endpoint\")\n\n}\n\n\n", "file_path": "io-engine/src/grpc/mod.rs", "rank": 1, "score": 69303.27510596212 }, { "content": "/// If endpoint is missing a port number then add the default one.\n\npub fn endpoint(endpoint: String) -> std::net::SocketAddr {\n\n (if endpoint.contains(':') {\n\n endpoint\n\n } else {\n\n format!(\"{}:{}\", endpoint, default_port())\n\n })\n\n .parse()\n\n .expect(\"Invalid gRPC endpoint\")\n\n}\n", "file_path": "io-engine/src/grpc/mod.rs", "rank": 2, "score": 64676.90867339916 }, { "content": "/// Store keys type trait\n\npub trait StoreKey: Sync + ToString + std::fmt::Debug {}\n\nimpl<T> StoreKey for T where T: Sync + ToString + std::fmt::Debug {}\n", "file_path": "io-engine/src/store/store_defs.rs", "rank": 3, "score": 63852.27613412507 }, { "content": "pub fn print_error_chain(err: &dyn std::error::Error) -> String {\n\n let mut msg = format!(\"{}\", err);\n\n let mut opt_source = err.source();\n\n while let Some(source) = opt_source {\n\n msg = format!(\"{}: {}\", msg, source);\n\n opt_source = source.source();\n\n }\n\n msg\n\n}\n\n\n", "file_path": 
"io-engine/src/jsonrpc.rs", "rank": 4, "score": 62369.63955006057 }, { "content": "pub trait BdevCreateDestroy: CreateDestroy + GetName + std::fmt::Debug {}\n\n\n\nimpl<T: CreateDestroy + GetName + std::fmt::Debug> BdevCreateDestroy for T {}\n\n\n\n#[async_trait(?Send)]\n", "file_path": "io-engine/src/bdev/mod.rs", "rank": 5, "score": 62292.727002537766 }, { "content": "type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\n/// Structure representing a replica which is basically SPDK lvol.\n\n///\n\n/// Note about safety: The structure wraps raw C pointer from SPDK.\n\n/// It is safe to use only in synchronous context. If you keep Replica for\n\n/// longer than that then something else can run on reactor_0 inbetween\n\n/// which may destroy the replica and invalidate the pointer!\n\npub struct Replica {\n\n lvol_ptr: *mut spdk_lvol,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize, Clone, Copy)]\n\n/// Types of remote access storage protocols and IDs for sharing replicas.\n\npub enum ShareType {\n\n Nvmf,\n\n}\n\n\n", "file_path": "io-engine/src/replica.rs", "rank": 6, "score": 61908.51357650428 }, { "content": "/// Store value type trait\n\npub trait StoreValue: Sync + serde::Serialize + std::fmt::Debug {}\n\nimpl<T> StoreValue for T where T: Sync + serde::Serialize + std::fmt::Debug {}\n\n\n\n/// Trait defining the operations that can be performed on a key-value store.\n", "file_path": "io-engine/src/store/store_defs.rs", "rank": 7, "score": 61575.46459896461 }, { "content": "type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\nthread_local! 
{\n\npub (crate) static NVMF_TGT: RefCell<Target> = RefCell::new(Target::new());\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Target {\n\n /// the raw pointer to our target\n\n pub(crate) tgt: NonNull<spdk_nvmf_tgt>,\n\n /// the number of poll groups created for this target\n\n poll_group_count: u16,\n\n /// The current state of the target\n\n next_state: TargetState,\n\n}\n\n\n\nimpl Default for Target {\n\n fn default() -> Self {\n\n Target::new()\n\n }\n", "file_path": "io-engine/src/subsys/nvmf/target.rs", "rank": 8, "score": 60289.70608613425 }, { "content": "type Result<T, E = EnvError> = std::result::Result<T, E>;\n\n\n\n/// Mayastor argument\n\n#[derive(Debug, Clone)]\n\n#[allow(dead_code)]\n\npub struct MayastorEnvironment {\n\n pub node_name: String,\n\n pub grpc_endpoint: Option<std::net::SocketAddr>,\n\n pub registration_endpoint: Option<Uri>,\n\n persistent_store_endpoint: Option<String>,\n\n mayastor_config: Option<String>,\n\n pool_config: Option<String>,\n\n delay_subsystem_init: bool,\n\n enable_coredump: bool,\n\n env_context: Option<String>,\n\n hugedir: Option<String>,\n\n hugepage_single_segments: bool,\n\n json_config_file: Option<String>,\n\n master_core: i32,\n\n mem_channel: i32,\n", "file_path": "io-engine/src/core/env.rs", "rank": 9, "score": 60289.70608613425 }, { "content": "/// TODO\n\ntype IoDeviceDestroyCb = unsafe extern \"C\" fn(*mut c_void, *mut c_void);\n\n\n\n/// Abstraction around SPDK I/O device, which hides low-level SPDK\n\n/// API and provides high-level API for I/O channel traversal.\n\nimpl IoDevice {\n\n /// Create a new I/O device using target address as a unique\n\n /// I/O device identifier.\n\n pub fn new<C: Sized>(\n\n devptr: NonNull<c_void>,\n\n name: &str,\n\n create_cb: Option<IoDeviceCreateCb>,\n\n destroy_cb: Option<IoDeviceDestroyCb>,\n\n ) -> Self {\n\n let cname = name.into_cstring();\n\n unsafe {\n\n spdk_io_device_register(\n\n devptr.as_ptr(),\n\n create_cb,\n\n destroy_cb,\n\n std::mem::size_of::<C>() as 
u32,\n", "file_path": "io-engine/src/core/io_device.rs", "rank": 10, "score": 58105.10685745808 }, { "content": "/// TODO\n\ntype IoDeviceCreateCb = unsafe extern \"C\" fn(*mut c_void, *mut c_void) -> i32;\n\n\n", "file_path": "io-engine/src/core/io_device.rs", "rank": 11, "score": 56290.266012432316 }, { "content": "const externIp = common.getMyIp();\n", "file_path": "test/grpc/test_nexus.js", "rank": 12, "score": 46523.61485650168 }, { "content": "def test_creating_a_pool_using_disk_with_invalid_uuid():\n", "file_path": "test/python/v1/pool/test_bdd_pool.py", "rank": 13, "score": 40010.05357300615 }, { "content": "def test_creating_a_pool_using_disk_with_valid_uuid():\n", "file_path": "test/python/v1/pool/test_bdd_pool.py", "rank": 14, "score": 40010.05357300615 }, { "content": "def test_creating_a_pool_using_disk_with_invalid_block_size():\n", "file_path": "test/python/v1/pool/test_bdd_pool.py", "rank": 15, "score": 39322.01118594501 }, { "content": "def test_fail_creating_a_pool_using_disk_with_invalid_block_size():\n", "file_path": "test/python/tests/replica/test_bdd_pool.py", "rank": 16, "score": 38657.314758211345 }, { "content": "#[macro_use]\n\nextern crate ioctl_gen;\n\n#[macro_use]\n\nextern crate tracing;\n\nextern crate nix;\n\n#[macro_use]\n\nextern crate serde;\n\nextern crate function_name;\n\nextern crate serde_json;\n\nextern crate snafu;\n\nextern crate spdk_rs;\n\n\n\n#[macro_use]\n\npub mod core;\n\npub mod bdev;\n\npub mod delay;\n\npub use spdk_rs::ffihelper;\n\npub mod constants;\n\npub mod grpc;\n\npub mod host;\n", "file_path": "io-engine/src/lib.rs", "rank": 18, "score": 24.26611239434498 }, { "content": "//! json-rpc protocol over unix domain socket implementation as described\n\n//! 
in spec: https://www.jsonrpc.org/specification.\n\n\n\nextern crate nix;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde_json;\n\n#[macro_use]\n\nextern crate tracing;\n\n\n\npub mod error;\n\n#[cfg(test)]\n\nmod test;\n\n\n\nuse self::error::{Error, RpcCode};\n\nuse nix::errno::Errno;\n\nuse tokio::{\n\n io::{AsyncReadExt, AsyncWriteExt},\n\n net::UnixStream,\n", "file_path": "jsonrpc/src/lib.rs", "rank": 19, "score": 23.246419776625615 }, { "content": "extern crate bindgen;\n\n\n\nuse std::{env, path::PathBuf};\n\n\n", "file_path": "libnvme-rs/build.rs", "rank": 20, "score": 21.97384782097884 }, { "content": "use std::{\n\n env,\n\n path::{Path, PathBuf},\n\n process::Command,\n\n};\n\n\n\nextern crate tonic_build;\n\n\n", "file_path": "rpc/build.rs", "rank": 21, "score": 21.339953137012174 }, { "content": "use tonic::{Request, Response, Status};\n\nuse tracing::instrument;\n\n\n\nuse std::{convert::TryFrom, pin::Pin};\n\nuse url::Url;\n\n\n\nuse rpc::mayastor::{\n\n bdev_rpc_server::BdevRpc,\n\n Bdev as RpcBdev,\n\n BdevShareReply,\n\n BdevShareRequest,\n\n BdevUri,\n\n Bdevs,\n\n CreateReply,\n\n Null,\n\n};\n\n\n\nuse crate::{\n\n core::{CoreError, Share, UntypedBdev},\n\n grpc::{rpc_submit, GrpcResult},\n", "file_path": "io-engine/src/grpc/bdev_grpc.rs", "rank": 22, "score": 19.08598266868694 }, { "content": "//! 
```\n\n\n\nuse std::collections::HashMap;\n\n\n\nuse super::nvmx;\n\nuse crate::{\n\n bdev::SpdkBlockDevice,\n\n core::{BlockDevice, BlockDeviceDescriptor, CoreError},\n\n nexus_uri::NexusBdevError,\n\n};\n\n\n\nuse url::Url;\n\n\n\npub(crate) mod uri {\n\n use std::convert::TryFrom;\n\n\n\n use snafu::ResultExt;\n\n\n\n use crate::{\n\n bdev::{\n", "file_path": "io-engine/src/bdev/dev.rs", "rank": 23, "score": 18.45768393323352 }, { "content": "use crate::core::{runtime::spawn, Mthread};\n\nuse futures::channel::oneshot;\n\nuse std::time::Duration;\n\n\n\n/// Async sleep that can be called from Mayastor.\n\n/// A sleep is scheduled on the tokio runtime and a receiver channel returned\n\n/// which is signalled once the sleep completes.\n\n/// The sleep duration is not exact as it does not account for thread scheduling\n\n/// but it should be sufficient for most cases.\n\npub(crate) fn mayastor_sleep(duration: Duration) -> oneshot::Receiver<()> {\n\n let (tx, rx) = oneshot::channel::<()>();\n\n spawn(async move {\n\n tokio::time::sleep(duration).await;\n\n let thread = Mthread::get_init();\n\n let rx = thread\n\n .spawn_local(async move {\n\n if tx.send(()).is_err() {\n\n tracing::error!(\n\n \"Failed to send completion for Mayastor sleep.\"\n\n );\n\n }\n\n })\n\n .unwrap();\n\n let _ = rx.await;\n\n });\n\n rx\n\n}\n", "file_path": "io-engine/src/sleep.rs", "rank": 24, "score": 17.492238841987845 }, { "content": "use super::{ChildState, Nexus, NexusChild};\n\nuse crate::{persistent_store::PersistentStore, sleep::mayastor_sleep};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::time::Duration;\n\n\n", "file_path": "io-engine/src/bdev/nexus/nexus_persistence.rs", "rank": 25, "score": 16.543069429538974 }, { "content": "use std::{ffi::CStr, os::raw::c_char, path::Path};\n\n\n\nuse ansi_term::{Colour, Style};\n\n\n\nuse tracing_core::{event::Event, Metadata};\n\nuse tracing_log::{LogTracer, NormalizeEvent};\n\nuse tracing_subscriber::{\n\n fmt::{\n\n 
format::{FmtSpan, FormatEvent, FormatFields},\n\n FmtContext,\n\n FormattedFields,\n\n },\n\n registry::LookupSpan,\n\n EnvFilter,\n\n};\n\n\n\nuse spdk_rs::libspdk::{spdk_log_get_print_level, spdk_log_level};\n\n\n", "file_path": "io-engine/src/logger.rs", "rank": 26, "score": 16.437905826126674 }, { "content": "use crate::bdev::{nexus::nexus_module::NexusModule, Nexus};\n\nuse spdk_rs::BdevModuleIter;\n\nuse std::pin::Pin;\n\n\n\n/// Returns an immutable iterator for Nexus instances.\n", "file_path": "io-engine/src/bdev/nexus/nexus_iter.rs", "rank": 27, "score": 16.28624292744818 }, { "content": "use once_cell::sync::OnceCell;\n\nuse std::convert::TryFrom;\n\n\n\nextern crate libnvme_rs;\n\n\n\nuse io_engine::{\n\n bdev::nexus::{nexus_create, nexus_lookup_mut},\n\n core::{MayastorCliArgs, Protocol},\n\n};\n\n\n\npub mod common;\n\nuse common::compose::MayastorTest;\n\n\n\nstatic DISKNAME1: &str = \"/tmp/disk1.img\";\n\nstatic BDEVNAME1: &str = \"aio:///tmp/disk1.img?blk_size=512\";\n\n\n\nstatic DISKNAME2: &str = \"/tmp/disk2.img\";\n\nstatic BDEVNAME2: &str = \"aio:///tmp/disk2.img?blk_size=512\";\n\n\n\nstatic MAYASTOR: OnceCell<MayastorTest> = OnceCell::new();\n\n\n\nmacro_rules! 
prepare_storage {\n\n () => {\n\n common::delete_file(&[DISKNAME1.into(), DISKNAME2.into()]);\n\n common::truncate_file(DISKNAME1, 64 * 1024);\n\n common::truncate_file(DISKNAME2, 64 * 1024);\n\n };\n\n}\n\n\n", "file_path": "io-engine/tests/mount_fs.rs", "rank": 28, "score": 16.172429331320323 }, { "content": "#![warn(missing_docs)]\n\n\n\nuse std::fmt;\n\n\n\nuse crossbeam::channel::{Receiver, Sender};\n\nuse futures::channel::oneshot;\n\nuse snafu::Snafu;\n\n\n\nuse crate::{\n\n bdev::nexus::VerboseError,\n\n core::{BlockDeviceDescriptor, CoreError, Descriptor},\n\n nexus_uri::NexusBdevError,\n\n};\n\nuse spdk_rs::DmaError;\n\n\n\nuse super::rebuild_impl::*;\n\n\n\n#[derive(Debug, Snafu, Clone)]\n\n#[snafu(visibility = \"pub(crate)\")]\n\n#[allow(missing_docs)]\n", "file_path": "io-engine/src/rebuild/rebuild_api.rs", "rank": 29, "score": 16.11476594039652 }, { "content": "pub use composer::*;\n\n\n\nuse crossbeam::channel::bounded;\n\nuse std::future::Future;\n\nuse tokio::sync::oneshot::channel;\n\n\n\nuse crate::common::mayastor_test_init;\n\nuse io_engine::core::{\n\n mayastor_env_stop,\n\n MayastorCliArgs,\n\n MayastorEnvironment,\n\n Reactor,\n\n Reactors,\n\n GLOBAL_RC,\n\n};\n\nuse std::time::Duration;\n\n\n\n/// Mayastor test structure that simplifies sending futures. Mayastor has\n\n/// its own reactor, which is not tokio based, so we need to handle properly\n\n#[derive(Debug)]\n", "file_path": "io-engine/tests/common/compose.rs", "rank": 30, "score": 16.059995659452788 }, { "content": "//!\n\n//! 
IO is driven by means of so called channels.\n\nuse std::{ffi::c_void, fmt::Debug, pin::Pin};\n\n\n\nuse super::{ChildState, Nexus, Reason};\n\n\n\nuse crate::core::{BlockDeviceHandle, Cores, Mthread};\n\n\n\n/// io channel, per core\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub struct NexusChannel {\n\n inner: *mut NexusChannelInner,\n\n}\n\n\n\n#[repr(C)]\n\npub(crate) struct NexusChannelInner {\n\n pub(crate) writers: Vec<Box<dyn BlockDeviceHandle>>,\n\n pub(crate) readers: Vec<Box<dyn BlockDeviceHandle>>,\n\n pub(crate) previous: usize,\n", "file_path": "io-engine/src/bdev/nexus/nexus_channel.rs", "rank": 31, "score": 15.693481862249149 }, { "content": " assert_eq!(opts.0.fabrics_connect_timeout_us, 1);\n\n assert_eq!(opts.0.transport_retry_count, 1);\n\n }\n\n }\n\n}\n\n\n\npub(crate) mod transport {\n\n use std::{ffi::CStr, fmt::Debug, ptr::copy_nonoverlapping};\n\n\n\n use libc::c_void;\n\n\n\n use spdk_rs::libspdk::spdk_nvme_transport_id;\n\n\n\n pub struct NvmeTransportId(spdk_nvme_transport_id);\n\n\n\n impl Debug for NvmeTransportId {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n writeln!(\n\n f,\n\n \"Transport ID: {}: {}: {}: {}:\",\n", "file_path": "io-engine/src/bdev/nvmx/controller.rs", "rank": 32, "score": 15.526444078817422 }, { "content": "\n\nuse std::{borrow::Cow, time::Duration};\n\nuse tonic::transport::Server;\n\nuse tracing::trace;\n\n\n\npub struct MayastorGrpcServer;\n\n\n\nimpl MayastorGrpcServer {\n\n pub async fn run(\n\n endpoint: std::net::SocketAddr,\n\n rpc_addr: String,\n\n ) -> Result<(), ()> {\n\n info!(\"gRPC server configured at address {}\", endpoint);\n\n let address = Cow::from(rpc_addr);\n\n let svc = Server::builder()\n\n .add_service(MayastorRpcServer::new(MayastorSvc::new(\n\n Duration::from_millis(4),\n\n )))\n\n .add_service(BdevRpcServer::new(BdevSvc::new()))\n\n .add_service(v1::bdev::BdevRpcServer::new(BdevService::new()))\n", "file_path": "io-engine/src/grpc/server.rs", "rank": 33, 
"score": 15.457188766033138 }, { "content": "use futures::channel::oneshot::Receiver;\n\nuse snafu::ResultExt;\n\nuse std::pin::Pin;\n\n\n\nuse super::{\n\n nexus_lookup_mut,\n\n ChildState,\n\n CreateRebuild,\n\n DrEvent,\n\n Error,\n\n Nexus,\n\n Reason,\n\n RebuildJobNotFound,\n\n RebuildOperation,\n\n RemoveRebuildJob,\n\n VerboseError,\n\n};\n\n\n\nuse crate::{\n\n bdev::nexus::nexus_persistence::PersistOp,\n", "file_path": "io-engine/src/bdev/nexus/nexus_bdev_rebuild.rs", "rank": 34, "score": 15.233034576747773 }, { "content": "///! Helpers related to nexus grpc methods.\n\nuse rpc::mayastor as rpc;\n\nuse std::{convert::From, pin::Pin};\n\nuse uuid::Uuid;\n\n\n\nuse crate::{\n\n bdev::{\n\n nexus,\n\n nexus::{\n\n nexus_lookup_mut,\n\n nexus_lookup_uuid_mut,\n\n ChildState,\n\n Nexus,\n\n NexusChild,\n\n NexusStatus,\n\n NvmeAnaState,\n\n Reason,\n\n },\n\n },\n\n core::{Protocol, Share},\n", "file_path": "io-engine/src/grpc/nexus_grpc.rs", "rank": 35, "score": 15.152973461761768 }, { "content": "use crate::{\n\n core::Share,\n\n grpc::{rpc_submit, GrpcClientContext, GrpcResult, Serializer},\n\n lvs::{Error as LvsError, Lvs},\n\n pool::{PoolArgs, PoolBackend},\n\n};\n\nuse futures::FutureExt;\n\nuse nix::errno::Errno;\n\nuse std::{convert::TryFrom, fmt::Debug};\n\nuse tonic::{Request, Response, Status};\n\n\n\nuse rpc::mayastor::v1::pool::*;\n\n\n\n#[derive(Debug)]\n", "file_path": "io-engine/src/grpc/v1/pool.rs", "rank": 36, "score": 15.14191644152029 }, { "content": "use async_trait::async_trait;\n\nuse snafu::ResultExt;\n\nuse std::pin::Pin;\n\n\n\nuse super::{\n\n Error,\n\n NbdDisk,\n\n Nexus,\n\n NexusTarget,\n\n ShareNbdNexus,\n\n ShareNvmfNexus,\n\n UnshareNexus,\n\n};\n\n\n\nuse crate::core::{Protocol, Share};\n\n\n\n#[async_trait(? 
Send)]\n\n///\n\n/// The sharing of the nexus is different compared to regular bdevs\n\n/// the Impl of ['Share'] handles this accordingly\n", "file_path": "io-engine/src/bdev/nexus/nexus_share.rs", "rank": 37, "score": 15.109079101430483 }, { "content": "use std::{\n\n convert::TryFrom,\n\n fmt::{Debug, Error, Formatter},\n\n os::raw::c_void,\n\n};\n\n\n\nuse futures::channel::oneshot;\n\n\n\nuse spdk_rs::{\n\n libspdk::{\n\n bdev_lock_lba_range,\n\n bdev_unlock_lba_range,\n\n spdk_bdev_desc,\n\n spdk_bdev_get_io_channel,\n\n },\n\n BdevModule,\n\n};\n\n\n\nuse crate::{\n\n bdev::nexus::NEXUS_MODULE_NAME,\n", "file_path": "io-engine/src/core/descriptor.rs", "rank": 38, "score": 15.08735715599338 }, { "content": "use std::{\n\n fmt::{Debug, Display, Formatter},\n\n ops::{Deref, DerefMut},\n\n pin::Pin,\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse nix::errno::Errno;\n\nuse snafu::ResultExt;\n\n\n\nuse spdk_rs::libspdk::spdk_bdev;\n\n\n\nuse crate::{\n\n bdev::SpdkBlockDevice,\n\n core::{\n\n share::{Protocol, Share},\n\n BlockDeviceIoStats,\n\n CoreError,\n\n Descriptor,\n\n ShareNvmf,\n", "file_path": "io-engine/src/core/bdev.rs", "rank": 39, "score": 15.083537742225968 }, { "content": "use std::{collections::HashMap, convert::TryFrom};\n\n\n\nuse async_trait::async_trait;\n\nuse snafu::ResultExt;\n\nuse url::Url;\n\n\n\nuse crate::{\n\n bdev::{\n\n dev::reject_unknown_parameters,\n\n nexus::lookup_nexus_child,\n\n util::uri,\n\n CreateDestroy,\n\n GetName,\n\n },\n\n core::UntypedBdev,\n\n nexus_uri::{self, NexusBdevError},\n\n};\n\n\n\n#[derive(Debug)]\n\npub(super) struct Loopback {\n", "file_path": "io-engine/src/bdev/loopback.rs", "rank": 40, "score": 14.987592600566138 }, { "content": "#![warn(missing_docs)]\n\n\n\nuse std::{cell::UnsafeCell, collections::HashMap};\n\n\n\nuse crossbeam::channel::unbounded;\n\nuse futures::{\n\n channel::{mpsc, oneshot},\n\n StreamExt,\n\n};\n\nuse once_cell::sync::OnceCell;\n\nuse snafu::ResultExt;\n\n\n\nuse 
spdk_rs::{\n\n libspdk::{spdk_get_thread, SPDK_BDEV_LARGE_BUF_MAX_SIZE},\n\n DmaBuf,\n\n};\n\n\n\nuse crate::{\n\n bdev::{device_open, nexus::VerboseError},\n\n core::{\n", "file_path": "io-engine/src/rebuild/rebuild_impl.rs", "rank": 41, "score": 14.976129132535602 }, { "content": "use std::{\n\n convert::TryFrom,\n\n ffi::CStr,\n\n os::raw::{c_char, c_int, c_ulong, c_void},\n\n ptr::copy_nonoverlapping,\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::channel::oneshot;\n\nuse snafu::ResultExt;\n\nuse url::Url;\n\n\n\nuse spdk_rs::libspdk::{\n\n bdev_nvme_create,\n\n bdev_nvme_delete,\n\n spdk_nvme_transport_id,\n\n};\n\n\n\nuse crate::{\n\n bdev::{util::uri, CreateDestroy, GetName},\n", "file_path": "io-engine/src/bdev/nvme.rs", "rank": 42, "score": 14.921651641850884 }, { "content": "use async_trait::async_trait;\n\nuse futures::channel::oneshot;\n\nuse nix::errno::Errno;\n\nuse parking_lot::Mutex;\n\nuse std::{convert::From, sync::Arc};\n\nuse uuid::Uuid;\n\n\n\nuse crate::{\n\n bdev::nvmx::{\n\n controller_inner::SpdkNvmeController,\n\n NvmeController,\n\n NvmeControllerState,\n\n NvmeDeviceHandle,\n\n NvmeNamespace,\n\n NVME_CONTROLLERS,\n\n },\n\n core::{\n\n BlockDevice,\n\n BlockDeviceDescriptor,\n\n BlockDeviceHandle,\n", "file_path": "io-engine/src/bdev/nvmx/device.rs", "rank": 43, "score": 14.921651641850886 }, { "content": "use std::{convert::TryFrom, num::ParseIntError, str::ParseBoolError};\n\n\n\nuse crate::{bdev::uri, core::Bdev};\n\nuse futures::channel::oneshot::Canceled;\n\nuse nix::errno::Errno;\n\nuse snafu::Snafu;\n\n\n\nuse url::ParseError;\n\n\n\n// parse URI and bdev create/destroy errors common for all types of bdevs\n\n#[derive(Debug, Snafu, Clone)]\n\n#[snafu(visibility = \"pub(crate)\")]\n\npub enum NexusBdevError {\n\n // Generic URL parse errors.\n\n #[snafu(display(\"Error parsing URI '{}'\", uri))]\n\n UrlParseError { source: ParseError, uri: String },\n\n\n\n // No matching URI error.\n\n #[snafu(display(\n\n \"No 
matching URI found for BDEV '{}' in aliases {:?}\",\n", "file_path": "io-engine/src/nexus_uri.rs", "rank": 44, "score": 14.779236269109077 }, { "content": "//! Handlers for custom NVMe Admin commands\n\n\n\nuse std::{\n\n convert::TryFrom,\n\n ffi::c_void,\n\n ptr::NonNull,\n\n time::{SystemTime, UNIX_EPOCH},\n\n};\n\n\n\nuse crate::{\n\n bdev::nexus,\n\n core::{Bdev, Reactors},\n\n lvs::Lvol,\n\n};\n\n\n\nuse spdk_rs::{\n\n libspdk::{\n\n nvme_cmd_cdw10_get,\n\n nvme_cmd_cdw10_get_val,\n\n nvme_cmd_cdw11_get,\n", "file_path": "io-engine/src/subsys/nvmf/admin_cmd.rs", "rank": 45, "score": 14.699909803817054 }, { "content": "//! of consistency, the mount table is also checked to ENSURE that the device\n\n//! is not mounted)\n\n\n\nuse std::{\n\n collections::HashMap,\n\n ffi::{OsStr, OsString},\n\n io::Error,\n\n};\n\n\n\nuse crate::constants::{NEXUS_CAS_DRIVER, NVME_CONTROLLER_MODEL_ID};\n\nuse proc_mounts::{MountInfo, MountIter};\n\nuse udev::{Device, Enumerator};\n\n\n\n// Struct representing a property value in a udev::Device struct (and possibly\n\n// elsewhere). It is used to provide conversions via various \"From\" trait\n\n// implementations below.\n", "file_path": "io-engine/src/host/blk_device.rs", "rank": 46, "score": 14.63716477110626 }, { "content": "//! Test utility functions.\n\n//!\n\n//! TODO: All functions here should return errors instead of using assert and\n\n//! panic macros. The caller can decide how to handle the error appropriately.\n\n//! 
Panics and asserts in this file are still ok for usage & programming errors.\n\n\n\nuse std::{io, io::Write, process::Command, time::Duration};\n\n\n\nuse crossbeam::channel::{after, select, unbounded};\n\nuse once_cell::sync::OnceCell;\n\nuse run_script::{self, ScriptOptions};\n\nuse url::{ParseError, Url};\n\n\n\nuse tracing::{error, info, trace};\n\n\n\nuse io_engine::{\n\n core::{MayastorEnvironment, Mthread},\n\n logger,\n\n rebuild::{ClientOperations, RebuildJob, RebuildState},\n\n};\n\n\n\npub mod bdev_io;\n\npub mod compose;\n\npub mod error_bdev;\n\n\n\npub use compose::MayastorTest;\n\n\n\n/// call F cnt times, and sleep for a duration between each invocation\n", "file_path": "io-engine/tests/common/mod.rs", "rank": 47, "score": 14.629579791063264 }, { "content": "use std::{collections::HashMap, fmt::Display, sync::Arc};\n\n\n\nuse once_cell::sync::Lazy;\n\nuse parking_lot::{Mutex, RwLock, RwLockReadGuard, RwLockWriteGuard};\n\n\n\npub use channel::{NvmeControllerIoChannel, NvmeIoChannel, NvmeIoChannelInner};\n\npub use controller::NvmeController;\n\npub use controller_state::NvmeControllerState;\n\npub use device::{lookup_by_name, open_by_name, NvmeBlockDevice};\n\npub use handle::{nvme_io_ctx_pool_init, NvmeDeviceHandle};\n\npub use namespace::NvmeNamespace;\n\npub(crate) use uri::NvmfDeviceTemplate;\n\n\n\nuse crate::{\n\n core::CoreError,\n\n subsys::{Config, NvmeBdevOpts},\n\n};\n\n\n\nmod channel;\n\nmod controller;\n", "file_path": "io-engine/src/bdev/nvmx/mod.rs", "rank": 48, "score": 14.619576188935788 }, { "content": " .expect(\"done callback receiver side disappeared\");\n\n}\n\n\n\npub(crate) mod options {\n\n use std::{mem::size_of, ptr::copy_nonoverlapping};\n\n\n\n use spdk_rs::libspdk::{\n\n spdk_nvme_ctrlr_get_default_ctrlr_opts,\n\n spdk_nvme_ctrlr_opts,\n\n };\n\n\n\n use crate::ffihelper::IntoCString;\n\n\n\n /// structure that holds the default NVMe controller options. 
This is\n\n /// different from ['NvmeBdevOpts'] as it exposes more control over\n\n /// variables.\n\n\n\n pub struct NvmeControllerOpts(spdk_nvme_ctrlr_opts);\n\n impl NvmeControllerOpts {\n\n pub fn as_ptr(&self) -> *const spdk_nvme_ctrlr_opts {\n", "file_path": "io-engine/src/bdev/nvmx/controller.rs", "rank": 49, "score": 14.591166811811988 }, { "content": "use std::{\n\n error::Error,\n\n fmt::{Debug, Display},\n\n};\n\n\n\nuse futures::{channel::oneshot::Receiver, Future};\n\npub use server::MayastorGrpcServer;\n\nuse tonic::{Response, Status};\n\n\n\nuse crate::{\n\n core::{CoreError, Mthread, Reactor},\n\n nexus_uri::NexusBdevError,\n\n};\n\n\n\nimpl From<NexusBdevError> for tonic::Status {\n\n fn from(e: NexusBdevError) -> Self {\n\n match e {\n\n NexusBdevError::UrlParseError {\n\n ..\n\n } => Status::invalid_argument(e.to_string()),\n", "file_path": "io-engine/src/grpc/mod.rs", "rank": 50, "score": 14.580709879087411 }, { "content": "use std::{convert::TryFrom, io, time::Duration};\n\n\n\nuse url::{ParseError, Url};\n\n\n\nuse mio08::{Events, Interest, Poll, Token};\n\n\n\nuse crate::{\n\n error::NvmeError,\n\n nvme_device::NvmeDevice,\n\n nvme_tree::{\n\n NvmeCtrlrIterator,\n\n NvmeHostIterator,\n\n NvmeNamespaceInCtrlrIterator,\n\n NvmeNamespaceIterator,\n\n NvmeRoot,\n\n NvmeSubsystemIterator,\n\n },\n\n};\n\n\n\n/// Wrapper for caller-owned C-strings from libnvme\n", "file_path": "libnvme-rs/src/nvme_uri.rs", "rank": 51, "score": 14.52554298733389 }, { "content": "use std::{\n\n collections::VecDeque,\n\n sync::atomic::{AtomicU32, Ordering},\n\n};\n\n\n\nuse crossbeam::atomic::AtomicCell;\n\nuse futures::channel::oneshot;\n\n\n\nuse crate::{\n\n bdev::{nexus::nexus_bdev::Error as NexusError, Nexus},\n\n core::{Bdev, Cores, Protocol, Share},\n\n subsys::NvmfSubsystem,\n\n};\n\n\n\n/// TODO\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub(super) enum NexusPauseState {\n\n Unpaused,\n\n Pausing,\n\n Paused,\n", "file_path": 
"io-engine/src/bdev/nexus/nexus_io_subsystem.rs", "rank": 52, "score": 14.52554298733389 }, { "content": "use std::{\n\n fmt::{Debug, Display, Formatter},\n\n marker::PhantomData,\n\n};\n\n\n\nuse crossbeam::atomic::AtomicCell;\n\nuse futures::{channel::mpsc, SinkExt, StreamExt};\n\nuse nix::errno::Errno;\n\nuse serde::Serialize;\n\nuse snafu::{ResultExt, Snafu};\n\nuse url::Url;\n\n\n\nuse super::{nexus_iter_mut, nexus_lookup_mut, DrEvent, VerboseError};\n\n\n\nuse crate::{\n\n bdev::{device_create, device_destroy, device_lookup},\n\n core::{\n\n BlockDevice,\n\n BlockDeviceDescriptor,\n\n BlockDeviceHandle,\n", "file_path": "io-engine/src/bdev/nexus/nexus_child.rs", "rank": 53, "score": 14.494635660018524 }, { "content": "//! Simple utility functions to help with parsing URIs.\n\n\n\nuse std::str::ParseBoolError;\n\n\n\nuse url::Url;\n\n\n\npub(crate) fn segments(url: &Url) -> Vec<&str> {\n\n if let Some(iter) = url.path_segments() {\n\n let mut segments: Vec<&str> = iter.collect();\n\n\n\n if segments.len() == 1 && segments[0].is_empty() {\n\n segments.remove(0);\n\n }\n\n\n\n return segments;\n\n }\n\n\n\n Vec::new()\n\n}\n\n\n", "file_path": "io-engine/src/bdev/util/uri.rs", "rank": 54, "score": 14.435495778998803 }, { "content": "//! 
helper routines to drive IO to the nexus for testing purposes\n\nuse futures::channel::oneshot;\n\nuse rand::Rng;\n\nuse std::{ptr::NonNull, sync::Mutex};\n\n\n\nuse spdk_rs::libspdk::{\n\n spdk_bdev_free_io,\n\n spdk_bdev_io,\n\n spdk_bdev_read,\n\n spdk_bdev_reset,\n\n spdk_bdev_write,\n\n};\n\n\n\nuse crate::{\n\n core::{Cores, Descriptor, IoChannel, Mthread, UntypedBdev},\n\n ffihelper::pair,\n\n nexus_uri::bdev_create,\n\n};\n\n\n\nuse spdk_rs::DmaBuf;\n", "file_path": "io-engine/src/core/io_driver.rs", "rank": 55, "score": 14.425468238894341 }, { "content": "use crate::{\n\n bdev::{\n\n nexus,\n\n nexus::{nexus_lookup_uuid_mut, NexusChild, NexusStatus, Reason},\n\n },\n\n core::{Protocol, Share},\n\n grpc::{rpc_submit, GrpcClientContext, GrpcResult, Serializer},\n\n rebuild::{RebuildJob, RebuildState, RebuildStats},\n\n};\n\nuse futures::FutureExt;\n\nuse std::{convert::TryFrom, fmt::Debug, ops::Deref, pin::Pin};\n\nuse tonic::{Request, Response, Status};\n\n\n\nuse rpc::mayastor::v1::nexus::*;\n\n\n\n#[derive(Debug)]\n", "file_path": "io-engine/src/grpc/v1/nexus.rs", "rank": 56, "score": 14.414515499632671 }, { "content": "use crate::{\n\n bdev::{nexus, NvmeControllerState},\n\n core::{BlockDeviceIoStats, CoreError, MayastorFeatures},\n\n grpc::{\n\n controller_grpc::{\n\n controller_stats,\n\n list_controllers,\n\n NvmeControllerInfo,\n\n },\n\n rpc_submit,\n\n GrpcClientContext,\n\n GrpcResult,\n\n Serializer,\n\n },\n\n host::{blk_device, resource},\n\n};\n\nuse futures::FutureExt;\n\nuse rpc::mayastor::v1::host as rpc;\n\nuse std::panic::AssertUnwindSafe;\n\nuse tonic::{Request, Response, Status};\n", "file_path": "io-engine/src/grpc/v1/host.rs", "rank": 57, "score": 14.414515499632673 }, { "content": "//! A Registration subsystem is used to keep control-plane in the loop\n\n//! 
about the lifecycle of mayastor instances.\n\n\n\n/// Module for grpc registration implementation\n\npub mod registration_grpc;\n\n\n\nuse crate::core::MayastorEnvironment;\n\nuse http::Uri;\n\nuse registration_grpc::Registration;\n\nuse spdk_rs::libspdk::{\n\n spdk_add_subsystem,\n\n spdk_subsystem,\n\n spdk_subsystem_fini_next,\n\n spdk_subsystem_init_next,\n\n};\n\nuse std::convert::TryFrom;\n\n\n\nmacro_rules! default_addr {\n\n () => {\n\n \"https://core\"\n\n };\n\n}\n\nmacro_rules! default_port {\n\n () => {\n\n 50051\n\n };\n\n}\n\n\n\n/// Default grpc server port\n", "file_path": "io-engine/src/subsys/registration/mod.rs", "rank": 58, "score": 14.272215836480514 }, { "content": "use crate::lvs::Error;\n\nuse async_trait::async_trait;\n\nuse pin_utils::core_reexport::fmt::Formatter;\n\nuse std::{convert::TryFrom, fmt::Display, pin::Pin};\n\n\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\n/// Indicates what protocol the bdev is shared as\n\npub enum Protocol {\n\n /// not shared by any of the variants\n\n Off,\n\n /// shared as NVMe-oF TCP\n\n Nvmf,\n\n}\n\n\n\nimpl TryFrom<i32> for Protocol {\n\n type Error = Error;\n\n\n\n fn try_from(value: i32) -> Result<Self, Self::Error> {\n\n match value {\n\n 0 => Ok(Self::Off),\n", "file_path": "io-engine/src/core/share.rs", "rank": 59, "score": 14.177384177915457 }, { "content": "use std::{fmt::Display, fs, path::Path, sync::Mutex};\n\n\n\nuse futures::channel::oneshot;\n\nuse once_cell::sync::{Lazy, OnceCell};\n\nuse serde::{Deserialize, Serialize};\n\nuse tonic::Status;\n\n\n\nuse crate::{\n\n bdev::nexus::VerboseError,\n\n core::{runtime, Cores, Mthread, Reactor, Share},\n\n grpc::rpc_submit,\n\n lvs::{Error as LvsError, Lvs},\n\n pool::{Pool as SpdkPool, PoolArgs, PoolsIter},\n\n replica::ShareType,\n\n};\n\n\n\nstatic CONFIG_FILE: OnceCell<String> = OnceCell::new();\n\n\n\n/// Initialise the config file location\n", "file_path": "io-engine/src/subsys/config/pool.rs", "rank": 60, "score": 14.17233928768777 }, { 
"content": "//!\n\n//! The malloc bdev as the name implies, creates an in memory disk. Note\n\n//! that the backing memory is allocated from huge pages and not from the\n\n//! heap. IOW, you must ensure you do not run out of huge pages while using\n\n//! this.\n\nuse std::{collections::HashMap, convert::TryFrom};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::channel::oneshot;\n\nuse nix::errno::Errno;\n\nuse snafu::ResultExt;\n\nuse url::Url;\n\n\n\nuse spdk_rs::{\n\n libspdk::{create_malloc_disk, delete_malloc_disk, spdk_bdev},\n\n UntypedBdev,\n\n};\n\n\n\nuse crate::{\n\n bdev::{dev::reject_unknown_parameters, util::uri, CreateDestroy, GetName},\n", "file_path": "io-engine/src/bdev/malloc.rs", "rank": 61, "score": 14.132268170085139 }, { "content": "use std::{collections::HashMap, convert::TryFrom, ffi::CString};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::channel::oneshot;\n\nuse nix::errno::Errno;\n\nuse snafu::ResultExt;\n\nuse url::Url;\n\n\n\nuse spdk_rs::libspdk::{bdev_aio_delete, create_aio_bdev};\n\n\n\nuse crate::{\n\n bdev::{dev::reject_unknown_parameters, util::uri, CreateDestroy, GetName},\n\n core::UntypedBdev,\n\n ffihelper::{cb_arg, done_errno_cb, ErrnoResult},\n\n nexus_uri::{self, NexusBdevError},\n\n};\n\n\n\n#[derive(Debug)]\n\npub(super) struct Aio {\n\n name: String,\n", "file_path": "io-engine/src/bdev/aio.rs", "rank": 62, "score": 13.999145335355351 }, { "content": "//! As the name implies, this is a dummy driver that discards all writes and\n\n//! returns undefined data for reads. It's useful for benchmarking the I/O stack\n\n//! 
with minimal overhead and should *NEVER* be used with *real* data.\n\nuse std::{collections::HashMap, convert::TryFrom};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::channel::oneshot;\n\nuse nix::errno::Errno;\n\nuse snafu::ResultExt;\n\nuse url::Url;\n\nuse uuid::Uuid;\n\n\n\nuse crate::{\n\n bdev::{dev::reject_unknown_parameters, util::uri, CreateDestroy, GetName},\n\n core::UntypedBdev,\n\n ffihelper::{cb_arg, done_errno_cb, ErrnoResult, IntoCString},\n\n nexus_uri::{\n\n NexusBdevError,\n\n {self},\n\n },\n", "file_path": "io-engine/src/bdev/null.rs", "rank": 63, "score": 13.993999806326855 }, { "content": "use std::{collections::HashMap, convert::TryFrom, ffi::CString};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::channel::oneshot;\n\nuse snafu::ResultExt;\n\nuse url::Url;\n\n\n\nuse spdk_rs::libspdk::{create_uring_bdev, delete_uring_bdev};\n\n\n\nuse crate::{\n\n bdev::{dev::reject_unknown_parameters, util::uri, CreateDestroy, GetName},\n\n core::UntypedBdev,\n\n ffihelper::{cb_arg, done_errno_cb, ErrnoResult},\n\n nexus_uri::{self, NexusBdevError},\n\n};\n\n\n\n#[derive(Debug)]\n\npub(super) struct Uring {\n\n name: String,\n\n alias: String,\n", "file_path": "io-engine/src/bdev/uring.rs", "rank": 64, "score": 13.977729274917015 }, { "content": "use composer::RpcHandle;\n\nuse rpc::mayastor::{\n\n Bdev,\n\n CreateNexusRequest,\n\n CreatePoolRequest,\n\n CreateReplicaRequest,\n\n Null,\n\n Replica,\n\n ShareProtocolReplica,\n\n ShareReplicaRequest,\n\n};\n\nuse std::str::FromStr;\n\nuse tracing::info;\n\n\n\npub mod common;\n\nuse common::compose::Builder;\n\n\n\nconst DISKSIZE_KB: u64 = 96 * 1024;\n\nconst VOLUME_SIZE_MB: u64 = (DISKSIZE_KB / 1024) / 2;\n\nconst VOLUME_SIZE_B: u64 = VOLUME_SIZE_MB * 1024 * 1024;\n\nconst VOLUME_UUID: &str = \"cb9e1a5c-7af8-44a7-b3ae-05390be75d83\";\n\n\n\n// pool name for mayastor from handle_index\n", "file_path": "io-engine/tests/replica_uri.rs", "rank": 65, "score": 13.963095584620778 }, { "content": " 
CreatePoolRequest,\n\n CreateReplicaRequest,\n\n DestroyNexusRequest,\n\n Null,\n\n PublishNexusRequest,\n\n};\n\nuse std::process::{Command, ExitStatus};\n\n\n\npub mod common;\n\nuse common::{compose::Builder, MayastorTest};\n\n\n\nextern crate libnvme_rs;\n\n\n\nstatic POOL_NAME: &str = \"tpool\";\n\nstatic NXNAME: &str = \"nexus0\";\n\nstatic UUID: &str = \"cdc2a7db-3ac3-403a-af80-7fadc1581c47\";\n\nstatic HOSTNQN: &str = NVME_NQN_PREFIX;\n\nstatic HOSTID0: &str = \"53b35ce9-8e71-49a9-ab9b-cba7c5670fad\";\n\nstatic HOSTID1: &str = \"c1affd2d-ef79-4ba4-b5cf-8eb48f9c07d0\";\n\n\n\nstatic DISKNAME1: &str = \"/tmp/disk1.img\";\n\nstatic BDEVNAME1: &str = \"aio:///tmp/disk1.img?blk_size=512\";\n\nstatic DISKNAME2: &str = \"/tmp/disk2.img\";\n\nstatic BDEVNAME2: &str = \"aio:///host/tmp/disk2.img?blk_size=512\";\n\n\n\nstatic MAYASTOR: OnceCell<MayastorTest> = OnceCell::new();\n\n\n", "file_path": "io-engine/tests/nexus_io.rs", "rank": 66, "score": 13.960678270261077 }, { "content": "//! The high-level replica object methods.\n\n//!\n\n//! Replica is a logical data volume exported over nvmf (in SPDK terminology\n\n//! an lvol). Here we define methods for easy management of replicas.\n\n#![allow(dead_code)]\n\nuse std::{ffi::CStr, os::raw::c_char};\n\n\n\nuse ::rpc::mayastor as rpc;\n\nuse snafu::{ResultExt, Snafu};\n\n\n\nuse spdk_rs::libspdk::{spdk_lvol, vbdev_lvol_get_from_bdev};\n\n\n\nuse crate::{\n\n core::{Bdev, UntypedBdev},\n\n subsys::NvmfError,\n\n target,\n\n};\n\n\n\n/// These are high-level context errors one for each rpc method.\n\n#[derive(Debug, Snafu)]\n", "file_path": "io-engine/src/replica.rs", "rank": 67, "score": 13.94264550051463 }, { "content": "use std::{cell::RefCell, os::raw::c_void, time::Duration};\n\n\n\nuse spdk_rs::libspdk::{\n\n spdk_poller,\n\n spdk_poller_register,\n\n spdk_poller_unregister,\n\n};\n\n\n\nthread_local! 
{\n\n /// Delay poller pointer for unregistering the poller at the end\n\n static DELAY_POLLER: RefCell<Option<*mut spdk_poller>> = RefCell::new(None);\n\n}\n\n\n\n/// Delay function called from the spdk poller to prevent draining of cpu\n\n/// in cases when performance is not a priority (i.e. unit tests).\n\nextern \"C\" fn sleep(_ctx: *mut c_void) -> i32 {\n\n std::thread::sleep(Duration::from_millis(1));\n\n 0\n\n}\n\n\n\n/// Start delaying reactor every 1ms by 1ms. It blocks the thread for a\n\n/// short moment so it is not able to perform any useful work when sleeping.\n", "file_path": "io-engine/src/delay.rs", "rank": 68, "score": 13.9233099446414 }, { "content": "use crate::{\n\n core,\n\n core::{CoreError, Protocol, Share},\n\n grpc::{rpc_submit, GrpcResult},\n\n nexus_uri::{bdev_create, bdev_destroy, NexusBdevError},\n\n};\n\nuse rpc::mayastor::v1::bdev::{\n\n Bdev,\n\n BdevRpc,\n\n BdevShareRequest,\n\n BdevShareResponse,\n\n BdevUnshareRequest,\n\n CreateBdevRequest,\n\n CreateBdevResponse,\n\n DestroyBdevRequest,\n\n ListBdevOptions,\n\n ListBdevResponse,\n\n};\n\nuse std::{convert::TryFrom, pin::Pin};\n\nuse tonic::{Request, Response, Status};\n", "file_path": "io-engine/src/grpc/v1/bdev.rs", "rank": 69, "score": 13.84814449352404 }, { "content": "use std::ffi::{c_void, CString};\n\n\n\nuse snafu::Snafu;\n\nuse spdk_rs::libspdk::{\n\n spdk_get_thread,\n\n spdk_set_thread,\n\n spdk_thread,\n\n spdk_thread_create,\n\n spdk_thread_destroy,\n\n spdk_thread_exit,\n\n spdk_thread_get_by_id,\n\n spdk_thread_get_id,\n\n spdk_thread_get_name,\n\n spdk_thread_is_exited,\n\n spdk_thread_poll,\n\n spdk_thread_send_msg,\n\n};\n\n\n\nuse crate::core::{cpu_cores::CpuMask, CoreError, Cores, Reactors};\n\nuse futures::channel::oneshot::{channel, Receiver, Sender};\n", "file_path": "io-engine/src/core/thread.rs", "rank": 70, "score": 13.842857974856933 }, { "content": "//!\n\n//! 
gRPC method to proxy calls to (local) SPDK json-rpc service\n\n\n\nuse crate::grpc::GrpcResult;\n\nuse ::rpc::mayastor::v1::json::{JsonRpc, JsonRpcRequest, JsonRpcResponse};\n\nuse jsonrpc::error::Error;\n\nuse std::borrow::Cow;\n\nuse tonic::{Request, Response};\n\n\n\n/// RPC Service for local SPDK json-rpc calls\n\n#[derive(Debug)]\n\npub struct JsonService {\n\n // FIXME: using a static lifetime here is not ideal\n\n rpc_addr: Cow<'static, str>,\n\n}\n\n\n\nimpl JsonService {\n\n pub fn new(rpc_addr: Cow<'static, str>) -> Self {\n\n Self {\n\n rpc_addr,\n", "file_path": "io-engine/src/grpc/v1/json.rs", "rank": 71, "score": 13.79003159896104 }, { "content": "use crate::{\n\n core::{Bdev, Protocol, Share, UntypedBdev},\n\n grpc::{rpc_submit, GrpcClientContext, GrpcResult, Serializer},\n\n lvs::{Error as LvsError, Lvol, Lvs},\n\n nexus_uri::NexusBdevError,\n\n};\n\nuse ::function_name::named;\n\nuse futures::FutureExt;\n\nuse nix::errno::Errno;\n\nuse rpc::mayastor::v1::replica::*;\n\nuse std::{convert::TryFrom, panic::AssertUnwindSafe, pin::Pin};\n\nuse tonic::{Request, Response, Status};\n\n\n\n#[derive(Debug)]\n\n#[allow(dead_code)]\n\npub struct ReplicaService {\n\n name: String,\n\n client_context: tokio::sync::Mutex<Option<GrpcClientContext>>,\n\n}\n\n\n", "file_path": "io-engine/src/grpc/v1/replica.rs", "rank": 72, "score": 13.754719769627567 }, { "content": "//!\n\n//! Thread safe memory pool implemented by using DPDKs rte_ring constructs.\n\n//! This is avoids doing memory allocations in the hot path.\n\n//!\n\n//! 
Borrowed buffers are accounted for and validated upon freeing.\n\n\n\nuse std::{marker::PhantomData, mem::size_of, os::raw::c_void, ptr::NonNull};\n\n\n\nuse spdk_rs::libspdk::{\n\n spdk_mempool,\n\n spdk_mempool_count,\n\n spdk_mempool_create,\n\n spdk_mempool_free,\n\n spdk_mempool_get,\n\n spdk_mempool_put,\n\n};\n\n\n\nuse crate::ffihelper::IntoCString;\n\n\n\npub struct MemoryPool<T: Sized> {\n", "file_path": "io-engine/src/core/mempool.rs", "rank": 73, "score": 13.742315769034414 }, { "content": "//!\n\n//! gRPC method to proxy calls to (local) SPDK json-rpc service\n\n\n\nuse crate::grpc::GrpcResult;\n\nuse ::rpc::mayastor::{json_rpc_server::JsonRpc, JsonRpcReply, JsonRpcRequest};\n\nuse jsonrpc::error::Error;\n\nuse std::borrow::Cow;\n\nuse tonic::{Request, Response};\n\n\n\n/// RPC Service for local SPDK json-rpc calls\n\n#[derive(Debug)]\n\npub struct JsonRpcSvc {\n\n // FIXME: using a static lifetime here is not ideal\n\n rpc_addr: Cow<'static, str>,\n\n}\n\n\n\nimpl JsonRpcSvc {\n\n pub fn new(rpc_addr: Cow<'static, str>) -> Self {\n\n Self {\n\n rpc_addr,\n", "file_path": "io-engine/src/grpc/json_grpc.rs", "rank": 74, "score": 13.653328602072364 }, { "content": "use std::{sync::Mutex, time::Duration};\n\n\n\nuse crossbeam::channel::unbounded;\n\nuse once_cell::sync::{Lazy, OnceCell};\n\nuse tracing::error;\n\n\n\nuse io_engine::{\n\n bdev::{device_open, nexus::nexus_lookup_mut},\n\n core::{MayastorCliArgs, Mthread, Protocol},\n\n rebuild::{RebuildJob, RebuildState, RebuildState::Completed},\n\n};\n\n\n\npub mod common;\n\nuse common::{compose::MayastorTest, wait_for_rebuild};\n\n\n\n// each test `should` use a different nexus name to prevent clashing with\n\n// one another. 
This allows the failed tests to `panic gracefully` improving\n\n// the output log and allowing the CI to fail gracefully as well\n\nstatic NEXUS_NAME: Lazy<Mutex<&str>> = Lazy::new(|| Mutex::new(\"Default\"));\n", "file_path": "io-engine/tests/nexus_rebuild.rs", "rank": 75, "score": 13.517960283058821 }, { "content": "#[macro_use]\n\nextern crate assert_matches;\n\n\n\nuse io_engine::{\n\n bdev::nexus::{nexus_create, nexus_lookup_mut, ChildState, Reason},\n\n core::{MayastorCliArgs, Protocol},\n\n};\n\n\n\nstatic NEXUS_NAME: &str = \"nexus\";\n\n\n\nstatic FILE_SIZE: u64 = 64 * 1024 * 1024; // 64MiB\n\n\n\nstatic DISKNAME1: &str = \"/tmp/disk1.img\";\n\nstatic BDEVNAME1: &str = \"aio:///tmp/disk1.img?blk_size=512\";\n\n\n\nstatic DISKNAME2: &str = \"/tmp/disk2.img\";\n\nstatic BDEVNAME2: &str = \"aio:///tmp/disk2.img?blk_size=512\";\n\n\n\npub mod common;\n\nuse common::MayastorTest;\n\n\n", "file_path": "io-engine/tests/add_child.rs", "rank": 76, "score": 13.14100227952559 }, { "content": " })\n\n })\n\n }\n\n\n\n extern \"C\" fn pg_destroy(arg: *mut c_void) {\n\n unsafe {\n\n let pg = Box::from_raw(arg as *mut PollGroup);\n\n spdk_nvmf_poll_group_destroy(\n\n pg.group_ptr(),\n\n Some(pg_destroy_done),\n\n std::ptr::null_mut(),\n\n )\n\n }\n\n }\n\n\n\n NVMF_PGS.with(|t| {\n\n t.borrow().iter().for_each(|pg| {\n\n trace!(\"destroying pg: {:?}\", pg);\n\n pg.thread.send_msg(\n\n pg_destroy,\n", "file_path": "io-engine/src/subsys/nvmf/target.rs", "rank": 77, "score": 13.080029434722302 }, { "content": "use std::{os::raw::c_void, ptr::NonNull};\n\n\n\nuse crate::ffihelper::IntoCString;\n\nuse spdk_rs::libspdk::{\n\n spdk_for_each_channel,\n\n spdk_for_each_channel_continue,\n\n spdk_io_channel,\n\n spdk_io_channel_iter,\n\n spdk_io_channel_iter_get_channel,\n\n spdk_io_channel_iter_get_ctx,\n\n spdk_io_device_register,\n\n spdk_io_device_unregister,\n\n};\n\n\n\n/// TODO\n\n#[derive(Debug)]\n\npub struct IoDevice(NonNull<c_void>);\n\n\n\n// TODO: is `IoDevice` really 
a Sync/Send type?\n\nunsafe impl Sync for IoDevice {}\n\nunsafe impl Send for IoDevice {}\n\n\n\n/// TODO\n", "file_path": "io-engine/src/core/io_device.rs", "rank": 78, "score": 12.944000623579253 }, { "content": " /// issue a reset to the bdev\n\n request_reset: bool,\n\n /// core to run this job on\n\n core: u32,\n\n /// thread this job is run on\n\n thread: Option<Mthread>,\n\n}\n\n\n\nimpl Job {\n\n extern \"C\" fn io_completion(\n\n bdev_io: *mut spdk_bdev_io,\n\n success: bool,\n\n arg: *mut std::ffi::c_void,\n\n ) {\n\n let ioq: &mut Io = unsafe { &mut *arg.cast() };\n\n let job = unsafe { ioq.job.as_mut() };\n\n\n\n if !success {\n\n // trace!(\n\n // \"core: {} mthread: {:?}{}: {:#?}\",\n", "file_path": "io-engine/src/core/io_driver.rs", "rank": 79, "score": 12.564659805537188 }, { "content": " device: unsafe {\n\n std::ffi::CStr::from_ptr(crate::nvme_ns_get_name(n))\n\n }\n\n .to_str()\n\n .unwrap()\n\n .to_string(),\n\n firmware: unsafe {\n\n std::ffi::CStr::from_ptr(crate::nvme_ns_get_firmware(n))\n\n }\n\n .to_str()\n\n .unwrap()\n\n .to_string(),\n\n model: unsafe {\n\n std::ffi::CStr::from_ptr(crate::nvme_ns_get_model(n))\n\n }\n\n .to_str()\n\n .unwrap()\n\n .to_string(),\n\n serial: unsafe {\n\n std::ffi::CStr::from_ptr(crate::nvme_ns_get_serial(n))\n", "file_path": "libnvme-rs/src/nvme_uri.rs", "rank": 80, "score": 12.53251767160851 }, { "content": "//! The persistent store is used to save information that is required by\n\n//! Mayastor across restarts.\n\n//!\n\n//! etcd is used as the backing store and is interacted with through the use of\n\n//! the etcd-client crate. This crate has a dependency on the tokio async\n\n//! 
runtime.\n\nuse crate::{\n\n core,\n\n store::{\n\n etcd::Etcd,\n\n store_defs::{\n\n DeleteWait,\n\n GetWait,\n\n PutWait,\n\n Store,\n\n StoreError,\n\n StoreKey,\n\n StoreValue,\n\n },\n\n },\n", "file_path": "io-engine/src/persistent_store.rs", "rank": 81, "score": 12.489808430395811 }, { "content": " nexus_iter_mut,\n\n nexus_lookup,\n\n nexus_lookup_mut,\n\n nexus_lookup_name_uuid,\n\n nexus_lookup_uuid_mut,\n\n};\n\npub(crate) use nexus_module::{NexusModule, NEXUS_MODULE_NAME};\n\npub(crate) use nexus_nbd::{NbdDisk, NbdError};\n\npub(crate) use nexus_persistence::PersistOp;\n\npub use nexus_persistence::{ChildInfo, NexusInfo};\n\n\n\n/// TODO\n\n#[derive(Deserialize)]\n", "file_path": "io-engine/src/bdev/nexus/mod.rs", "rank": 82, "score": 12.219354025174532 }, { "content": "use async_trait::async_trait;\n\n\n\npub use dev::{device_create, device_destroy, device_lookup, device_open};\n\npub use device::{bdev_io_ctx_pool_init, SpdkBlockDevice};\n\npub use nexus::{Nexus, NexusInfo, NexusState};\n\npub use nvmx::{\n\n nvme_io_ctx_pool_init,\n\n NvmeController,\n\n NvmeControllerState,\n\n NVME_CONTROLLERS,\n\n};\n\n\n\nmod aio;\n\npub(crate) mod dev;\n\npub(crate) use dev::uri;\n\npub(crate) mod device;\n\nmod loopback;\n\nmod malloc;\n\npub mod nexus;\n\nmod null;\n\npub mod null_ng;\n\nmod nvme;\n\nmod nvmf;\n\npub(crate) mod nvmx;\n\nmod uring;\n\npub mod util;\n\n\n", "file_path": "io-engine/src/bdev/mod.rs", "rank": 83, "score": 12.120117331083229 }, { "content": "use snafu::Snafu;\n\n\n\n#[derive(Debug, Snafu)]\n\n#[allow(missing_docs)]\n\n#[snafu(visibility = \"pub(crate)\")]\n\npub enum NvmeError {\n\n #[snafu(display(\"IO error:\"))]\n\n IoError { source: std::io::Error },\n\n #[snafu(display(\"Lookup host failed: {}\", rc))]\n\n LookupHostError { rc: i32 },\n\n #[snafu(display(\"Create controller failed: {}\", rc))]\n\n CreateCtrlrError { rc: i32 },\n\n #[snafu(display(\"No controller found: {}\", rc))]\n\n AddCtrlrError { rc: i32 },\n\n 
#[snafu(display(\"File IO error: {}\", rc))]\n\n FileIoError { rc: i32 },\n\n #[snafu(display(\"NVMe URL invalid: {}\", source))]\n\n UrlError { source: url::ParseError },\n\n}\n\n\n\nimpl From<std::io::Error> for NvmeError {\n\n fn from(source: std::io::Error) -> NvmeError {\n\n NvmeError::IoError {\n\n source,\n\n }\n\n }\n\n}\n", "file_path": "libnvme-rs/src/error.rs", "rank": 84, "score": 11.900904430146563 }, { "content": " chan.clear(); // TODO: use chan drop.\n\n }\n\n}\n\n\n\nimpl IoDeviceChannelTraverse for Nexus<'_> {}\n\n\n\nunsafe fn unsafe_static_ptr(nexus: &Nexus) -> *mut Nexus<'static> {\n\n let r = ::std::mem::transmute::<_, &'static Nexus>(nexus);\n\n r as *const Nexus as *mut Nexus\n\n}\n\n\n\nimpl<'n> BdevOps for Nexus<'n> {\n\n type ChannelData = NexusChannel;\n\n type BdevData = Nexus<'n>;\n\n type IoDev = Nexus<'n>;\n\n\n\n /// TODO\n\n fn destruct(mut self: Pin<&mut Self>) {\n\n // A closed operation might already be in progress calling unregister\n\n // will trip an assertion within the external libraries\n", "file_path": "io-engine/src/bdev/nexus/nexus_bdev.rs", "rank": 85, "score": 11.752357536415072 }, { "content": " UnshareNexus,\n\n NEXUS_PRODUCT_ID,\n\n};\n\npub(crate) use nexus_channel::{\n\n fault_nexus_child,\n\n DrEvent,\n\n NexusChannel,\n\n NexusChannelInner,\n\n};\n\npub use nexus_child::{\n\n lookup_nexus_child,\n\n ChildError,\n\n ChildState,\n\n NexusChild,\n\n Reason,\n\n};\n\npub(crate) use nexus_io::{nexus_submit_request, NioCtx};\n\npub(self) use nexus_io_subsystem::NexusIoSubsystem;\n\npub use nexus_iter::{\n\n nexus_iter,\n", "file_path": "io-engine/src/bdev/nexus/mod.rs", "rank": 86, "score": 11.69882448481868 }, { "content": " Self {\n\n context,\n\n span,\n\n ansi,\n\n }\n\n }\n\n}\n\n\n\n// Display trace context (span) information\n\nimpl<'a, S, N> std::fmt::Display for CustomContext<'a, S, N>\n\nwhere\n\n S: tracing_core::subscriber::Subscriber + for<'s> LookupSpan<'s>,\n\n N: for<'w> FormatFields<'w> + 
'static,\n\n{\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let bold = if self.ansi {\n\n Style::new().bold()\n\n } else {\n\n Style::new()\n\n };\n", "file_path": "io-engine/src/logger.rs", "rank": 87, "score": 11.670913227440792 }, { "content": "///! Utility functions for reading and modifying the state of sysfs\n\n/// objects.\n\nuse std::path::Path;\n\nuse std::{\n\n collections::HashMap,\n\n fs,\n\n io::{BufRead, BufReader, Error, ErrorKind, Result},\n\n str::FromStr,\n\n string,\n\n};\n\n\n\n/// Read and parse value from a file\n", "file_path": "sysfs/src/lib.rs", "rank": 88, "score": 11.558438123526127 }, { "content": "//! Unit tests for json-rpc client module\n\n\n\nuse self::error::Error;\n\nuse super::*;\n\nuse nix::errno::Errno;\n\nuse serde_json::json;\n\nuse std::{fs, panic, path::Path};\n\nuse tokio::{\n\n io::{AsyncReadExt, ErrorKind},\n\n net::UnixListener,\n\n runtime::Runtime,\n\n};\n\n\n\n/// Socket path to the test json-rpc server\n\nconst SOCK_PATH: &str = \"/tmp/jsonrpc-ut.sock\";\n\n\n\n/// Needed because jsonrpc error contains nested boxed std::error::Error trait.\n\nimpl panic::UnwindSafe for Error {}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n", "file_path": "jsonrpc/src/test.rs", "rank": 89, "score": 11.540694022033378 }, { "content": " spdk_subsystem_init_next,\n\n};\n\npub use subsystem::{NvmfSubsystem, SubType};\n\npub use target::Target;\n\n\n\nuse crate::{\n\n jsonrpc::{Code, RpcErrorCode},\n\n subsys::{nvmf::target::NVMF_TGT, Config},\n\n};\n\n\n\nmod admin_cmd;\n\nmod poll_groups;\n\nmod subsystem;\n\nmod target;\n\nmod transport;\n\n\n\n// wrapper around our NVMF subsystem used for registration\n\npub struct Nvmf(pub(crate) *mut spdk_subsystem);\n\n\n\nimpl Default for Nvmf {\n", "file_path": "io-engine/src/subsys/nvmf/mod.rs", "rank": 90, "score": 11.527680630179717 }, { "content": " NvmeRoot::new(unsafe { crate::nvme_scan(std::ptr::null()) });\n\n let hostiter = NvmeHostIterator::new(&r);\n\n 
for host in hostiter {\n\n let subsysiter = NvmeSubsystemIterator::new(host);\n\n for subsys in subsysiter {\n\n let cstr = unsafe {\n\n std::ffi::CStr::from_ptr(crate::nvme_subsystem_get_nqn(\n\n subsys,\n\n ))\n\n };\n\n if cstr.to_str().unwrap() != self.subsysnqn {\n\n continue;\n\n }\n\n let nsiter = NvmeNamespaceIterator::new(subsys);\n\n for ns in nsiter {\n\n devices.push(format!(\n\n \"/dev/{}\",\n\n unsafe {\n\n std::ffi::CStr::from_ptr(\n\n crate::nvme_ns_get_name(ns),\n", "file_path": "libnvme-rs/src/nvme_uri.rs", "rank": 91, "score": 11.509750543103486 }, { "content": " tracing::error!(\n\n \"========== Logs from container '{}' start:\",\n\n &name\n\n );\n\n let _ = std::process::Command::new(\"docker\")\n\n .args(&[\"logs\", name])\n\n .status();\n\n tracing::error!(\n\n \"========== Logs from container '{} end':\",\n\n name\n\n );\n\n });\n\n }\n\n\n\n if self.clean && (!thread::panicking() || self.allow_clean_on_panic) {\n\n self.containers.keys().for_each(|c| {\n\n std::process::Command::new(\"docker\")\n\n .args(&[\"kill\", c])\n\n .output()\n\n .unwrap();\n", "file_path": "composer/src/lib.rs", "rank": 92, "score": 11.451551170503121 }, { "content": "\n\n /// sends the specified NVMe Admin command, only read commands\n\n pub async fn nvme_admin(\n\n &self,\n\n nvme_cmd: &spdk_nvme_cmd,\n\n buffer: Option<&mut DmaBuf>,\n\n ) -> Result<(), CoreError> {\n\n trace!(\"Sending nvme_admin {}\", nvme_cmd.opc());\n\n let (s, r) = oneshot::channel::<bool>();\n\n // Use the spdk-rs variant spdk_bdev_nvme_admin_passthru that\n\n // assumes read commands\n\n let errno = unsafe {\n\n spdk_bdev_nvme_admin_passthru_ro(\n\n self.desc.as_ptr(),\n\n self.channel.as_ptr(),\n\n &*nvme_cmd,\n\n match buffer {\n\n Some(ref b) => ***b,\n\n None => std::ptr::null_mut(),\n\n },\n", "file_path": "io-engine/src/core/handle.rs", "rank": 93, "score": 11.440320540390111 }, { "content": "use std::env;\n\n\n", "file_path": "io-engine/build.rs", "rank": 94, "score": 
11.423758166628659 }, { "content": " spdk_nbd_disk,\n\n spdk_nbd_get_path,\n\n spdk_nbd_start,\n\n};\n\n\n\nuse crate::{\n\n core::{Mthread, Reactors},\n\n ffihelper::{cb_arg, errno_result_from_i32, ErrnoResult},\n\n};\n\n\n\n// include/uapi/linux/fs.h\n\nconst IOCTL_BLKGETSIZE: u32 = ior!(0x12, 114, std::mem::size_of::<u64>());\n\nconst SET_TIMEOUT: u32 = io!(0xab, 9);\n\nconst SET_SIZE: u32 = io!(0xab, 2);\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum NbdError {\n\n #[snafu(display(\"No free NBD devices available (is NBD kmod loaded?)\"))]\n\n Unavailable {},\n\n #[snafu(display(\"Failed to start NBD on {}\", dev))]\n", "file_path": "io-engine/src/bdev/nexus/nexus_nbd.rs", "rank": 95, "score": 11.419870456283965 }, { "content": "use serde_json::json;\n\n\n\nuse super::{nexus_iter, NioCtx};\n\n\n\nuse spdk_rs::{\n\n BdevModule,\n\n BdevModuleBuild,\n\n JsonWriteContext,\n\n WithModuleConfigJson,\n\n WithModuleFini,\n\n WithModuleGetCtxSize,\n\n WithModuleInit,\n\n};\n\n\n\n/// Name for Nexus Bdev module name.\n\npub(crate) const NEXUS_MODULE_NAME: &str = \"NEXUS_CAS_MODULE\";\n\n\n\n/// TODO\n\n#[derive(Debug)]\n\npub(crate) struct NexusModule {}\n", "file_path": "io-engine/src/bdev/nexus/nexus_module.rs", "rank": 96, "score": 11.317255238441597 }, { "content": "use spdk_rs::libspdk::spdk_nvme_cpl;\n\n\n\nuse crate::core::NvmeCommandStatus;\n\n\n\n#[derive(Debug, PartialEq)]\n", "file_path": "io-engine/src/bdev/nvmx/utils.rs", "rank": 97, "score": 11.306323950765762 }, { "content": "// Custom struct used to format trace events.\n\nstruct CustomFormat {\n\n ansi: bool,\n\n}\n\n\n\n// Format a trace event.\n\nimpl<S, N> FormatEvent<S, N> for CustomFormat\n\nwhere\n\n S: tracing_core::subscriber::Subscriber + for<'s> LookupSpan<'s>,\n\n N: for<'w> FormatFields<'w> + 'static,\n\n{\n\n fn format_event(\n\n &self,\n\n context: &FmtContext<'_, S, N>,\n\n writer: &mut dyn std::fmt::Write,\n\n event: &Event<'_>,\n\n ) -> std::fmt::Result {\n\n let normalized = 
event.normalized_metadata();\n\n let meta = normalized.as_ref().unwrap_or_else(|| event.metadata());\n\n\n\n write!(\n", "file_path": "io-engine/src/logger.rs", "rank": 98, "score": 11.214296238963124 }, { "content": "// Custom struct used to format the log/trace LEVEL\n\nstruct FormatLevel<'a> {\n\n level: &'a tracing::Level,\n\n ansi: bool,\n\n}\n\n\n\nimpl<'a> FormatLevel<'a> {\n\n fn new(level: &'a tracing::Level, ansi: bool) -> Self {\n\n Self {\n\n level,\n\n ansi,\n\n }\n\n }\n\n}\n\n\n\n// Display trace LEVEL.\n\nimpl std::fmt::Display for FormatLevel<'_> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n const TRACE: &str = \"TRACE\";\n\n const DEBUG: &str = \"DEBUG\";\n\n const INFO: &str = \" INFO\";\n", "file_path": "io-engine/src/logger.rs", "rank": 99, "score": 11.129761671931954 } ]
Rust
tests/curve_test.rs
AlexandruIca/verg
adcdac98f57492e8b7576e06a92491934d2e0fa1
use crate::common::default_blending; use verg::{ canvas::{Canvas, CanvasDescription, ViewBox}, color::{Color, FillRule, FillStyle}, geometry::{PathOps, Point}, math::{rotate_around, scale_around, translate, Angle}, }; mod common; const WIDTH: usize = 805; const HEIGHT: usize = 405; fn canvas_description() -> CanvasDescription { CanvasDescription { width: WIDTH, height: HEIGHT, viewbox: ViewBox { x: 0.0, y: 0.0, width: WIDTH as f64, height: HEIGHT as f64, }, background_color: Color::white(), tolerance: 0.25, } } const PATH: [PathOps; 4] = [ PathOps::MoveTo { x: 20.0, y: 360.0 }, PathOps::CubicTo { x1: 100.0, y1: 260.0, x2: 50.0, y2: 160.0, x3: 150.0, y3: 60.0, }, PathOps::CubicTo { x1: 120.0, y1: 160.0, x2: 150.0, y2: 260.0, x3: 200.0, y3: 360.0, }, PathOps::CubicTo { x1: 90.0, y1: 320.0, x2: 130.0, y2: 320.0, x3: 20.0, y3: 360.0, }, ]; const DARK_SLATE_BLUE: FillStyle = FillStyle::Plain(Color::dark_slate_blue()); const YELLOW: FillStyle = FillStyle::Plain(Color::yellow()); const BLACK: FillStyle = FillStyle::Plain(Color::black()); const FILL_RULE: FillRule = FillRule::NonZero; const MOON_WIDTH: f64 = 30.0; const MOON_HEIGHT: f64 = 80.0; const MOON_VERTICAL_OFFSET: f64 = 5.0; const MOON: [PathOps; 3] = [ PathOps::MoveTo { x: 0.0, y: 0.0 }, PathOps::CubicTo { x1: MOON_WIDTH / 2.0, y1: MOON_HEIGHT / 2.0 - MOON_VERTICAL_OFFSET, x2: MOON_WIDTH / 2.0, y2: MOON_HEIGHT / 2.0 + MOON_VERTICAL_OFFSET, x3: 0.0, y3: MOON_HEIGHT, }, PathOps::CubicTo { x1: MOON_WIDTH, y1: MOON_HEIGHT / 2.0 + MOON_VERTICAL_OFFSET, x2: MOON_WIDTH, y2: MOON_HEIGHT / 2.0 - MOON_VERTICAL_OFFSET, x3: 0.0, y3: 0.0, }, ]; const EYE_WIDTH: f64 = 150.0; const EYE_HEIGHT: f64 = 300.0; const EYE_QUARTER_W: f64 = EYE_WIDTH / 4.0; const EYE_QUARTER_H: f64 = EYE_HEIGHT / 4.0; const EYE_INNER_OFFSET: f64 = 10.0; const EYE: [PathOps; 10] = [ PathOps::MoveTo { x: EYE_WIDTH / 2.0, y: EYE_HEIGHT, }, PathOps::CubicTo { x1: EYE_WIDTH / 2.0 + EYE_QUARTER_W, y1: EYE_HEIGHT, x2: EYE_WIDTH, y2: EYE_HEIGHT - EYE_QUARTER_H, 
x3: EYE_WIDTH, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: EYE_WIDTH, y1: EYE_HEIGHT / 2.0 - EYE_QUARTER_H, x2: EYE_WIDTH - EYE_QUARTER_W, y2: 0.0, x3: EYE_WIDTH / 2.0, y3: 0.0, }, PathOps::CubicTo { x1: EYE_QUARTER_W, y1: 0.0, x2: 0.0, y2: EYE_QUARTER_H, x3: 0.0, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: 0.0, y1: EYE_HEIGHT / 2.0 + EYE_QUARTER_H, x2: EYE_QUARTER_W, y2: EYE_HEIGHT, x3: EYE_WIDTH / 2.0, y3: EYE_HEIGHT, }, PathOps::MoveTo { x: EYE_WIDTH / 2.0, y: EYE_HEIGHT - EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: EYE_QUARTER_W + EYE_INNER_OFFSET, y1: EYE_HEIGHT - EYE_INNER_OFFSET, x2: EYE_INNER_OFFSET, y2: EYE_HEIGHT - EYE_QUARTER_H - EYE_INNER_OFFSET, x3: EYE_INNER_OFFSET, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: EYE_INNER_OFFSET, y1: EYE_QUARTER_H + EYE_INNER_OFFSET, x2: EYE_QUARTER_W + EYE_INNER_OFFSET, y2: EYE_INNER_OFFSET, x3: EYE_WIDTH / 2.0, y3: EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: EYE_WIDTH / 2.0 + EYE_QUARTER_W - EYE_INNER_OFFSET, y1: EYE_INNER_OFFSET, x2: EYE_WIDTH - EYE_INNER_OFFSET, y2: EYE_QUARTER_H + EYE_INNER_OFFSET, x3: EYE_WIDTH - EYE_INNER_OFFSET, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: EYE_WIDTH - EYE_INNER_OFFSET, y1: EYE_HEIGHT / 2.0 + EYE_QUARTER_H - EYE_INNER_OFFSET, x2: EYE_WIDTH / 2.0 + EYE_QUARTER_W - EYE_INNER_OFFSET, y2: EYE_HEIGHT - EYE_INNER_OFFSET, x3: EYE_WIDTH / 2.0, y3: EYE_HEIGHT - EYE_INNER_OFFSET, }, ]; const SMALL_EYE_WIDTH: f64 = 80.0; const SMALL_EYE_HEIGHT: f64 = 1.5 * SMALL_EYE_WIDTH; const SMALL_EYE_QUARTER_W: f64 = SMALL_EYE_WIDTH / 4.0; const SMALL_EYE_QUARTER_H: f64 = SMALL_EYE_HEIGHT / 4.0; const SMALL_EYE_INNER_OFFSET: f64 = 10.0; const SMALL_EYE_UPPER: f64 = 4.0; const SMALL_EYE: [PathOps; 10] = [ PathOps::MoveTo { x: SMALL_EYE_WIDTH / 2.0, y: SMALL_EYE_HEIGHT, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH / 2.0 + SMALL_EYE_QUARTER_W, y1: SMALL_EYE_HEIGHT, x2: SMALL_EYE_WIDTH, y2: SMALL_EYE_HEIGHT - SMALL_EYE_QUARTER_H, x3: SMALL_EYE_WIDTH, y3: SMALL_EYE_HEIGHT / 2.0, }, 
PathOps::CubicTo { x1: SMALL_EYE_WIDTH, y1: SMALL_EYE_HEIGHT / 2.0 - SMALL_EYE_QUARTER_H, x2: SMALL_EYE_WIDTH - SMALL_EYE_QUARTER_W, y2: 0.0, x3: SMALL_EYE_WIDTH / 2.0, y3: 0.0, }, PathOps::CubicTo { x1: SMALL_EYE_QUARTER_W, y1: 0.0, x2: 0.0, y2: SMALL_EYE_QUARTER_H, x3: 0.0, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: 0.0, y1: SMALL_EYE_HEIGHT / 2.0 + SMALL_EYE_QUARTER_H, x2: SMALL_EYE_QUARTER_W, y2: SMALL_EYE_HEIGHT, x3: SMALL_EYE_WIDTH / 2.0, y3: SMALL_EYE_HEIGHT, }, PathOps::MoveTo { x: SMALL_EYE_WIDTH / 2.0, y: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: SMALL_EYE_QUARTER_W + SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_HEIGHT - SMALL_EYE_QUARTER_H - SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_INNER_OFFSET, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_QUARTER_H + SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_QUARTER_W + SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_UPPER * SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_WIDTH / 2.0, y3: SMALL_EYE_UPPER * SMALL_EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH / 2.0 + SMALL_EYE_QUARTER_W - SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_UPPER * SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_WIDTH - SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_QUARTER_H + SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_WIDTH - SMALL_EYE_INNER_OFFSET, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH - SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_HEIGHT / 2.0 + SMALL_EYE_QUARTER_H - SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_WIDTH / 2.0 + SMALL_EYE_QUARTER_W - SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_WIDTH / 2.0, y3: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, }, ]; const IRIS_WIDTH: f64 = 40.0; const IRIS_HEIGHT: f64 = 1.5 * IRIS_WIDTH; const IRIS_QUARTER_W: f64 = IRIS_WIDTH / 4.0; const IRIS_QUARTER_H: f64 = IRIS_HEIGHT / 4.0; const IRIS: [PathOps; 5] = [ PathOps::MoveTo { x: IRIS_WIDTH 
/ 2.0, y: IRIS_HEIGHT, }, PathOps::CubicTo { x1: IRIS_WIDTH / 2.0 + IRIS_QUARTER_W, y1: IRIS_HEIGHT, x2: IRIS_WIDTH, y2: IRIS_HEIGHT - IRIS_QUARTER_H, x3: IRIS_WIDTH, y3: IRIS_HEIGHT / 2.0, }, PathOps::CubicTo { x1: IRIS_WIDTH, y1: IRIS_HEIGHT / 2.0 - IRIS_QUARTER_H, x2: IRIS_WIDTH - IRIS_QUARTER_W, y2: 0.0, x3: IRIS_WIDTH / 2.0, y3: 0.0, }, PathOps::CubicTo { x1: IRIS_QUARTER_W, y1: 0.0, x2: 0.0, y2: IRIS_QUARTER_H, x3: 0.0, y3: IRIS_HEIGHT / 2.0, }, PathOps::CubicTo { x1: 0.0, y1: IRIS_HEIGHT / 2.0 + IRIS_QUARTER_H, x2: IRIS_QUARTER_W, y2: IRIS_HEIGHT, x3: IRIS_WIDTH / 2.0, y3: IRIS_HEIGHT, }, ]; fn callback(canvas: &mut Canvas) { let transform = |p: &Point| { let center = Point { x: MOON_WIDTH / 2.0, y: MOON_HEIGHT / 2.0, }; let p = rotate_around(&p, &center, Angle::from_degrees(65.0)); let p = scale_around(&p, &center, 1.5, 1.5); translate(&p, 280.0, 180.0) }; canvas.draw_shape(&MOON, YELLOW, FILL_RULE, transform); let transform = |x: f64, y: f64| { return move |p: &Point| translate(&p, x, y); }; canvas.draw_shape(&EYE, BLACK, FILL_RULE, transform(400.0, 50.0)); canvas.draw_shape(&EYE, BLACK, FILL_RULE, transform(400.0 + EYE_WIDTH, 50.0)); canvas.draw_shape(&SMALL_EYE, BLACK, FILL_RULE, transform(433.0, 220.0)); canvas.draw_shape( &SMALL_EYE, BLACK, FILL_RULE, transform(433.0 + EYE_WIDTH, 220.0), ); canvas.draw_shape(&IRIS, BLACK, FILL_RULE, transform(448.0, 267.0)); canvas.draw_shape(&IRIS, BLACK, FILL_RULE, transform(448.0 + EYE_WIDTH, 267.0)); } implement_test! { curve_test, canvas_description, callback | PATH, DARK_SLATE_BLUE, FILL_RULE, default_blending }
use crate::common::default_blending; use verg::{ canvas::{Canvas, CanvasDescription, ViewBox}, color::{Color, FillRule, FillStyle}, geometry::{PathOps, Point}, math::{rotate_around, scale_around, translate, Angle}, }; mod common; const WIDTH: usize = 805; const HEIGHT: usize = 405; fn canvas_description() -> CanvasDescription { CanvasDescription {
}, background_color: Color::white(), tolerance: 0.25, } } const PATH: [PathOps; 4] = [ PathOps::MoveTo { x: 20.0, y: 360.0 }, PathOps::CubicTo { x1: 100.0, y1: 260.0, x2: 50.0, y2: 160.0, x3: 150.0, y3: 60.0, }, PathOps::CubicTo { x1: 120.0, y1: 160.0, x2: 150.0, y2: 260.0, x3: 200.0, y3: 360.0, }, PathOps::CubicTo { x1: 90.0, y1: 320.0, x2: 130.0, y2: 320.0, x3: 20.0, y3: 360.0, }, ]; const DARK_SLATE_BLUE: FillStyle = FillStyle::Plain(Color::dark_slate_blue()); const YELLOW: FillStyle = FillStyle::Plain(Color::yellow()); const BLACK: FillStyle = FillStyle::Plain(Color::black()); const FILL_RULE: FillRule = FillRule::NonZero; const MOON_WIDTH: f64 = 30.0; const MOON_HEIGHT: f64 = 80.0; const MOON_VERTICAL_OFFSET: f64 = 5.0; const MOON: [PathOps; 3] = [ PathOps::MoveTo { x: 0.0, y: 0.0 }, PathOps::CubicTo { x1: MOON_WIDTH / 2.0, y1: MOON_HEIGHT / 2.0 - MOON_VERTICAL_OFFSET, x2: MOON_WIDTH / 2.0, y2: MOON_HEIGHT / 2.0 + MOON_VERTICAL_OFFSET, x3: 0.0, y3: MOON_HEIGHT, }, PathOps::CubicTo { x1: MOON_WIDTH, y1: MOON_HEIGHT / 2.0 + MOON_VERTICAL_OFFSET, x2: MOON_WIDTH, y2: MOON_HEIGHT / 2.0 - MOON_VERTICAL_OFFSET, x3: 0.0, y3: 0.0, }, ]; const EYE_WIDTH: f64 = 150.0; const EYE_HEIGHT: f64 = 300.0; const EYE_QUARTER_W: f64 = EYE_WIDTH / 4.0; const EYE_QUARTER_H: f64 = EYE_HEIGHT / 4.0; const EYE_INNER_OFFSET: f64 = 10.0; const EYE: [PathOps; 10] = [ PathOps::MoveTo { x: EYE_WIDTH / 2.0, y: EYE_HEIGHT, }, PathOps::CubicTo { x1: EYE_WIDTH / 2.0 + EYE_QUARTER_W, y1: EYE_HEIGHT, x2: EYE_WIDTH, y2: EYE_HEIGHT - EYE_QUARTER_H, x3: EYE_WIDTH, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: EYE_WIDTH, y1: EYE_HEIGHT / 2.0 - EYE_QUARTER_H, x2: EYE_WIDTH - EYE_QUARTER_W, y2: 0.0, x3: EYE_WIDTH / 2.0, y3: 0.0, }, PathOps::CubicTo { x1: EYE_QUARTER_W, y1: 0.0, x2: 0.0, y2: EYE_QUARTER_H, x3: 0.0, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: 0.0, y1: EYE_HEIGHT / 2.0 + EYE_QUARTER_H, x2: EYE_QUARTER_W, y2: EYE_HEIGHT, x3: EYE_WIDTH / 2.0, y3: EYE_HEIGHT, }, PathOps::MoveTo { x: 
EYE_WIDTH / 2.0, y: EYE_HEIGHT - EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: EYE_QUARTER_W + EYE_INNER_OFFSET, y1: EYE_HEIGHT - EYE_INNER_OFFSET, x2: EYE_INNER_OFFSET, y2: EYE_HEIGHT - EYE_QUARTER_H - EYE_INNER_OFFSET, x3: EYE_INNER_OFFSET, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: EYE_INNER_OFFSET, y1: EYE_QUARTER_H + EYE_INNER_OFFSET, x2: EYE_QUARTER_W + EYE_INNER_OFFSET, y2: EYE_INNER_OFFSET, x3: EYE_WIDTH / 2.0, y3: EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: EYE_WIDTH / 2.0 + EYE_QUARTER_W - EYE_INNER_OFFSET, y1: EYE_INNER_OFFSET, x2: EYE_WIDTH - EYE_INNER_OFFSET, y2: EYE_QUARTER_H + EYE_INNER_OFFSET, x3: EYE_WIDTH - EYE_INNER_OFFSET, y3: EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: EYE_WIDTH - EYE_INNER_OFFSET, y1: EYE_HEIGHT / 2.0 + EYE_QUARTER_H - EYE_INNER_OFFSET, x2: EYE_WIDTH / 2.0 + EYE_QUARTER_W - EYE_INNER_OFFSET, y2: EYE_HEIGHT - EYE_INNER_OFFSET, x3: EYE_WIDTH / 2.0, y3: EYE_HEIGHT - EYE_INNER_OFFSET, }, ]; const SMALL_EYE_WIDTH: f64 = 80.0; const SMALL_EYE_HEIGHT: f64 = 1.5 * SMALL_EYE_WIDTH; const SMALL_EYE_QUARTER_W: f64 = SMALL_EYE_WIDTH / 4.0; const SMALL_EYE_QUARTER_H: f64 = SMALL_EYE_HEIGHT / 4.0; const SMALL_EYE_INNER_OFFSET: f64 = 10.0; const SMALL_EYE_UPPER: f64 = 4.0; const SMALL_EYE: [PathOps; 10] = [ PathOps::MoveTo { x: SMALL_EYE_WIDTH / 2.0, y: SMALL_EYE_HEIGHT, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH / 2.0 + SMALL_EYE_QUARTER_W, y1: SMALL_EYE_HEIGHT, x2: SMALL_EYE_WIDTH, y2: SMALL_EYE_HEIGHT - SMALL_EYE_QUARTER_H, x3: SMALL_EYE_WIDTH, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH, y1: SMALL_EYE_HEIGHT / 2.0 - SMALL_EYE_QUARTER_H, x2: SMALL_EYE_WIDTH - SMALL_EYE_QUARTER_W, y2: 0.0, x3: SMALL_EYE_WIDTH / 2.0, y3: 0.0, }, PathOps::CubicTo { x1: SMALL_EYE_QUARTER_W, y1: 0.0, x2: 0.0, y2: SMALL_EYE_QUARTER_H, x3: 0.0, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: 0.0, y1: SMALL_EYE_HEIGHT / 2.0 + SMALL_EYE_QUARTER_H, x2: SMALL_EYE_QUARTER_W, y2: SMALL_EYE_HEIGHT, x3: SMALL_EYE_WIDTH / 2.0, y3: 
SMALL_EYE_HEIGHT, }, PathOps::MoveTo { x: SMALL_EYE_WIDTH / 2.0, y: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: SMALL_EYE_QUARTER_W + SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_HEIGHT - SMALL_EYE_QUARTER_H - SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_INNER_OFFSET, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_QUARTER_H + SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_QUARTER_W + SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_UPPER * SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_WIDTH / 2.0, y3: SMALL_EYE_UPPER * SMALL_EYE_INNER_OFFSET, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH / 2.0 + SMALL_EYE_QUARTER_W - SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_UPPER * SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_WIDTH - SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_QUARTER_H + SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_WIDTH - SMALL_EYE_INNER_OFFSET, y3: SMALL_EYE_HEIGHT / 2.0, }, PathOps::CubicTo { x1: SMALL_EYE_WIDTH - SMALL_EYE_INNER_OFFSET, y1: SMALL_EYE_HEIGHT / 2.0 + SMALL_EYE_QUARTER_H - SMALL_EYE_INNER_OFFSET, x2: SMALL_EYE_WIDTH / 2.0 + SMALL_EYE_QUARTER_W - SMALL_EYE_INNER_OFFSET, y2: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, x3: SMALL_EYE_WIDTH / 2.0, y3: SMALL_EYE_HEIGHT - SMALL_EYE_INNER_OFFSET, }, ]; const IRIS_WIDTH: f64 = 40.0; const IRIS_HEIGHT: f64 = 1.5 * IRIS_WIDTH; const IRIS_QUARTER_W: f64 = IRIS_WIDTH / 4.0; const IRIS_QUARTER_H: f64 = IRIS_HEIGHT / 4.0; const IRIS: [PathOps; 5] = [ PathOps::MoveTo { x: IRIS_WIDTH / 2.0, y: IRIS_HEIGHT, }, PathOps::CubicTo { x1: IRIS_WIDTH / 2.0 + IRIS_QUARTER_W, y1: IRIS_HEIGHT, x2: IRIS_WIDTH, y2: IRIS_HEIGHT - IRIS_QUARTER_H, x3: IRIS_WIDTH, y3: IRIS_HEIGHT / 2.0, }, PathOps::CubicTo { x1: IRIS_WIDTH, y1: IRIS_HEIGHT / 2.0 - IRIS_QUARTER_H, x2: IRIS_WIDTH - IRIS_QUARTER_W, y2: 0.0, x3: IRIS_WIDTH / 2.0, y3: 0.0, }, PathOps::CubicTo { x1: IRIS_QUARTER_W, y1: 0.0, x2: 0.0, y2: IRIS_QUARTER_H, x3: 0.0, y3: IRIS_HEIGHT / 2.0, }, 
PathOps::CubicTo { x1: 0.0, y1: IRIS_HEIGHT / 2.0 + IRIS_QUARTER_H, x2: IRIS_QUARTER_W, y2: IRIS_HEIGHT, x3: IRIS_WIDTH / 2.0, y3: IRIS_HEIGHT, }, ]; fn callback(canvas: &mut Canvas) { let transform = |p: &Point| { let center = Point { x: MOON_WIDTH / 2.0, y: MOON_HEIGHT / 2.0, }; let p = rotate_around(&p, &center, Angle::from_degrees(65.0)); let p = scale_around(&p, &center, 1.5, 1.5); translate(&p, 280.0, 180.0) }; canvas.draw_shape(&MOON, YELLOW, FILL_RULE, transform); let transform = |x: f64, y: f64| { return move |p: &Point| translate(&p, x, y); }; canvas.draw_shape(&EYE, BLACK, FILL_RULE, transform(400.0, 50.0)); canvas.draw_shape(&EYE, BLACK, FILL_RULE, transform(400.0 + EYE_WIDTH, 50.0)); canvas.draw_shape(&SMALL_EYE, BLACK, FILL_RULE, transform(433.0, 220.0)); canvas.draw_shape( &SMALL_EYE, BLACK, FILL_RULE, transform(433.0 + EYE_WIDTH, 220.0), ); canvas.draw_shape(&IRIS, BLACK, FILL_RULE, transform(448.0, 267.0)); canvas.draw_shape(&IRIS, BLACK, FILL_RULE, transform(448.0 + EYE_WIDTH, 267.0)); } implement_test! { curve_test, canvas_description, callback | PATH, DARK_SLATE_BLUE, FILL_RULE, default_blending }
width: WIDTH, height: HEIGHT, viewbox: ViewBox { x: 0.0, y: 0.0, width: WIDTH as f64, height: HEIGHT as f64,
function_block-random_span
[ { "content": "pub fn rotate(point: &Point, angle: Angle) -> Point {\n\n let (sin, cos) = angle.to_radians().sin_cos();\n\n\n\n Point {\n\n x: point.x * sin - point.y * cos,\n\n y: point.x * cos + point.y * sin,\n\n }\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 0, "score": 114510.09888260641 }, { "content": "pub fn skew(point: &Point, x: Angle, y: Angle) -> Point {\n\n Point {\n\n x: point.x + point.y * y.to_radians().tan(),\n\n y: point.y + point.x * x.to_radians().tan(),\n\n }\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 1, "score": 112184.69675149443 }, { "content": "pub fn rotate_around(point: &Point, around: &Point, angle: Angle) -> Point {\n\n let p = translate(point, -around.x, -around.y);\n\n let p = rotate(&p, angle);\n\n\n\n translate(&p, around.x, around.y)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 2, "score": 110788.59478353846 }, { "content": "pub fn skew_around(point: &Point, around: &Point, x: Angle, y: Angle) -> Point {\n\n let p = translate(point, -around.x, -around.y);\n\n let p = skew(&p, x, y);\n\n\n\n translate(&p, around.x, around.y)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 3, "score": 108901.89285344369 }, { "content": "pub fn translate(point: &Point, x: f64, y: f64) -> Point {\n\n Point {\n\n x: point.x + x,\n\n y: point.y + y,\n\n }\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 4, "score": 103215.81187425627 }, { "content": "// https://www.w3.org/TR/SVG2/coords.html#ComputingAViewportsTransform\n\npub fn map_viewbox(canvas: &CanvasDescription, point: &Point) -> Point {\n\n let view = canvas.viewbox;\n\n\n\n let (vbx, vby, vb_width, vb_height) = (view.x, view.y, view.width, view.height);\n\n let (ex, ey, e_width, e_height) = (0.0, 0.0, canvas.width as f64, canvas.height as f64);\n\n\n\n let scale_x = e_width / vb_width;\n\n let scale_y = e_height / vb_height;\n\n\n\n let mut translate_x = ex - (vbx * scale_x);\n\n let mut translate_y = ey - (vby * scale_y);\n\n\n\n translate_x += (e_width - vb_width * scale_x) / 
2.0;\n\n translate_y += (e_height - vb_height * scale_y) / 2.0;\n\n\n\n let translated = translate(point, translate_x, translate_y);\n\n scale(&translated, scale_x, scale_y)\n\n}\n", "file_path": "src/math.rs", "rank": 5, "score": 100559.61876850684 }, { "content": "pub fn scale_around(point: &Point, around: &Point, sx: f64, sy: f64) -> Point {\n\n let p = translate(point, -around.x, -around.y);\n\n let p = scale(&p, sx, sy);\n\n\n\n translate(&p, around.x, around.y)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 6, "score": 92194.58908448639 }, { "content": "pub fn scale(point: &Point, sx: f64, sy: f64) -> Point {\n\n Point {\n\n x: point.x * sx,\n\n y: point.y * sy,\n\n }\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 7, "score": 79568.84680023548 }, { "content": "///\n\n/// Line drawing algorithm taken from here:\n\n/// - https://medium.com/@raphlinus/inside-the-fastest-font-renderer-in-the-world-75ae5270c445\n\n///\n\n/// id: A number that should differentiate dfferent segments that are part of the same `Path`.\n\n///\n\npub fn draw_line(state: &mut RenderState, start: &Point, end: &Point) {\n\n let p0 = start;\n\n let p1 = end;\n\n let (width, height) = (state.canvas.desc.width, state.canvas.desc.height);\n\n let accumulation_buffer = &mut state.canvas.accumulation_buffer;\n\n let id = state.id;\n\n\n\n if (p0.y - p1.y).abs() <= f64::EPSILON {\n\n return;\n\n }\n\n let (dir, p0, p1) = if p0.y < p1.y {\n\n (1.0, p0, p1)\n\n } else {\n\n (-1.0, p1, p0)\n\n };\n\n let dxdy = (p1.x - p0.x) / (p1.y - p0.y);\n\n let mut x = p0.x;\n\n let y0 = p0.y as usize;\n\n\n\n for y in y0..height.min(p1.y.ceil() as usize) {\n", "file_path": "src/renderer.rs", "rank": 8, "score": 67592.27032246969 }, { "content": "struct TextDescriptor<'a, const N: usize> {\n\n font_path: &'a str,\n\n glyphs: [u16; N],\n\n foreground_color: Color,\n\n background_color: Color,\n\n}\n\n\n\n#[allow(dead_code)]\n\nconst FMI: TextDescriptor<3> = TextDescriptor::<3> {\n\n font_path: 
\"media/JFWilwod.ttf\",\n\n glyphs: [42, 49, 45],\n\n foreground_color: Color::white(),\n\n background_color: Color::steel_blue(),\n\n};\n\n\n\n#[allow(dead_code)]\n\nconst MALAXO: TextDescriptor<6> = TextDescriptor::<6> {\n\n font_path: \"media/JFWilwod.ttf\",\n\n glyphs: [49, 37, 48, 37, 60, 51],\n\n foreground_color: Color::white(),\n", "file_path": "tests/font_test.rs", "rank": 9, "score": 60563.382700277565 }, { "content": "#[allow(dead_code)]\n\npub fn default_callback(_canvas: &mut Canvas) {}\n\n\n\n#[macro_export]\n\nmacro_rules! implement_test {\n\n ( $($name:ident, $canvas:ident, $custom:ident)? | $($path:expr, $fill_style:expr, $fill_rule:expr, $blend:ident),* ) => {\n\n #[test]\n\n fn $($name)?() {\n\n let _transform = |p: &Point| -> Point { *p };\n\n let mut canvas = Canvas::new($($canvas)?());\n\n $(\n\n canvas.set_blending_function($blend);\n\n canvas.draw_shape(&($path), $fill_style, $fill_rule, _transform);\n\n )*\n\n\n\n $(\n\n $custom(&mut canvas);\n\n )?\n\n\n\n let u8_buffer = canvas.to_u8();\n\n\n", "file_path": "tests/common.rs", "rank": 10, "score": 48540.462317185884 }, { "content": "pub fn get_hash_for_color_buffer(buffer: &[u8]) -> String {\n\n let mut hasher = Sha256::new();\n\n hasher.update(buffer);\n\n format!(\"{:X}\", hasher.finalize())\n\n}\n\n\n\n// Another false positive, this function is used in a lot of tests.\n", "file_path": "tests/common.rs", "rank": 11, "score": 44940.81261979547 }, { "content": "#[allow(dead_code)]\n\npub fn default_blending(src: &Color, dest: &Color) -> Color {\n\n blend_func::source_over(src, dest)\n\n}\n\n\n", "file_path": "tests/common.rs", "rank": 12, "score": 44057.80305605923 }, { "content": "#[test]\n\nfn font_test() {\n\n let test = VERG;\n\n let font_data = std::fs::read(test.font_path).unwrap();\n\n let face = ttf::Face::from_slice(&font_data, 0).unwrap();\n\n\n\n let mut path_ops = test\n\n .glyphs\n\n .iter()\n\n .map(|_| Vec::<PathOps>::new())\n\n .collect::<Vec<Vec<PathOps>>>();\n\n let mut 
translations = test\n\n .glyphs\n\n .iter()\n\n .map(|_| Point { x: 0.0, y: 0.0 })\n\n .collect::<Vec<Point>>();\n\n let mut total_width: f64 = 0.0;\n\n let mut total_height: f64 = 0.0;\n\n\n\n for (i, &glyph_index) in test.glyphs.iter().enumerate() {\n\n let glyph_id = ttf::GlyphId(glyph_index);\n", "file_path": "tests/font_test.rs", "rank": 13, "score": 33518.03080962568 }, { "content": "pub fn fill_path(\n\n state: &mut RenderState,\n\n fill_style: FillStyle,\n\n fill_rule: FillRule,\n\n bounds: &BoundingBox,\n\n) {\n\n let accumulation_buffer = &mut state.canvas.accumulation_buffer;\n\n let desc = &state.canvas.desc;\n\n let color_buffer = &mut state.canvas.buffer;\n\n let blend = state.canvas.blend;\n\n\n\n for y in bounds.min_y..bounds.max_y {\n\n let mut acc = 0.0_f32;\n\n let mut filling = -1.0_f32;\n\n let mut prev_cell = AccumulationCell { area: 0.0, id: 0 };\n\n let get_alpha = match fill_rule {\n\n FillRule::NonZero => alpha_fill_non_zero,\n\n FillRule::EvenOdd => alpha_fill_even_odd,\n\n };\n\n\n", "file_path": "src/renderer.rs", "rank": 14, "score": 32632.532031090148 }, { "content": "pub fn render_path(\n\n state: &mut RenderState,\n\n path: Path,\n\n transform: impl Fn(&Point) -> Point,\n\n) -> BoundingBox {\n\n let desc = state.canvas.desc;\n\n state.id = 0;\n\n\n\n let mut result = BoundingBox::default();\n\n let mut update_bounds = |x: f64, y: f64| {\n\n let x = x as usize;\n\n let y = y as usize;\n\n\n\n result.min_x = usize::min(result.min_x, x);\n\n result.min_y = usize::min(result.min_y, y);\n\n\n\n result.max_x = usize::max(result.max_x, x);\n\n result.max_y = usize::max(result.max_y, y);\n\n };\n\n\n", "file_path": "src/renderer.rs", "rank": 15, "score": 32632.532031090148 }, { "content": "fn alpha_fill_non_zero(\n\n cell: &AccumulationCell,\n\n _prev_cell: &mut AccumulationCell,\n\n acc: &mut f32,\n\n _filling: &mut f32,\n\n) -> f32 {\n\n *acc += cell.area;\n\n acc.abs()\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 16, "score": 
32277.841106992455 }, { "content": "fn get_conic_gradient_color_at(\n\n x: usize,\n\n y: usize,\n\n bounds: &BoundingBox,\n\n stops: &[(Color, Angle)],\n\n translation: Point,\n\n alpha: f32,\n\n) -> Color {\n\n let (min_x, max_x) = (bounds.min_x as f64, bounds.max_x as f64);\n\n let (min_y, max_y) = (bounds.min_y as f64, bounds.max_y as f64);\n\n let center = Point {\n\n x: (min_x + max_x) / 2.0,\n\n y: (min_y + max_y) / 2.0,\n\n };\n\n let point = translate(\n\n &Point {\n\n x: x as f64,\n\n y: y as f64,\n\n },\n\n translation.x,\n", "file_path": "src/renderer.rs", "rank": 17, "score": 32277.841106992455 }, { "content": "fn get_linear_gradient_color_at(\n\n x: usize,\n\n y: usize,\n\n bounds: &BoundingBox,\n\n stops: &[(Color, f64)],\n\n angle: Angle,\n\n alpha: f32,\n\n) -> Color {\n\n let (min_x, max_x) = (bounds.min_x as f64, bounds.max_x as f64);\n\n let (min_y, max_y) = (bounds.min_y as f64, bounds.max_y as f64);\n\n let gradient_width = max_x - min_x;\n\n // Offset angle by PI/2 because of the coordinate system\n\n let angle = Angle::from_radians(angle.to_radians() - std::f64::consts::PI / 2.0);\n\n let point = rotate_around(\n\n &Point {\n\n x: x as f64,\n\n y: y as f64,\n\n },\n\n &Point {\n\n x: (min_x + max_x) / 2.0,\n", "file_path": "src/renderer.rs", "rank": 18, "score": 32277.841106992455 }, { "content": "fn get_radial_gradient_color_at(\n\n x: usize,\n\n y: usize,\n\n bounds: &BoundingBox,\n\n stops: &[(Color, f64)],\n\n translation: Point,\n\n alpha: f32,\n\n) -> Color {\n\n let (min_x, max_x) = (bounds.min_x as f64, bounds.max_x as f64);\n\n let (min_y, max_y) = (bounds.min_y as f64, bounds.max_y as f64);\n\n let gradient_width = (max_x - min_x) / 2.0;\n\n let center = Point {\n\n x: (min_x + max_x) / 2.0,\n\n y: (min_y + max_y) / 2.0,\n\n };\n\n let point = translate(\n\n &Point {\n\n x: x as f64,\n\n y: y as f64,\n\n },\n", "file_path": "src/renderer.rs", "rank": 19, "score": 32277.841106992455 }, { "content": "fn alpha_fill_even_odd(\n\n cell: 
&AccumulationCell,\n\n prev_cell: &mut AccumulationCell,\n\n acc: &mut f32,\n\n filling: &mut f32,\n\n) -> f32 {\n\n if cell.id > 0 && cell.id != prev_cell.id {\n\n prev_cell.id = cell.id;\n\n *filling = -(*filling);\n\n }\n\n\n\n if cell.id == prev_cell.id {\n\n *acc += *filling * cell.area.abs();\n\n\n\n if *acc < 0.0 || *acc > 1.0 {\n\n let is_filling = filling.partial_cmp(&&mut 0.0_f32) != Some(Ordering::Greater);\n\n *acc = (is_filling as i32) as f32;\n\n }\n\n } else {\n\n let is_filling = filling.partial_cmp(&&mut 0.0_f32) == Some(Ordering::Greater);\n\n *acc = (is_filling as i32) as f32;\n\n }\n\n\n\n clamp(acc.abs(), 0.0, 1.0)\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 20, "score": 32277.841106992455 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color::black(),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nconst BIG_TRIANGLE: [PathOps; 4] = [\n\n PathOps::MoveTo { x: 60.0, y: 240.0 },\n\n PathOps::LineTo { x: 360.0, y: 80.0 },\n\n PathOps::LineTo { x: 400.0, y: 280.0 },\n\n PathOps::Close,\n", "file_path": "tests/triangle_test.rs", "rank": 21, "score": 30276.309548223035 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color::white(),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nconst LINES: [[PathOps; 2]; 6] = [\n\n [\n\n PathOps::MoveTo { x: 120.0, y: 120.0 },\n\n PathOps::LineTo { x: 360.0, y: 360.0 },\n\n ],\n", "file_path": "tests/line_test.rs", "rank": 22, "score": 30276.309548223035 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 
0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color::coral(),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nconst PATH: [PathOps; 5] = [\n\n PathOps::MoveTo { x: 0.0, y: 0.0 },\n\n PathOps::LineTo {\n\n x: WIDTH as f64 - 1.0,\n\n y: 0.0,\n", "file_path": "tests/basic_test.rs", "rank": 24, "score": 30276.309548223035 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color::dark_slate_blue(),\n\n tolerance: 1.0,\n\n }\n\n}\n\n\n\nconst GAP: f64 = 100.0;\n\n\n\nconst SQUARE_SIZE: f64 = 200.0;\n\nconst SQUARE: [PathOps; 5] = [\n\n PathOps::MoveTo { x: 0.0, y: 0.0 },\n", "file_path": "tests/gradients_test.rs", "rank": 25, "score": 30276.309548223035 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color::black(),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nconst PATH: [PathOps; 15] = [\n\n PathOps::MoveTo { x: 80.0, y: 80.0 },\n\n PathOps::LineTo { x: 80.0, y: 420.0 },\n\n PathOps::LineTo { x: 420.0, y: 420.0 },\n\n PathOps::LineTo { x: 420.0, y: 80.0 },\n", "file_path": "tests/rect_test.rs", "rank": 26, "score": 30276.309548223035 }, { "content": "fn approximate_integral(x: f64) -> f64 {\n\n const D: f64 = 0.67;\n\n x / (1.0 - D + f64::powf(f64::powf(D, 4.0) + 0.25 * x * x, 0.25))\n\n}\n\n\n", "file_path": "src/geometry.rs", "rank": 27, "score": 30021.638739764938 }, { "content": "use sha2::{Digest, Sha256};\n\nuse verg::canvas::Canvas;\n\nuse verg::color::Color;\n\nuse verg::renderer::blend_func;\n\n\n\n// We allow dead code because clippy gives a false positive.\n\n// The constant is used in 
`implement_test!`.\n\n#[allow(dead_code)]\n\npub const REFERENCE_HASHES: [(&str, &str); 10] = [\n\n (\n\n \"basic_test\",\n\n \"95AEB28CB13578C558F745AD4DFCE5DF3BCAD3E11C0C9F15077ED3144C6D4D98\",\n\n ),\n\n (\n\n \"even_odd_fill_test\",\n\n \"EFF992CDB334A9EA152DF318A94BB6CFF42B0BC1412F7479DED6C23A0D78518D\",\n\n ),\n\n (\n\n \"line_test\",\n\n \"B63BD4212971997150E7FE594444F58C539111AB72363D7C584C25DA7DE692CE\",\n", "file_path": "tests/common.rs", "rank": 28, "score": 29679.233074964854 }, { "content": " image::save_buffer(\n\n format!(\"{}.png\", stringify!($($name)?)),\n\n u8_buffer.as_slice(),\n\n canvas.desc.width as u32,\n\n canvas.desc.height as u32,\n\n image::ColorType::Rgba8,\n\n )\n\n .unwrap();\n\n\n\n {\n\n let hash = common::get_hash_for_color_buffer(&u8_buffer);\n\n let mut hash_found = false;\n\n\n\n println!(\"Hash for `{}`: {}\", stringify!($($name)?), hash);\n\n\n\n for (ref_id, ref_hash) in common::REFERENCE_HASHES {\n\n if ref_id == stringify!($($name)?) {\n\n hash_found = true;\n\n assert_eq!(ref_hash, hash);\n\n break;\n", "file_path": "tests/common.rs", "rank": 29, "score": 29677.753035134992 }, { "content": " }\n\n }\n\n\n\n if !hash_found {\n\n eprintln!(\n\n \"Hash for test id `{}` not found in `common::REFERENCE_HASHES`!\",\n\n stringify!($($name)?)\n\n );\n\n assert!(false);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "tests/common.rs", "rank": 30, "score": 29675.46009495505 }, { "content": " ),\n\n (\n\n \"font_test\",\n\n \"F2B2DC85EB145FD18896CED3F73F07AC851CDF267464ACD8390D3E974A2FC727\",\n\n ),\n\n (\n\n \"gradients_test\",\n\n \"5BAEE5895D31B42D8919ACC944915D3A087235F1DD520522D6AF6FFBE44DF6BE\",\n\n ),\n\n];\n\n\n", "file_path": "tests/common.rs", "rank": 31, "score": 29672.671698600174 }, { "content": " ),\n\n (\n\n \"rect_test\",\n\n \"788DE6A896D30D52DE643180A3B0C084D7EA1F1940F79C41C084B607277CC3B2\",\n\n ),\n\n (\n\n \"triangle_test\",\n\n \"765A7406D707AF35C371B3505B0688C51FB11B42BBB5B46233EAA445A2F28675\",\n\n ),\n\n (\n\n 
\"alpha_blending_test\",\n\n \"6C9B6E7943B889530E2CDC9BAA64FC70551AFD233A2E85C4E448DCD261AA0832\",\n\n ),\n\n (\n\n \"affine_transforms_test\",\n\n \"72D232FA2940A3ED66F3465073088EC35989131664B18888D5DBAB8C725226EE\",\n\n ),\n\n (\n\n \"curve_test\",\n\n \"7D79ED1BE7F16175DF64AC8DB90A35F8ADEE807A2E5493BA20D270ADBAA47704\",\n", "file_path": "tests/common.rs", "rank": 32, "score": 29672.671698600174 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color {\n\n r: 0.0,\n\n g: 0.0,\n\n b: 0.0,\n\n a: 0.0,\n\n },\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 33, "score": 29266.678198308047 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: 100.0,\n\n height: 100.0,\n\n },\n\n background_color: Color::white(),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nconst PATH: [PathOps; 5] = [\n\n PathOps::MoveTo { x: 5.0, y: 5.0 },\n\n PathOps::LineTo { x: 15.0, y: 5.0 },\n\n PathOps::LineTo { x: 15.0, y: 15.0 },\n\n PathOps::LineTo { x: 5.0, y: 15.0 },\n", "file_path": "tests/affine_transforms_test.rs", "rank": 34, "score": 29266.678198308047 }, { "content": "fn callback(canvas: &mut Canvas) {\n\n const NUM_VERTICES: f64 = 8.0;\n\n const RADIUS: f64 = SQUARE_SIZE / 2.0;\n\n const CX: f64 = SQUARE_SIZE / 2.0;\n\n const CY: f64 = SQUARE_SIZE / 2.0;\n\n let mut hex = Vec::<PathOps>::with_capacity(9);\n\n let mut alpha = 0.0_f64;\n\n\n\n while alpha < 360.0 {\n\n let x = RADIUS * f64::cos(alpha.to_radians()) + CX;\n\n let y = RADIUS * f64::sin(alpha.to_radians()) + CY;\n\n if hex.is_empty() {\n\n hex.push(PathOps::MoveTo { x, y });\n\n } else {\n\n hex.push(PathOps::LineTo { x, y });\n\n }\n\n alpha += 
360.0 / NUM_VERTICES;\n\n }\n\n\n\n hex.push(PathOps::Close);\n", "file_path": "tests/gradients_test.rs", "rank": 35, "score": 28905.60595953105 }, { "content": "fn approximate_inverse_integral(x: f64) -> f64 {\n\n const B: f64 = 0.39;\n\n\n\n x * (1.0 - B + f64::sqrt(B * B - 0.25 * x * x))\n\n}\n\n\n\npub struct ParabolaParams {\n\n x0: f64,\n\n x2: f64,\n\n scale: f64,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct QuadraticBezier {\n\n x0: f64,\n\n y0: f64,\n\n x1: f64,\n\n y1: f64,\n\n x2: f64,\n\n y2: f64,\n", "file_path": "src/geometry.rs", "rank": 37, "score": 28905.60595953105 }, { "content": "fn canvas_description() -> CanvasDescription {\n\n CanvasDescription {\n\n width: WIDTH,\n\n height: HEIGHT,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: WIDTH as f64,\n\n height: HEIGHT as f64,\n\n },\n\n background_color: Color::black(),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nconst PATH: [PathOps; 18] = [\n\n PathOps::MoveTo { x: 200.0, y: 200.0 },\n\n PathOps::LineTo { x: 800.0, y: 200.0 },\n\n PathOps::LineTo { x: 800.0, y: 800.0 },\n\n PathOps::LineTo { x: 200.0, y: 800.0 },\n", "file_path": "tests/even_odd_fill_test.rs", "rank": 38, "score": 28348.92432209614 }, { "content": "pub fn callback(canvas: &mut Canvas) {\n\n let black = FillStyle::Plain(Color::black());\n\n let dark_slate_blue = FillStyle::Plain(Color::dark_slate_blue());\n\n let forest_green = FillStyle::Plain(Color::forest_green());\n\n let blue = FillStyle::Plain(Color::blue());\n\n let crimson = FillStyle::Plain(Color::crimson());\n\n let coral = FillStyle::Plain(Color::coral());\n\n let cyan = FillStyle::Plain(Color::cyan());\n\n\n\n {\n\n let translate = |i: usize| {\n\n return move |p: &Point| translate(p, TRANSLATIONS[i].x, TRANSLATIONS[i].y);\n\n };\n\n\n\n canvas.draw_shape(&PATH, black, FILL_RULE, translate(0));\n\n canvas.draw_shape(&PATH, dark_slate_blue, FILL_RULE, translate(1));\n\n canvas.draw_shape(&PATH, blue, FILL_RULE, translate(2));\n\n 
canvas.draw_shape(&PATH, forest_green, FILL_RULE, translate(3));\n\n canvas.draw_shape(&PATH, black, FILL_RULE, translate(4));\n\n }\n", "file_path": "tests/affine_transforms_test.rs", "rank": 39, "score": 26436.923774812414 }, { "content": "fn fmin3(a: f32, b: f32, c: f32) -> f32 {\n\n f32::min(f32::min(a, b), c)\n\n}\n\n\n", "file_path": "tests/font_test.rs", "rank": 40, "score": 26106.845714902316 }, { "content": "fn fmax3(a: f32, b: f32, c: f32) -> f32 {\n\n f32::max(f32::max(a, b), c)\n\n}\n\n\n\nimpl ttf::OutlineBuilder for BboxOutlineBuilder {\n\n fn move_to(&mut self, x: f32, y: f32) {\n\n self.min_x = f32::min(self.min_x, x);\n\n self.min_y = f32::min(self.min_y, y);\n\n self.max_x = f32::max(self.max_x, x);\n\n self.max_y = f32::max(self.max_y, y);\n\n self.num_paths += 1;\n\n }\n\n\n\n fn line_to(&mut self, x: f32, y: f32) {\n\n self.min_x = f32::min(self.min_x, x);\n\n self.min_y = f32::min(self.min_y, y);\n\n self.max_x = f32::max(self.max_x, x);\n\n self.max_y = f32::max(self.max_y, y);\n\n self.num_paths += 1;\n\n }\n", "file_path": "tests/font_test.rs", "rank": 41, "score": 26106.845714902316 }, { "content": "#[allow(dead_code)]\n\nfn destination_in(src: &Color, dest: &Color) -> Color {\n\n blend_func::destination_in(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 42, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn source(src: &Color, dest: &Color) -> Color {\n\n blend_func::source(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 43, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn source_in(src: &Color, dest: &Color) -> Color {\n\n blend_func::source_in(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 44, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn additive(src: &Color, dest: &Color) -> Color {\n\n blend_func::additive(src, dest)\n\n}\n\n\n\nimplement_test! 
{\n\n alpha_blending_test, canvas_description, default_callback |\n\n DESTINATION_TRIANGLES[0], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[0], FILL_SOURCE, FILL_RULE, default_blending,\n\n DESTINATION_TRIANGLES[1], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[1], FILL_SOURCE, FILL_RULE, destination_over,\n\n DESTINATION_TRIANGLES[2], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[2], FILL_SOURCE, FILL_RULE, source_out,\n\n DESTINATION_TRIANGLES[3], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[3], FILL_SOURCE, FILL_RULE, destination_out,\n\n DESTINATION_TRIANGLES[4], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[4], FILL_SOURCE, FILL_RULE, source_in,\n\n DESTINATION_TRIANGLES[5], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[5], FILL_SOURCE, FILL_RULE, destination_in,\n\n DESTINATION_TRIANGLES[6], FILL_DESTINATION, FILL_RULE, default_blending,\n\n SOURCE_TRIANGLES[6], FILL_SOURCE, FILL_RULE, source_atop,\n", "file_path": "tests/alpha_blending_test.rs", "rank": 45, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn destination(src: &Color, dest: &Color) -> Color {\n\n blend_func::destination(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 46, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn destination_out(src: &Color, dest: &Color) -> Color {\n\n blend_func::destination_out(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 47, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn source_out(src: &Color, dest: &Color) -> Color {\n\n blend_func::source_out(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 48, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn destination_over(src: &Color, dest: &Color) -> Color {\n\n blend_func::destination_over(src, dest)\n\n}\n\n\n", 
"file_path": "tests/alpha_blending_test.rs", "rank": 49, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn xor(src: &Color, dest: &Color) -> Color {\n\n blend_func::xor(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 50, "score": 25128.638930972946 }, { "content": "#[allow(dead_code)]\n\nfn source_atop(src: &Color, dest: &Color) -> Color {\n\n blend_func::source_atop(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 51, "score": 24360.654298295427 }, { "content": "#[allow(dead_code)]\n\nfn destination_atop(src: &Color, dest: &Color) -> Color {\n\n blend_func::destination_atop(src, dest)\n\n}\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 52, "score": 24360.654298295427 }, { "content": "fn update_cell(area: f32, cell: &mut AccumulationCell, id: i32) {\n\n cell.area += area;\n\n cell.id = id;\n\n}\n\n\n\npub struct RenderState<'a> {\n\n pub canvas: &'a mut Canvas,\n\n pub id: i32,\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 53, "score": 22876.743993999917 }, { "content": "pub fn draw_cubic_bezier(state: &mut RenderState, curve: &CubicBezier) {\n\n let points = curve.subdivide(state.canvas.desc.tolerance);\n\n\n\n points.windows(2).for_each(|p: &[Point]| {\n\n draw_line(state, &p[0], &p[1]);\n\n });\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 54, "score": 22589.467131028137 }, { "content": "pub fn draw_quad_bezier(state: &mut RenderState, curve: &QuadraticBezier) {\n\n let points = curve\n\n .subdivide(state.canvas.desc.tolerance)\n\n .iter()\n\n .map(|t: &f64| curve.eval(*t))\n\n .collect::<Vec<Point>>();\n\n\n\n points.windows(2).for_each(|p: &[Point]| {\n\n draw_line(state, &p[0], &p[1]);\n\n });\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 55, "score": 22589.467131028137 }, { "content": "pub fn clamp<T: std::cmp::PartialOrd>(v: T, min: T, max: T) -> T {\n\n if v < min {\n\n min\n\n } else if v > max {\n\n max\n\n } else {\n\n v\n\n 
}\n\n}\n\n\n\nimpl Color {\n\n pub fn clamp(&self) -> Color {\n\n let (min, max) = (0.0_f64, 1.0_f64);\n\n Color {\n\n r: clamp(self.r, min, max),\n\n g: clamp(self.g, min, max),\n\n b: clamp(self.b, min, max),\n\n a: clamp(self.a, min, max),\n\n }\n\n }\n", "file_path": "src/color.rs", "rank": 56, "score": 19727.574365289496 }, { "content": "[![CI](https://github.com/AlexandruIca/verg/actions/workflows/ci.yml/badge.svg)](https://github.com/AlexandruIca/verg/actions/workflows/ci.yml)\n\n\n\n# Verg\n\nThis is a WIP vector graphics rendering library. Its goal is to be simple to use and easy to embed in existing projects. It doesn't necessarily want to implement a lot of features:\n\n- Rendering cubic and quadratic Bézier curves\n\n- Gradients: linear, radial, conic\n\n- Support for color spaces\n\n- Correct alpha compositing, including supporting Porter Duff's operators\n\n- Clipping and masking (not sure about this)\n\n\n\nAny other features will be implemented only if they're not too complicated. 
The focus right now is on _correct_ rendering rather than having lots of features.\n\n\n\n# Setting up a development environment\n\nYou can check `shell.nix`(even if you don't use nix) to see the dependencies that are needed(all of them can be gathered with `rustup`).\n\n\n\nTo run the project's tests:\n\n```sh\n\ncargo test --all-features\n\n```\n", "file_path": "README.md", "rank": 57, "score": 17179.47953477293 }, { "content": "The MIT License (MIT)\n\n\n\nCopyright (c) 2021 Ică Alexandru-Gabriel\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\n\nthis software and associated documentation files (the \"Software\"), to deal in\n\nthe Software without restriction, including without limitation the rights to\n\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software is furnished to do so,\n\nsubject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n", "file_path": "LICENSE.md", "rank": 58, "score": 17177.443322087067 }, { "content": "# Foreword\n\nI'm not writing this expecting to have people contributing to this project(of course that would be awesome), I doubt this project will even be _seen_ by other people, I'm mostly writing this for myself.\n\n\n\n# Before you start working on something\n\nCreate a new issue if it doesn't already exist. 
Give it an appropiate title and describe in as much detail as you can what the issue is.\n\n\n\nAfter you have an issue to work on you should create a new branch. Never commit directly to `main`, that's only meant to be merged _into_. The name of the branch should follow this scheme:\n\n```\n\nVG-issue_number-Short-description-of-what's-being-worked-on\n\n```\n\nThis is so that it's easy to keep track of changes being made to the project.\n\n\n\n# Create tests\n\nThis project isn't test-driven, but whenever you add/change something you should accompany that with some tests so that it's clear what the intent of the addition/modification is.\n\n\n\n# Commit structure\n\n```\n\ncomponent: Short description, 80-100 columns (#issue_number)\n\n\n\nOptionally more text\n\n```\n\nThe first line of the commit message should start with the component that you're working on, that could be one of:\n\n- `misc`: anything that's not directly related to the project itself, for example: editing `.gitignore`, solving merge conflicts\n\n- `build`: anything related to the build process, for example: editing `shell.nix`, editing `Cargo.toml`\n\n- `core`: anything related to the inner workings of this lib\n\n- `test`: anything code-related that you modify in `tests/`\n\n- `ci`: anything related to `.github/workflows/`\n\n- `docs`: anything documentation-related, for example: editing `.md` files, writing rust docs\n\n\n\nFor example, if I want to commit a modification to `README.md` a commit message could look like this:\n\n```\n\ndocs: Provide some info in the README (#42)\n\n\n\nHere I could write in more detail about the change\n\n```\n\nIf you feel like a commit may fit inside two components you can concatenate them. For example, let's say I modify the CI workflow and change a script that generates documentation. 
The commit message may look like:\n\n```\n\nci:docs: <What changed> (#13)\n\n```\n\nYou shouldn't need more than two components, if you do then maybe that commit should have been split into multiple commits. Keep commits simple and to the point. You can take a look [here](https://chris.beams.io/posts/git-commit/) for some additional advice on how to write commit messages.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 59, "score": 17176.144280701246 }, { "content": "# After you're done working\n\nYou should run tests, run clippy and run rustfmt to avoid failed CI builds. If everything's ok and the CI builds successfully you can create a PR to be merged into main. PRs might not be needed because I'm by myself but it's still nice to have an organized way to see project history. PRs should be named like this: `#issue_number Short description of what's being merged`. You can also include as many details as you want in the message.\n\n\n\n# Notes on making changes to the CI pipeline\n\nModifying the CI pipeline shouldn't be needed often, but when it's needed it's a pretty ugly process, it usually involves pushing lots of commits that fix very minor things, cluttering the project's history. 
Rebasing those commits is not an option since the commits need to be _pushed_ upstream before the CI jobs are even run.\n\n\n\nTo overcome this, the usual workflow is:\n\n- Create a temporary branch whose sole purpose is to modify stuff in `.github/workflows/`(make sure the branch starts with `VG-*`, otherwise the CI pipeline will ignore the branch)\n\n- Push all the annoying commits to this branch\n\n- After you are pleased with the modifications you can copy the `.yml` file(s) to the branch you are _actually_ working on **without** merging, deleting the temporary branch\n\n\n\nThis way we can have nice, clean history and a happy CI pipeline!\n", "file_path": "CONTRIBUTING.md", "rank": 60, "score": 17175.634580861042 }, { "content": "use verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n math::{translate, Angle},\n\n};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 1000;\n\nconst HEIGHT: usize = 1000;\n\n\n", "file_path": "tests/gradients_test.rs", "rank": 61, "score": 22.607247944673713 }, { "content": "// This test draws using affine transformations applied on paths.\n\n\n\nuse verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n math::{rotate_around, scale_around, skew_around, translate, Angle},\n\n};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 1000;\n\nconst HEIGHT: usize = 1000;\n\n\n", "file_path": "tests/affine_transforms_test.rs", "rank": 63, "score": 19.740810180526957 }, { "content": "// This test draws a rectangle that's not filled inside.\n\n\n\nuse crate::common::{default_blending, default_callback};\n\nuse verg::canvas::{Canvas, CanvasDescription, ViewBox};\n\nuse verg::color::{Color, FillRule, FillStyle};\n\nuse verg::geometry::{PathOps, Point};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 500;\n\nconst HEIGHT: usize = 500;\n\n\n", "file_path": "tests/rect_test.rs", "rank": 64, 
"score": 19.118925705766873 }, { "content": "// This test draws a bunch of triangles of different sizes, colors and shapes.\n\n\n\nuse crate::common::{default_blending, default_callback};\n\nuse verg::canvas::{Canvas, CanvasDescription, ViewBox};\n\nuse verg::color::{Color, FillRule, FillStyle};\n\nuse verg::geometry::{PathOps, Point};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 500;\n\nconst HEIGHT: usize = 500;\n\n\n", "file_path": "tests/triangle_test.rs", "rank": 65, "score": 18.565501737163594 }, { "content": "// This test draws some open paths using only lines.\n\n\n\nuse crate::common::{default_blending, default_callback};\n\nuse verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 500;\n\nconst HEIGHT: usize = 500;\n\n\n", "file_path": "tests/line_test.rs", "rank": 66, "score": 18.051788900335232 }, { "content": "// This test draws a background on the whole canvas.\n\n\n\nuse crate::common::{default_blending, default_callback};\n\nuse verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 500;\n\nconst HEIGHT: usize = 500;\n\n\n", "file_path": "tests/basic_test.rs", "rank": 67, "score": 17.778621516165643 }, { "content": "// This tests draws a triangle inside a square using the even-odd fill rule.\n\n\n\nuse crate::common::{default_blending, default_callback};\n\nuse verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 1000;\n\nconst HEIGHT: usize = 1000;\n\n\n", "file_path": "tests/even_odd_fill_test.rs", "rank": 68, "score": 17.263138187811155 }, { "content": "// This tests the Porter-Duff blending operators.\n\n\n\nuse crate::common::{default_blending, 
default_callback};\n\nuse verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n renderer::blend_func,\n\n};\n\n\n\nmod common;\n\n\n\nconst WIDTH: usize = 1410;\n\nconst HEIGHT: usize = 1030;\n\n\n", "file_path": "tests/alpha_blending_test.rs", "rank": 69, "score": 17.15986501032336 }, { "content": "use ttf_parser as ttf;\n\nuse verg::{\n\n canvas::{Canvas, CanvasDescription, ViewBox},\n\n color::{Color, FillRule, FillStyle},\n\n geometry::{PathOps, Point},\n\n math::translate,\n\n};\n\n\n\nmod common;\n\n\n\n///\n\n/// Only used to compute the bounding box of a glyph.\n\n///\n\n#[derive(Debug, Clone, Copy)]\n", "file_path": "tests/font_test.rs", "rank": 70, "score": 12.957603958442713 }, { "content": "pub struct CanvasDescription {\n\n pub width: usize,\n\n pub height: usize,\n\n pub viewbox: ViewBox,\n\n pub tolerance: f64,\n\n pub background_color: Color,\n\n}\n\n\n\nimpl Default for CanvasDescription {\n\n fn default() -> Self {\n\n CanvasDescription {\n\n width: 600,\n\n height: 600,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: 600.0,\n\n height: 600.0,\n\n },\n\n tolerance: 1.5,\n", "file_path": "src/canvas.rs", "rank": 71, "score": 11.926299019794337 }, { "content": "\n\n {\n\n let transform = |i: usize, sx: f64, sy: f64, skew_x: f64, skew_y: f64, rotate: f64| {\n\n return move |p: &Point| {\n\n let p = translate(p, TRANSLATIONS[i].x, TRANSLATIONS[i].y);\n\n let center = Point {\n\n x: TRANSLATIONS[i].x + 10.0,\n\n y: TRANSLATIONS[i].y + 10.0,\n\n };\n\n\n\n let p = scale_around(&p, &center, sx, sy);\n\n let p = skew_around(\n\n &p,\n\n &center,\n\n Angle::from_degrees(skew_x),\n\n Angle::from_degrees(skew_y),\n\n );\n\n\n\n rotate_around(&p, &center, Angle::from_degrees(rotate))\n\n };\n", "file_path": "tests/affine_transforms_test.rs", "rank": 72, "score": 11.044332299713833 }, { "content": " total_width += bbox_builder.max_x as f64;\n\n total_height = 
f64::max(total_height, bbox_builder.max_y as f64);\n\n }\n\n total_width += 100.0;\n\n total_height += 100.0;\n\n let canvas_desc = CanvasDescription {\n\n width: total_width as usize,\n\n height: total_height as usize,\n\n viewbox: ViewBox {\n\n x: 0.0,\n\n y: 0.0,\n\n width: total_width,\n\n height: total_height,\n\n },\n\n tolerance: 1.0,\n\n background_color: test.background_color,\n\n };\n\n let mut canvas = Canvas::new(canvas_desc);\n\n\n\n for (i, path) in path_ops.iter().enumerate() {\n", "file_path": "tests/font_test.rs", "rank": 73, "score": 10.872286526570871 }, { "content": " translation.y,\n\n );\n\n let pivot = center;\n\n // Get angle between current point and pivot:\n\n /*\n\n let (dx, dy) = (point.x - pivot.x, pivot.y - point.y);\n\n let angle = Angle::from_radians(if dx != 0.0 { f64::atan(dy / dx) } else { 0.0 });\n\n */\n\n let angle = Angle::from_radians(f64::atan2(pivot.y - point.y, point.x - pivot.x).abs());\n\n let mut stop_index = 0_usize;\n\n\n\n while stop_index < stops.len()\n\n && stops[stop_index].1.to_degrees().abs() < angle.to_degrees().abs()\n\n {\n\n stop_index += 1;\n\n }\n\n\n\n stop_index = clamp(stop_index, 0, stops.len() - 1);\n\n\n\n if stop_index == 0 {\n", "file_path": "src/renderer.rs", "rank": 74, "score": 9.589974154452495 }, { "content": "\n\n {\n\n let rotate = |i: usize, angle: f64| {\n\n return move |p: &Point| {\n\n let p = translate(p, TRANSLATIONS[i].x, TRANSLATIONS[i].y);\n\n let around = Point {\n\n x: TRANSLATIONS[i].x + 10.0,\n\n y: TRANSLATIONS[i].y + 10.0,\n\n };\n\n\n\n rotate_around(&p, &around, Angle::from_degrees(angle))\n\n };\n\n };\n\n\n\n canvas.draw_shape(&PATH, dark_slate_blue, FILL_RULE, rotate(5, 25.0));\n\n canvas.draw_shape(&PATH, crimson, FILL_RULE, rotate(6, -45.0));\n\n canvas.draw_shape(&PATH, blue, FILL_RULE, rotate(7, 270.0));\n\n canvas.draw_shape(&PATH, forest_green, FILL_RULE, rotate(8, 170.0));\n\n canvas.draw_shape(&PATH, coral, FILL_RULE, rotate(9, 330.0));\n\n }\n", "file_path": 
"tests/affine_transforms_test.rs", "rank": 75, "score": 9.458753522238222 }, { "content": "use crate::{\n\n canvas::{AccumulationCell, Canvas},\n\n color::{clamp, Color, FillRule, FillStyle},\n\n geometry::{BoundingBox, CubicBezier, Path, PathOps, Point, QuadraticBezier},\n\n math::{map_viewbox, rotate_around, translate, Angle},\n\n};\n\nuse std::cmp::Ordering;\n\n\n\n///\n\n/// Parameters `src` and `dest`.\n\n///\n\npub type BlendFunc = fn(&Color, &Color) -> Color;\n\n\n\npub mod blend_func {\n\n use crate::{color::clamp, renderer::Color};\n\n\n\n pub fn source_over(src: &Color, dest: &Color) -> Color {\n\n Color {\n\n r: src.r * src.a + dest.r * dest.a * (1.0 - src.a),\n\n g: src.g * src.a + dest.g * dest.a * (1.0 - src.a),\n", "file_path": "src/renderer.rs", "rank": 76, "score": 8.903272125049927 }, { "content": "\n\n {\n\n let skew = |i: usize, x: f64, y: f64| {\n\n return move |p: &Point| {\n\n let p = translate(p, TRANSLATIONS[i].x, TRANSLATIONS[i].y);\n\n let center = Point {\n\n x: TRANSLATIONS[i].x + 10.0,\n\n y: TRANSLATIONS[i].y + 10.0,\n\n };\n\n\n\n skew_around(&p, &center, Angle::from_degrees(x), Angle::from_degrees(y))\n\n };\n\n };\n\n\n\n canvas.draw_shape(&PATH, dark_slate_blue, FILL_RULE, skew(15, 15.0, 15.0));\n\n canvas.draw_shape(&PATH, cyan, FILL_RULE, skew(16, -15.0, -15.0));\n\n canvas.draw_shape(&PATH, coral, FILL_RULE, skew(17, 45.0, -15.0));\n\n canvas.draw_shape(&PATH, forest_green, FILL_RULE, skew(18, 0.0, 0.0));\n\n canvas.draw_shape(&PATH, crimson, FILL_RULE, skew(19, 0.0, 20.0));\n\n }\n", "file_path": "tests/affine_transforms_test.rs", "rank": 77, "score": 8.892831033704837 }, { "content": " translation.x,\n\n translation.y,\n\n );\n\n let clamped = Point {\n\n x: clamp(point.x, min_x, max_x),\n\n y: clamp(point.y, min_y, max_y),\n\n };\n\n let dist = clamped.distance_to(&center).abs() / gradient_width;\n\n let mut stop_index = 0_usize;\n\n\n\n while stop_index < stops.len() && stops[stop_index].1 < dist {\n\n stop_index += 1;\n\n 
}\n\n\n\n stop_index = clamp(stop_index, 0, stops.len() - 1);\n\n\n\n if stop_index == 0 {\n\n let mut c = stops[0].0;\n\n c.a = alpha as f64;\n\n return c;\n", "file_path": "src/renderer.rs", "rank": 78, "score": 8.660267956401876 }, { "content": "\n\n // Conic gradients:\n\n let square_stops = [\n\n (Color::blue(), Angle::from_degrees(0.0)),\n\n (Color::yellow(), Angle::from_degrees(180.0)),\n\n ];\n\n canvas.draw_shape(\n\n &SQUARE,\n\n FillStyle::ConicGradient {\n\n stops: &square_stops,\n\n translation: Point { x: 0.0, y: 0.0 },\n\n },\n\n FillRule::NonZero,\n\n |p: &Point| translate(p, GAP, 3.0 * GAP + 2.0 * SQUARE_SIZE),\n\n );\n\n\n\n let hex_stops = [\n\n (Color::forest_green(), Angle::from_degrees(0.0)),\n\n (Color::steel_blue(), Angle::from_degrees(90.0)),\n\n (Color::cyan(), Angle::from_degrees(180.0)),\n", "file_path": "tests/gradients_test.rs", "rank": 79, "score": 8.148315370330867 }, { "content": " canvas.draw_shape(\n\n path.as_slice(),\n\n FillStyle::Plain(test.foreground_color),\n\n FillRule::NonZero,\n\n |p: &Point| translate(p, translations[i].x, translations[i].y),\n\n );\n\n }\n\n\n\n let u8_buffer = canvas.to_u8();\n\n\n\n image::save_buffer(\n\n \"font_test.png\",\n\n u8_buffer.as_slice(),\n\n canvas_desc.width as u32,\n\n canvas_desc.height as u32,\n\n image::ColorType::Rgba8,\n\n )\n\n .unwrap();\n\n\n\n {\n", "file_path": "tests/font_test.rs", "rank": 80, "score": 7.967689980608119 }, { "content": " (Color::forest_green(), Angle::from_degrees(270.0)),\n\n ];\n\n canvas.draw_shape(\n\n hex.as_slice(),\n\n FillStyle::ConicGradient {\n\n stops: &hex_stops,\n\n translation: Point { x: 0.0, y: 0.0 },\n\n },\n\n FillRule::NonZero,\n\n |p: &Point| translate(p, 2.0 * GAP + SQUARE_SIZE, 3.0 * GAP + 2.0 * SQUARE_SIZE),\n\n );\n\n\n\n let triangle_stops = [\n\n (Color::yellow(), Angle::from_degrees(0.0)),\n\n (Color::forest_green(), Angle::from_degrees(72.0)),\n\n (Color::white(), Angle::from_degrees(216.0)),\n\n (Color::black(), 
Angle::from_degrees(288.0)),\n\n ];\n\n canvas.draw_shape(\n\n &CURVED_TRIANGLE,\n", "file_path": "tests/gradients_test.rs", "rank": 81, "score": 7.923158170826542 }, { "content": "use crate::geometry::Point;\n\nuse crate::math::Angle;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Color {\n\n pub r: f64,\n\n pub g: f64,\n\n pub b: f64,\n\n pub a: f64,\n\n}\n\n\n", "file_path": "src/color.rs", "rank": 82, "score": 7.851967451724034 }, { "content": " },\n\n RadialGradient {\n\n stops: &'a [(Color, f64)],\n\n translation: Point,\n\n },\n\n ConicGradient {\n\n stops: &'a [(Color, Angle)],\n\n translation: Point,\n\n },\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum FillRule {\n\n EvenOdd,\n\n NonZero,\n\n}\n\n\n\nimpl Default for FillRule {\n\n fn default() -> FillRule {\n\n FillRule::NonZero\n\n }\n\n}\n", "file_path": "src/color.rs", "rank": 83, "score": 7.275945626781317 }, { "content": " y: (min_y + max_y) / 2.0,\n\n },\n\n angle,\n\n );\n\n\n\n let clamped_x = clamp(point.x, min_x, max_x);\n\n let fx = (clamped_x - min_x) / gradient_width;\n\n let mut stop_index = 0_usize;\n\n\n\n while stop_index < stops.len() && stops[stop_index].1 < fx {\n\n stop_index += 1;\n\n }\n\n\n\n stop_index = clamp(stop_index, 0, stops.len() - 1);\n\n\n\n if stop_index == 0 {\n\n let mut c = stops[0].0;\n\n c.a = alpha as f64;\n\n return c;\n\n }\n", "file_path": "src/renderer.rs", "rank": 84, "score": 7.114827937047069 }, { "content": "use crate::color::{Color, FillRule, FillStyle};\n\nuse crate::geometry::{Path, Point};\n\nuse crate::renderer::{blend_func, fill_path, render_path, BlendFunc, RenderState, NUM_CHANNELS};\n\nuse std::vec::Vec;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct AccumulationCell {\n\n pub area: f32,\n\n pub id: i32,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ViewBox {\n\n pub x: f64,\n\n pub y: f64,\n\n pub width: f64,\n\n pub height: f64,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n", "file_path": "src/canvas.rs", "rank": 86, 
"score": 6.929081483001614 }, { "content": "\n\n {\n\n let scale = |i: usize, sx: f64, sy: f64| {\n\n return move |p: &Point| {\n\n let p = translate(p, TRANSLATIONS[i].x, TRANSLATIONS[i].y);\n\n let center = Point {\n\n x: TRANSLATIONS[i].x + 10.0,\n\n y: TRANSLATIONS[i].y + 10.0,\n\n };\n\n\n\n scale_around(&p, &center, sx, sy)\n\n };\n\n };\n\n\n\n canvas.draw_shape(&PATH, dark_slate_blue, FILL_RULE, scale(10, 0.5, 0.5));\n\n canvas.draw_shape(&PATH, crimson, FILL_RULE, scale(11, 0.5, 1.0));\n\n canvas.draw_shape(&PATH, blue, FILL_RULE, scale(12, 1.2, 1.2));\n\n canvas.draw_shape(&PATH, forest_green, FILL_RULE, scale(13, 1.0, 2.5));\n\n canvas.draw_shape(&PATH, coral, FILL_RULE, scale(14, 1.5, 2.5));\n\n }\n", "file_path": "tests/affine_transforms_test.rs", "rank": 88, "score": 6.709062914455849 }, { "content": "use crate::canvas::CanvasDescription;\n\nuse crate::geometry::Point;\n\n\n\n#[derive(Debug, Clone, Copy, Default)]\n\npub struct Angle(pub f64);\n\n\n\nimpl Angle {\n\n pub fn from_radians(radians: f64) -> Self {\n\n Self(radians)\n\n }\n\n\n\n pub fn from_degrees(degrees: f64) -> Self {\n\n Self(degrees.to_radians())\n\n }\n\n\n\n pub fn to_radians(&self) -> f64 {\n\n self.0\n\n }\n\n\n\n pub fn to_degrees(&self) -> f64 {\n\n self.0 * 180.0 / core::f32::consts::PI as f64\n\n }\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 89, "score": 6.699119335917139 }, { "content": " PathOps::Close,\n\n];\n\n\n\nconst FILL_RULE: FillRule = FillRule::NonZero;\n\n\n\nstatic TRANSLATIONS: [Point; 25] = [\n\n Point { x: 0.0, y: 0.0 },\n\n Point { x: 20.0, y: 0.0 },\n\n Point { x: 40.0, y: 0.0 },\n\n Point { x: 60.0, y: 0.0 },\n\n Point { x: 80.0, y: 0.0 },\n\n Point { x: 0.0, y: 20.0 },\n\n Point { x: 20.0, y: 20.0 },\n\n Point { x: 40.0, y: 20.0 },\n\n Point { x: 60.0, y: 20.0 },\n\n Point { x: 80.0, y: 20.0 },\n\n Point { x: 0.0, y: 40.0 },\n\n Point { x: 20.0, y: 40.0 },\n\n Point { x: 40.0, y: 40.0 },\n\n Point { x: 60.0, y: 40.0 },\n", "file_path": 
"tests/affine_transforms_test.rs", "rank": 91, "score": 6.438975945843735 }, { "content": "\n\n // Linear gradients:\n\n let square_stops = [\n\n (Color::white(), 0.0),\n\n (Color::red(), 0.5),\n\n (Color::black(), 1.0),\n\n ];\n\n canvas.draw_shape(\n\n &SQUARE,\n\n FillStyle::LinearGradient {\n\n stops: &square_stops,\n\n angle: Angle::from_radians(0.0),\n\n },\n\n FillRule::NonZero,\n\n |p: &Point| translate(p, GAP, GAP),\n\n );\n\n\n\n let hex_stops = [\n\n (Color::crimson(), 0.0),\n\n (Color::dark_slate_blue(), 0.25),\n", "file_path": "tests/gradients_test.rs", "rank": 94, "score": 6.182192689368318 }, { "content": " fn quad_to(&mut self, x1: f32, y1: f32, x: f32, y: f32) {\n\n let p1 = Point {\n\n x: (x1 - self.min_x) as f64,\n\n y: self.height as f64 - (y1 - self.min_y) as f64,\n\n };\n\n let p = Point {\n\n x: (x - self.min_x) as f64,\n\n y: self.height as f64 - (y - self.min_y) as f64,\n\n };\n\n let cp3 = p;\n\n let cp1 = Point {\n\n x: self.prev_point.x + (2.0 / 3.0) * (p1.x - self.prev_point.x),\n\n y: self.prev_point.y + (2.0 / 3.0) * (p1.y - self.prev_point.y),\n\n };\n\n let cp2 = Point {\n\n x: p.x + (2.0 / 3.0) * (p1.x - p.x),\n\n y: p.y + (2.0 / 3.0) * (p1.y - p.y),\n\n };\n\n self.path_ops.push(PathOps::CubicTo {\n\n x1: cp1.x,\n", "file_path": "tests/font_test.rs", "rank": 95, "score": 6.07952724289739 }, { "content": " ];\n\n canvas.draw_shape(\n\n &CURVED_TRIANGLE,\n\n FillStyle::LinearGradient {\n\n stops: &triangle_stops,\n\n angle: Angle::from_degrees(285.0),\n\n },\n\n FillRule::NonZero,\n\n |p: &Point| translate(p, 3.0 * GAP + 2.0 * SQUARE_SIZE, GAP),\n\n );\n\n\n\n // Radial gradients:\n\n let square_stops = [\n\n (Color::black(), 0.0),\n\n (Color::red(), 0.5),\n\n (Color::white(), 1.0),\n\n ];\n\n canvas.draw_shape(\n\n &SQUARE,\n\n FillStyle::RadialGradient {\n", "file_path": "tests/gradients_test.rs", "rank": 97, "score": 5.974490392474241 }, { "content": " x3,\n\n y3,\n\n } => {\n\n let p1 = transform(&Point {\n\n x: 
currently_at_unmaped.x + *x1,\n\n y: currently_at_unmaped.y + *y1,\n\n });\n\n let p2 = transform(&Point {\n\n x: currently_at_unmaped.x + *x2,\n\n y: currently_at_unmaped.y + *y2,\n\n });\n\n let p3 = transform(&Point {\n\n x: currently_at_unmaped.x + *x3,\n\n y: currently_at_unmaped.y + *y3,\n\n });\n\n\n\n let p1 = map_viewbox(&desc, &p1);\n\n let p2 = map_viewbox(&desc, &p2);\n\n let p3 = map_viewbox(&desc, &p3);\n\n\n", "file_path": "src/renderer.rs", "rank": 98, "score": 5.9671529160687715 }, { "content": " stops: &square_stops,\n\n translation: Point { x: 0.0, y: 0.0 },\n\n },\n\n FillRule::NonZero,\n\n |p: &Point| translate(p, GAP, 2.0 * GAP + SQUARE_SIZE),\n\n );\n\n\n\n let hex_stops = [\n\n (Color::forest_green(), 0.0),\n\n (Color::steel_blue(), 0.25),\n\n (Color::cyan(), 0.5),\n\n (Color::forest_green(), 0.75),\n\n (Color::coral(), 1.0),\n\n ];\n\n canvas.draw_shape(\n\n hex.as_slice(),\n\n FillStyle::RadialGradient {\n\n stops: &hex_stops,\n\n translation: Point { x: 0.0, y: 0.0 },\n\n },\n", "file_path": "tests/gradients_test.rs", "rank": 99, "score": 5.912818967358678 } ]
Rust
pkg-dashboard/rate-app/src/canvas.rs
transparencies/rillrate
a1a6f76e84211224a85bb9fd92602d33f095229e
use anyhow::Error; use approx::abs_diff_ne; use derive_more::{Deref, DerefMut}; use plotters::prelude::*; use plotters_canvas::CanvasBackend; use rate_ui::packages::or_fail::{Fail, Fasten}; use wasm_bindgen::JsCast; use web_sys::{CanvasRenderingContext2d as Context2d, HtmlCanvasElement}; use yew::NodeRef; const SCALE: f64 = 2.0; pub struct SmartCanvas { canvas_ref: NodeRef, canvas: Option<HtmlCanvasElement>, ctx2d: Option<Context2d>, scale: f64, real_width: f64, real_height: f64, was_width: f64, was_height: f64, } impl Default for SmartCanvas { fn default() -> Self { Self { canvas_ref: NodeRef::default(), canvas: None, ctx2d: None, scale: SCALE, real_width: 0.0, real_height: 0.0, was_width: 0.0, was_height: 0.0, } } } impl SmartCanvas { pub fn node_ref(&self) -> &NodeRef { &self.canvas_ref } pub fn canvas(&self) -> Result<&HtmlCanvasElement, Error> { self.canvas.as_ref().ok_or_else(|| Error::msg("no canvas")) } pub fn bind(&mut self) -> Result<(), Error> { let canvas = self .canvas_ref .cast::<HtmlCanvasElement>() .or_fail("can't cast canvas")?; let ctx2d: Context2d = canvas .get_context("2d") .fasten()? .or_fail("no canvas context")? 
.dyn_into() .fasten()?; self.canvas = Some(canvas); self.ctx2d = Some(ctx2d); Ok(()) } pub fn resize(&mut self) -> Result<(), Error> { let canvas = self .canvas .as_ref() .ok_or_else(|| Error::msg("Canvas 2D is not available!"))?; let rect = canvas.get_bounding_client_rect(); self.scale = 2.0; self.real_height = rect.height(); if abs_diff_ne!(&self.was_height, &self.real_height) { let height = self.real_height * self.scale; canvas.set_height(height as u32); self.was_height = self.real_height; } self.real_width = rect.width(); if abs_diff_ne!(&self.was_width, &self.real_width) { let width = self.real_width * self.scale; canvas.set_width(width as u32); self.was_width = self.real_width; } /* log::info!("RATIO: {}", web_sys::window().unwrap().device_pixel_ratio()); self.scale = web_sys::window() .as_ref() .map(Window::device_pixel_ratio) .unwrap_or(SCALE); self.scale = 2.0; */ Ok(()) } pub fn clear(&mut self) -> Result<(), Error> { let ctx = self .ctx2d .as_ref() .ok_or_else(|| Error::msg("Canvas 2D Context not initialized!"))?; /* ctx.set_transform(self.scale, 0.0, 0.0, self.scale, 0.0, 0.0) .map_err(|_| { Error::msg("Can't set transformation parameter to the Canvas 2D Context!") })?; */ ctx.clear_rect( 0.0, 0.0, self.real_width * self.scale, self.real_height * self.scale, ); Ok(()) } } #[derive(Deref, DerefMut, Default)] pub struct DrawCanvas { canvas: SmartCanvas, } impl DrawCanvas { #[allow(clippy::too_many_arguments)] pub fn draw_charts( &mut self, secs: i64, mut from_color: usize, min: f32, max: f32, x_formatter: &dyn Fn(&i64) -> String, y_formatter: &dyn Fn(&f32) -> String, data: &[Vec<(i64, f32)>], ) -> Result<(), Error> { from_color += 5; let canvas = self.canvas.canvas()?.clone(); let root_area = CanvasBackend::with_canvas_object(canvas) .ok_or_else(|| Error::msg("no canvas backend created"))? 
.into_drawing_area(); let mut ctx = ChartBuilder::on(&root_area) .set_label_area_size(LabelAreaPosition::Left, 40) .set_label_area_size(LabelAreaPosition::Bottom, 40) .margin(60) .build_cartesian_2d((-secs * 1_000)..0, min..max)?; ctx.configure_mesh() .light_line_style(&RGBColor(0xF8, 0xF9, 0xFA)) .label_style(("Jost", 26)) .x_label_formatter(x_formatter) .y_label_formatter(y_formatter) .draw()?; let single = data.len() == 1; for (col, line) in data.iter().enumerate() { let area_color; let line_color; if single { area_color = RGBColor(0xD2, 0x09, 0x09).mix(0.2).to_rgba(); line_color = RGBColor(0x42, 0x11, 0xCC).mix(1.0).to_rgba(); } else { line_color = Palette99::pick(from_color + col).to_rgba(); area_color = line_color.mix(0.2).to_rgba(); } let line = line.iter().cloned(); let series = AreaSeries::new(line, 0.0, &area_color).border_style(&line_color); ctx.draw_series(series)?; } Ok(()) } } pub fn sustain<Y: Copy>(mut iter: impl Iterator<Item = (i64, Y)>, last_x: i64) -> Vec<(i64, Y)> { let mut result = Vec::new(); if let Some((mut prev_x, mut prev_y)) = iter.next() { result.push((prev_x, prev_y)); for (next_x, next_y) in iter { let diff = next_x - prev_x; let shift = (diff as f32 * 0.1) as i64; result.push((next_x - shift, prev_y)); result.push((next_x, next_y)); prev_x = next_x; prev_y = next_y; } result.push((last_x, prev_y)); } result } /* pub fn sustain_soft<Y: Copy>( mut iter: impl Iterator<Item = (i64, Y)>, last: Option<i64>, ) -> Vec<(i64, Y)> { let mut result = Vec::new(); if let Some((mut prev_x, mut prev_y)) = iter.next() { result.push((prev_x, prev_y)); for (next_x, next_y) in iter { let diff = ((next_x - prev_x) as f32 * 0.2) as i64; result.push((next_x - diff, prev_y)); result.push((next_x, next_y)); prev_x = next_x; prev_y = next_y; } if let Some(last_x) = last { result.push((last_x, prev_y)); } } result } pub fn sustain_sharp<X: Copy, Y: Copy>( mut iter: impl Iterator<Item = (X, Y)>, last: Option<X>, ) -> Vec<(X, Y)> { let mut result = Vec::new(); 
if let Some((prev_x, mut prev_y)) = iter.next() { result.push((prev_x, prev_y)); for (next_x, next_y) in iter { result.push((next_x, prev_y)); result.push((next_x, next_y)); //prev_x = next_x; prev_y = next_y; } if let Some(last_x) = last { result.push((last_x, prev_y)); } } result } */ pub fn formatter_plain(input: &f32) -> String { input.to_string() } /* pub fn formatter_pct(input: &f32) -> String { format!("{:.0} %", input) } */ /* pub fn formatter_kib(input: &f32) -> String { format!("{:.0} KiB/s", input / 1_024.0) } pub fn formatter_gb(input: &f32) -> String { format!("{:.0} Gb", input / 1_000_000.0) } */ pub fn formatter_sec(input: &i64) -> String { let input = input.abs(); format!("{} sec", input / 1_000) /* if input % 60_000 == 0 { format!("{} min", input / 60_000) } else { format!("{} sec", input / 1_000) } */ }
use anyhow::Error; use approx::abs_diff_ne; use derive_more::{Deref, DerefMut}; use plotters::prelude::*; use plotters_canvas::CanvasBackend; use rate_ui::packages::or_fail::{Fail, Fasten}; use wasm_bindgen::JsCast; use web_sys::{CanvasRenderingContext2d as Context2d, HtmlCanvasElement}; use yew::NodeRef; const SCALE: f64 = 2.0; pub struct SmartCanvas { canvas_ref: NodeRef, canvas: Option<HtmlCanvasElement>, ctx2d: Option<Context2d>, scale: f64, real_width: f64, real_height: f64, was_width: f64, was_height: f64, } impl Default for SmartCanvas { fn default() -> Self { Self { canvas_ref: NodeRef::default(), canvas: None, ctx2d: None, scale: SCALE, real_width: 0.0, real_height: 0.0, was_width: 0.0, was_height: 0.0, } } } impl SmartCanvas { pub fn node_ref(&self) -> &NodeRef { &self.canvas_ref } pub fn canvas(&self) -> Result<&HtmlCanvasElement, Error> { self.canvas.as_ref().ok_or_else(|| Error::msg("no canvas")) } pub fn bind(&mut self) -> Result<(), Error> { let canvas = self .canvas_ref .cast::<HtmlCanvasElement>() .or_fail("can't cast canvas")?; let ctx2d: Context2d = canvas .get_context("2d") .fasten()? .or_fail("no canvas context")? 
.dyn_into() .fasten()?; self.canvas = Some(canvas); self.ctx2d = Some(ctx2d); Ok(()) } pub fn resize(&mut self) -> Result<(), Error> { let canvas = self .canvas .as_ref() .ok_or_else(|| Error::msg("Canvas 2D is not available!"))?; let rect = canvas.get_bounding_client_rect(); self.scale = 2.0; self.real_height = rect.height(); if abs_diff_ne!(&self.was_height, &self.real_height) { let height = self.real_height * self.scale; canvas.set_height(height as u32); self.was_height = self.real_height; } self.real_width = rect.width(); if abs_diff_ne!(&self.was_width, &self.real_width) { let width = self.real_width * self.scale; canvas.set_width(width as u32); self.was_width = self.real_width; } /* log::info!("RATIO: {}", web_sys::window().unwrap().device_pixel_ratio()); self.scale = web_sys::window() .as_ref() .map(Window::device_pixel_ratio) .unwrap_or(SCALE); self.scale = 2.0; */ Ok(()) } pub fn clear(&mut self) -> Result<(), Error> { let ctx = self .ctx2d .as_ref() .ok_or_else(|| Error::msg("Canvas 2D Context not initialized!"))?; /* ctx.set_transform(self.scale, 0.0, 0.0, self.scale, 0.0, 0.0) .map_err(|_| { Error::msg("Can't set transformation parameter to the Canvas 2D Context!") })?; */ ctx.clear_rect( 0.0, 0.0, self.real_width * self.scale, self.real_height * self.scale, ); Ok(()) } } #[derive(Deref, DerefMut, Default)] pub struct DrawCanvas { canvas: SmartCanvas, } impl DrawCanvas { #[allow(clippy::too_many_arguments)] pub fn draw_charts( &mut self, secs: i64, mut from_color: usize, min: f32, max: f32, x_formatter: &dyn Fn(&i64) -> String, y_formatter: &dyn Fn(&f32) -> String, data: &[Vec<(i64, f32)>], ) -> Result<(), Error> { from_color += 5; let canvas = self.canvas.canvas()?.clone(); let root_area = CanvasBackend::with_canvas_object(canvas) .ok_or_else(|| Error::msg("no canvas backend created"))? 
.into_drawing_area(); let mut ctx = ChartBuilder::on(&root_area) .set_label_area_size(LabelAreaPosition::Left, 40) .set_label_area_size(LabelAreaPosition::Bottom, 40) .margin(60) .build_cartesian_2d((-secs * 1_000)..0, min..max)?; ctx.configure_mesh() .light_line_style(&RGBColor(0xF8, 0xF9, 0xFA)) .label_style(("Jost", 26)) .x_label_formatter(x_formatter) .y_label_formatter(y_formatter) .draw()?; let single = data.len() == 1; for (col, line) in data.iter().enumerate() { let area_color; let line_color; if single { area_color = RGBColor(0xD2, 0x09, 0x09).mix(0.2).to_rgba(); line_color = RGBColor(0x42, 0x11, 0xCC).mix(1.0).to_rgba(); } else { line_color = Palette99::pick(from_color + col).to_rgba(); area_color = line_color.mix(0.2).to_rgba(); } let line = line.iter().cloned(); let series = AreaSeries::new(line, 0.0, &area_color).border_style(&line_color); ctx.draw_series(series)?; } Ok(()) } } pub fn sustain<Y: Copy>(mut iter: impl Iterato
((last_x, prev_y)); } result } /* pub fn sustain_soft<Y: Copy>( mut iter: impl Iterator<Item = (i64, Y)>, last: Option<i64>, ) -> Vec<(i64, Y)> { let mut result = Vec::new(); if let Some((mut prev_x, mut prev_y)) = iter.next() { result.push((prev_x, prev_y)); for (next_x, next_y) in iter { let diff = ((next_x - prev_x) as f32 * 0.2) as i64; result.push((next_x - diff, prev_y)); result.push((next_x, next_y)); prev_x = next_x; prev_y = next_y; } if let Some(last_x) = last { result.push((last_x, prev_y)); } } result } pub fn sustain_sharp<X: Copy, Y: Copy>( mut iter: impl Iterator<Item = (X, Y)>, last: Option<X>, ) -> Vec<(X, Y)> { let mut result = Vec::new(); if let Some((prev_x, mut prev_y)) = iter.next() { result.push((prev_x, prev_y)); for (next_x, next_y) in iter { result.push((next_x, prev_y)); result.push((next_x, next_y)); //prev_x = next_x; prev_y = next_y; } if let Some(last_x) = last { result.push((last_x, prev_y)); } } result } */ pub fn formatter_plain(input: &f32) -> String { input.to_string() } /* pub fn formatter_pct(input: &f32) -> String { format!("{:.0} %", input) } */ /* pub fn formatter_kib(input: &f32) -> String { format!("{:.0} KiB/s", input / 1_024.0) } pub fn formatter_gb(input: &f32) -> String { format!("{:.0} Gb", input / 1_000_000.0) } */ pub fn formatter_sec(input: &i64) -> String { let input = input.abs(); format!("{} sec", input / 1_000) /* if input % 60_000 == 0 { format!("{} min", input / 60_000) } else { format!("{} sec", input / 1_000) } */ }
r<Item = (i64, Y)>, last_x: i64) -> Vec<(i64, Y)> { let mut result = Vec::new(); if let Some((mut prev_x, mut prev_y)) = iter.next() { result.push((prev_x, prev_y)); for (next_x, next_y) in iter { let diff = next_x - prev_x; let shift = (diff as f32 * 0.1) as i64; result.push((next_x - shift, prev_y)); result.push((next_x, next_y)); prev_x = next_x; prev_y = next_y; } result.push
function_block-random_span
[]
Rust
src/env.rs
sezaru/rust-rocks
06245eedaf91b8358688abefa67eba802b607142
use lazy_static::lazy_static; use std::ffi::CStr; use std::mem; use std::path::Path; use std::ptr; use std::str; use rocks_sys as ll; use crate::thread_status::ThreadStatus; use crate::to_raw::{FromRaw, ToRaw}; use crate::{Error, Result}; pub const DEFAULT_PAGE_SIZE: usize = 4 * 1024; lazy_static! { static ref DEFAULT_ENVOPTIONS: EnvOptions = EnvOptions::default(); static ref DEFAULT_ENV: Env = { Env { raw: unsafe { ll::rocks_create_default_env() }, } }; } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Priority { Low, High, Total, } pub struct EnvOptions { raw: *mut ll::rocks_envoptions_t, } impl Drop for EnvOptions { fn drop(&mut self) { unsafe { ll::rocks_envoptions_destroy(self.raw) } } } impl ToRaw<ll::rocks_envoptions_t> for EnvOptions { fn raw(&self) -> *mut ll::rocks_envoptions_t { self.raw } } impl Default for EnvOptions { fn default() -> Self { EnvOptions { raw: unsafe { ll::rocks_envoptions_create() }, } } } unsafe impl Sync for EnvOptions {} impl EnvOptions { pub fn default_instance() -> &'static EnvOptions { &*DEFAULT_ENVOPTIONS } pub fn use_mmap_reads(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_mmap_reads(self.raw, val as u8); } self } pub fn use_mmap_writes(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_mmap_writes(self.raw, val as u8); } self } pub fn use_direct_reads(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_direct_reads(self.raw, val as u8); } self } pub fn use_direct_writes(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_direct_writes(self.raw, val as u8); } self } pub fn allow_fallocate(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_allow_fallocate(self.raw, val as u8); } self } pub fn fd_cloexec(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_fd_cloexec(self.raw, val as u8); } self } pub fn bytes_per_sync(self, val: u64) -> Self { unsafe { ll::rocks_envoptions_set_bytes_per_sync(self.raw, val); } self } pub fn 
fallocate_with_keep_size(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_fallocate_with_keep_size(self.raw, val as u8); } self } pub fn compaction_readahead_size(self, val: usize) -> Self { unsafe { ll::rocks_envoptions_set_compaction_readahead_size(self.raw, val); } self } pub fn random_access_max_buffer_size(self, val: usize) -> Self { unsafe { ll::rocks_envoptions_set_random_access_max_buffer_size(self.raw, val); } self } pub fn writable_file_max_buffer_size(self, val: usize) -> Self { unsafe { ll::rocks_envoptions_set_writable_file_max_buffer_size(self.raw, val); } self } } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum InfoLogLevel { Debug = 0, Info, Warn, Error, Fatal, Header, } #[derive(Debug)] pub struct Logger { raw: *mut ll::rocks_logger_t, } impl ToRaw<ll::rocks_logger_t> for Logger { fn raw(&self) -> *mut ll::rocks_logger_t { self.raw } } impl Drop for Logger { fn drop(&mut self) { unsafe { ll::rocks_logger_destroy(self.raw); } } } impl Logger { unsafe fn from_ll(raw: *mut ll::rocks_logger_t) -> Logger { Logger { raw: raw } } pub fn log(&self, log_level: InfoLogLevel, msg: &str) { unsafe { ll::rocks_logger_log(self.raw, mem::transmute(log_level), msg.as_ptr() as *const _, msg.len()); } } pub fn flush(&self) { unsafe { ll::rocks_logger_flush(self.raw); } } pub fn get_log_level(&self) -> InfoLogLevel { unsafe { mem::transmute(ll::rocks_logger_get_log_level(self.raw)) } } pub fn set_log_level(&mut self, log_level: InfoLogLevel) { unsafe { ll::rocks_logger_set_log_level(self.raw, mem::transmute(log_level)); } } } pub struct Env { raw: *mut ll::rocks_env_t, } impl ToRaw<ll::rocks_env_t> for Env { fn raw(&self) -> *mut ll::rocks_env_t { self.raw } } impl Drop for Env { fn drop(&mut self) { unsafe { ll::rocks_env_destroy(self.raw) } } } unsafe impl Sync for Env {} impl Env { pub fn default_instance() -> &'static Env { &*DEFAULT_ENV } pub fn new_mem() -> Env { Env { raw: unsafe { ll::rocks_create_mem_env() }, } } pub fn 
new_timed() -> Env { Env { raw: unsafe { ll::rocks_create_timed_env() }, } } pub fn set_low_priority_background_threads(&self, number: i32) { unsafe { ll::rocks_env_set_background_threads(self.raw, number); } } pub fn set_high_priority_background_threads(&self, number: i32) { unsafe { ll::rocks_env_set_high_priority_background_threads(self.raw, number); } } pub fn wait_for_join(&self) { unsafe { ll::rocks_env_join_all_threads(self.raw); } } pub fn get_thread_pool_queue_len(&self, pri: Priority) -> u32 { unsafe { ll::rocks_env_get_thread_pool_queue_len(self.raw, mem::transmute(pri)) as u32 } } pub fn create_logger<P: AsRef<Path>>(&self, fname: P) -> Result<Logger> { let mut status = ptr::null_mut(); unsafe { let name = fname.as_ref().to_str().unwrap(); let logger = ll::rocks_env_new_logger(self.raw, name.as_ptr() as *const _, name.len(), &mut status); Error::from_ll(status).map(|_| Logger::from_ll(logger)) } } pub fn now_micros(&self) -> u64 { unsafe { ll::rocks_env_now_micros(self.raw) as u64 } } pub fn now_nanos(&self) -> u64 { unsafe { ll::rocks_env_now_nanos(self.raw) as u64 } } pub fn sleep_for_microseconds(&self, micros: i32) { unsafe { ll::rocks_env_sleep_for_microseconds(self.raw, micros); } } pub fn get_hostname(&self) -> Result<String> { let mut buf = [0u8; 128]; let mut status = ptr::null_mut(); unsafe { ll::rocks_env_get_host_name(self.raw, (&mut buf).as_mut_ptr() as *mut _, 128, &mut status); Error::from_ll(status).map(|_| CStr::from_ptr(buf[..].as_ptr() as _).to_string_lossy().to_string()) } } pub fn get_current_time(&self) -> Result<u64> { let mut status = ptr::null_mut(); unsafe { let tm = ll::rocks_env_get_current_time(self.raw, &mut status); Error::from_ll(status).map(|()| tm as u64) } } pub fn time_to_string(&self, time: u64) -> String { unsafe { let cxx_string = ll::rocks_env_time_to_string(self.raw, time); let ret = CStr::from_ptr(ll::cxx_string_data(cxx_string) as *const _) .to_str() .unwrap() .into(); ll::cxx_string_destroy(cxx_string); ret } 
} pub fn set_background_threads(&self, number: i32, pri: Priority) { match pri { Priority::Low => self.set_low_priority_background_threads(number), Priority::High => self.set_high_priority_background_threads(number), _ => unreachable!("wrong pri for thread pool"), } } pub fn get_background_threads(&self, pri: Priority) -> i32 { unsafe { ll::rocks_env_get_background_threads(self.raw, mem::transmute(pri)) as i32 } } pub fn inc_background_threads_if_needed(&self, number: i32, pri: Priority) { unsafe { ll::rocks_env_inc_background_threads_if_needed(self.raw, number, mem::transmute(pri)); } } pub fn lower_thread_pool_io_priority(&self, pool: Priority) { unsafe { ll::rocks_env_lower_thread_pool_io_priority(self.raw, mem::transmute(pool)); } } pub fn get_thread_list(&self) -> Vec<ThreadStatus> { let mut len = 0; unsafe { let thread_status_arr = ll::rocks_env_get_thread_list(self.raw, &mut len); let ret = (0..len) .into_iter() .map(|i| ThreadStatus::from_ll(*thread_status_arr.offset(i as isize))) .collect(); ll::rocks_env_get_thread_list_destroy(thread_status_arr); ret } } pub fn get_thread_id(&self) -> u64 { unsafe { ll::rocks_env_get_thread_id(self.raw) as u64 } } } #[cfg(test)] mod tests { use super::*; use std::fs::File; use std::io::prelude::*; #[test] fn env_basic() { let env = Env::default_instance(); assert!(env.get_thread_id() > 0); assert!(env.now_micros() > 1500000000000000); assert!(env.get_hostname().is_ok()); assert!(env.get_current_time().is_ok()); assert!(env.time_to_string(env.get_current_time().unwrap()).len() > 10); } #[test] fn logger() { let log_dir = ::tempdir::TempDir::new_in(".", "log").unwrap(); let env = Env::default_instance(); { let logger = env.create_logger(log_dir.path().join("test.log")); assert!(logger.is_ok()); let mut logger = logger.unwrap(); logger.set_log_level(InfoLogLevel::Info); assert_eq!(logger.get_log_level(), InfoLogLevel::Info); logger.log(InfoLogLevel::Error, "test log message"); logger.log(InfoLogLevel::Debug, "debug log 
message"); logger.flush(); } let mut f = File::open(log_dir.path().join("test.log")).unwrap(); let mut s = String::new(); f.read_to_string(&mut s).unwrap(); assert!(s.contains("[ERROR] test log message")); assert!(!s.contains("debug log message")); } }
use lazy_static::lazy_static; use std::ffi::CStr; use std::mem; use std::path::Path; use std::ptr; use std::str; use rocks_sys as ll; use crate::thread_status::ThreadStatus; use crate::to_raw::{FromRaw, ToRaw}; use crate::{Error, Result}; pub const DEFAULT_PAGE_SIZE: usize = 4 * 1024; lazy_static! { static ref DEFAULT_ENVOPTIONS: EnvOptions = EnvOptions::default(); static ref DEFAULT_ENV: Env = { Env { raw: unsafe { ll::rocks_create_default_env() }, } }; } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Priority { Low, High, Total, } pub struct EnvOptions { raw: *mut ll::rocks_envoptions_t, } impl Drop for EnvOptions { fn drop(&mut self) { unsafe { ll::rocks_envoptions_destroy(self.raw) } } } impl ToRaw<ll::rocks_envoptions_t> for EnvOptions { fn raw(&self) -> *mut ll::rocks_envoptions_t { self.raw } } impl Default for EnvOptions { fn default() -> Self { EnvOptions { raw: unsafe { ll::rocks_envoptions_create() }, } } } unsafe impl Sync for EnvOptions {} impl EnvOptions { pub fn default_instance() -> &'static EnvOptions { &*DEFAULT_ENVOPTIONS } pub fn use_mmap_reads(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_mmap_reads(self.raw, val as u8); } self } pub fn use_mmap_writes(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_mmap_writes(self.raw, val as u8); } self } pub fn use_direct_reads(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_direct_reads(self.raw, val as u8); } self } pub fn use_direct_writes(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_use_direct_writes(self.raw, val as u8); } self } pub fn allow_fallocate(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_allow_fallocate(self.raw, val as u8); } self } pub fn fd_cloexec(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_fd_cloexec(self.raw, val as u8); } self } pub fn bytes_per_sync(self, val: u64) -> Self { unsafe { ll::rocks_envoptions_set_bytes_per_sync(self.raw, val); } self } pub fn 
fallocate_with_keep_size(self, val: bool) -> Self { unsafe { ll::rocks_envoptions_set_fallocate_with_keep_size(self.raw, val as u8); } self } pub fn compaction_readahead_size(self, val: usize) -> Self { unsafe { ll::rocks_envoptions_set_compaction_readahead_size(self.raw, val); } self } pub fn random_access_max_buffer_size(self, val: usize) -> Self { unsafe { ll::rocks_envoptions_set_random_access_max_buffer_size(self.raw, val); } self } pub fn writable_file_max_buffer_size(self, val: usize) -> Self { unsafe { ll::rocks_envoptions_set_writable_file_max_buffer_size(self.raw, val); } self } } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum InfoLogLevel { Debug = 0, Info, Warn, Error, Fatal, Header, } #[derive(Debug)] pub struct Logger { raw: *mut ll::rocks_logger_t, } impl ToRaw<ll::rocks_logger_t> for Logger { fn raw(&self) -> *mut ll::rocks_logger_t { self.raw } } impl Drop for Logger { fn drop(&mut self) { unsafe { ll::rocks_logger_destroy(self.raw); } } } impl Logger { unsafe fn from_ll(raw: *mut ll::rocks_logger_t) -> Logger { Logger { raw: raw } } pub fn log(&self, log_level: InfoLogLevel, msg: &str) { unsafe { ll::rocks_logger_log(self.raw, mem::transmute(log_level), msg.as_ptr() as *const _, msg.len()); } } pub fn flush(&self) { unsafe { ll::rocks_logger_flush(self.raw); } } pub fn get_log_level(&self) -> InfoLogLevel { unsafe { mem::transmute(ll::rocks_logger_get_log_level(self.raw)) } } pub fn set_log_level(&mut self, log_level: InfoLogLevel) { unsafe { ll::rocks_logger_set_log_level(self.raw, mem::transmute(log_level)); } } } pub struct Env { raw: *mut ll::rocks_env_t, } impl ToRaw<ll::rocks_env_t> for Env { fn raw(&self) -> *mut ll::rocks_env_t { self.raw } } impl Drop for Env { fn drop(&mut self) { unsafe { ll::rocks_env_destroy(self.raw) } } } unsafe impl Sync for Env {} impl Env { pub fn default_instance() -> &'static Env { &*DEFAULT_ENV } pub fn new_mem() -> Env { Env { raw: unsafe { ll::rocks_create_mem_env() }, } } pub fn 
new_timed() -> Env { Env { raw: unsafe { ll::rocks_create_timed_env() }, } } pub fn set_low_priority_background_threads(&self, number: i32) { unsafe { ll::rocks_env_set_background_threads(self.raw, number); } } pub fn set_high_priority_background_threads(&self, number: i32) { unsafe { ll::rocks_env_set_high_priority_background_threads(self.raw, number); } } pub fn wait_for_join(&self) { unsafe { ll::rocks_env_join_all_threads(self.raw); } } pub fn get_thread_pool_queue_len(&self, pri: Priority) -> u32 { unsafe { ll::rocks_env_get_thread_pool_queue_len(self.raw, mem::transmute(pri)) as u32 } } pub fn create_logger<P: AsRef<Path>>(&self, fname: P) -> Result<Logger> { let mut status = ptr::null_mut(); unsafe { let name = fname.as_ref().to_str().unwrap(); let logger = ll::rocks_env_new_logger(self.raw, name.as_ptr() as *const _, name.len(), &mut status); Error::from_ll(status).map(|_| Logger::from_ll(logger)) } } pub fn now_micros(&self) -> u64 { unsafe { ll::rocks_env_now_micros(self.raw) as u64 } } pub fn now_nanos(&self) -> u64 { unsafe { ll::rocks_env_now_nanos(self.raw) as u64 } } pub fn sleep_for_microseconds(&self, micros: i32) { unsafe { ll::rocks_env_sleep_for_microseconds(self.raw, micros); } } pub fn get_hostname(&self) -> Result<String> { let mut buf = [0u8; 128]; let mut status = ptr::null_mut(); unsafe { ll::rocks_env_get_host_name(self.raw, (&mut buf).as_mut_ptr() as *mut _, 128, &mut status); Error::from_ll(status).map(|_| CStr::from_ptr(buf[..].as_ptr() as _).to_string_lossy().to_string()) } } pub fn get_current_time(&self) -> Result<u64> { let mut status = ptr::null_mut(); unsafe { let tm = ll::rocks_env_get_current_time(self.raw, &mut status); Error::from_ll(status).map(|()| tm as u64) } } pub fn time_to_string(&self, time: u64) -> String { unsafe { let cxx_string = ll::rocks_env_time_to_string(self.raw, time); let ret = CStr::from_ptr(ll::cxx_string_data(cxx_string) as *const _) .to_str() .unwrap() .into(); ll::cxx_string_destroy(cxx_string); ret } 
} pub fn set_background_threads(&self, number: i32, pri: Priority) { match pri { Priority::Low => self.set_low_priority_background_threads(number), Priority::High => self.set_high_priority_background_threads(number), _ => unreachable!("wrong pri for thread pool"), } } pub fn get_background_threads(&self, pri: Priority) -> i32 { unsafe { ll::rocks_env_get_background_threads(self.raw, mem::transmute(pri)) as i32 } } pub fn inc_background_threads_if_needed(&self, number: i32, pri: Priority) { unsafe { ll::rocks_env_inc_background_threads_if_needed(self.raw, number, mem::transmute(pri)); } } pub fn lower_thread_pool_io_priority(&self, pool: Priority) { unsafe { ll::rocks_env_lower_thread_pool_io_priority(self.raw, mem::transmute(pool)); } } pub fn get_thread_list(&self) -> Vec<ThreadStatus> { let mut len = 0; unsafe { let thread_status_arr = ll::rocks_env_get_thread_list(self.raw, &mut len); let ret = (0..len) .into_iter() .map(|i| ThreadStatus::from_ll(*thread_status_arr.offset(i as isize))) .collect(); ll::rocks_env_get_thread_list_destroy(thread_status_arr); ret } } pub fn get_thread_id(&self) -> u64 { unsafe { ll::rocks_env_get_thread_id(self.raw) as u64 } } } #[cfg(test)] mod tests { use super::*; use std::fs::File; use std::io::prelude::*; #[test] fn env_basic() { let env = Env::default_instance(); assert!(env.get_thread_id() > 0); assert!(env.now_micros() > 1500000000000000); assert!(env.get_hostname().is_ok()); assert!(env.get_current_time().is_ok()); assert!(env.time_to_string(env.get_current_time().unwrap()).len() > 10); } #[test] fn logger() { let log_dir = ::tempdir::TempDir::new_in(".", "log").unwrap(); let env = Env::default_instance(); { let logger = env.create_logger(log_dir.path().join
sage")); assert!(!s.contains("debug log message")); } }
("test.log")); assert!(logger.is_ok()); let mut logger = logger.unwrap(); logger.set_log_level(InfoLogLevel::Info); assert_eq!(logger.get_log_level(), InfoLogLevel::Info); logger.log(InfoLogLevel::Error, "test log message"); logger.log(InfoLogLevel::Debug, "debug log message"); logger.flush(); } let mut f = File::open(log_dir.path().join("test.log")).unwrap(); let mut s = String::new(); f.read_to_string(&mut s).unwrap(); assert!(s.contains("[ERROR] test log mes
function_block-random_span
[ { "content": "/// Destroy the contents of the specified database.\n\n///\n\n/// Be very careful using this method.\n\npub fn destroy_db<P: AsRef<Path>>(options: &Options, name: P) -> Result<()> {\n\n let name = name.as_ref().to_str().expect(\"valid utf8\");\n\n let mut status = ptr::null_mut();\n\n unsafe {\n\n ll::rocks_destroy_db(options.raw(), name.as_ptr() as *const _, name.len(), &mut status);\n\n Error::from_ll(status)\n\n }\n\n}\n\n\n", "file_path": "src/db.rs", "rank": 0, "score": 244379.999235599 }, { "content": "/// `options` These options will be used for the database and for ALL column\n\n/// families encountered during the repair.\n\npub fn repair_db<P: AsRef<Path>>(options: &Options, name: P) -> Result<()> {\n\n let name = name.as_ref().to_str().expect(\"valid utf8\");\n\n let mut status = ptr::null_mut();\n\n unsafe {\n\n ll::rocks_repair_db(options.raw(), name.as_ptr() as *const _, name.len(), &mut status);\n\n Error::from_ll(status)\n\n }\n\n}\n\n\n", "file_path": "src/db.rs", "rank": 1, "score": 244379.92778067727 }, { "content": "pub fn escape(data: &[u8]) -> String {\n\n let mut escaped = Vec::with_capacity(data.len() * 4);\n\n for &c in data {\n\n match c {\n\n b'\\n' => escaped.extend_from_slice(br\"\\n\"),\n\n b'\\r' => escaped.extend_from_slice(br\"\\r\"),\n\n b'\\t' => escaped.extend_from_slice(br\"\\t\"),\n\n b'\"' => escaped.extend_from_slice(b\"\\\\\\\"\"),\n\n b'\\\\' => escaped.extend_from_slice(br\"\\\\\"),\n\n _ => {\n\n if c >= 0x20 && c < 0x7f {\n\n // c is printable\n\n escaped.push(c);\n\n } else {\n\n escaped.push(b'\\\\');\n\n escaped.push(b'0' + (c >> 6));\n\n escaped.push(b'0' + ((c >> 3) & 7));\n\n escaped.push(b'0' + (c & 7));\n\n }\n\n }\n\n }\n\n }\n\n escaped.shrink_to_fit();\n\n unsafe { String::from_utf8_unchecked(escaped) }\n\n}\n\n\n", "file_path": "examples/dumper.rs", "rank": 2, "score": 200023.8560185378 }, { "content": "/// If a DB cannot be opened, you may attempt to call this method to\n\n/// resurrect as 
much of the contents of the database as possible.\n\n/// Some data may be lost, so be careful when calling this function\n\n/// on a database that contains important information.\n\n///\n\n/// With this API, we will warn and skip data associated with column families not\n\n/// specified in `column_families`.\n\n///\n\n/// `column_families` Descriptors for known column families\n\npub fn repair_db_with_cf<P: AsRef<Path>>(\n\n db_options: &DBOptions,\n\n dbname: P,\n\n column_families: &[&ColumnFamilyDescriptor],\n\n) -> Result<()> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "src/db.rs", "rank": 3, "score": 177937.28816683526 }, { "content": "/// `unknown_cf_opts` Options for column families encountered during the\n\n/// repair that were not specified in `column_families`.\n\npub fn repair_db_with_unknown_cf_opts<P: AsRef<Path>>(\n\n db_options: &DBOptions,\n\n dbname: P,\n\n column_families: &[&ColumnFamilyDescriptor],\n\n unknown_cf_opts: &ColumnFamilyOptions,\n\n) -> Result<()> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "src/db.rs", "rank": 4, "score": 169316.76033243176 }, { "content": "fn deserialize(value: &[u8]) -> u64 {\n\n value\n\n .iter()\n\n .enumerate()\n\n .fold(0, |acc, (i, &v)| acc + ((v as u64) << ((7 - i) * 8)))\n\n}\n\n\n", "file_path": "examples/counters.rs", "rank": 5, "score": 159090.86791329278 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // Optimize RocksDB. 
This is the easiest way to get RocksDB to perform well\n\n // NOTE: Is rust, Options is splited into 2 parts.\n\n let options = Options::default()\n\n .map_db_options(|db| db.create_if_missing(true).increase_parallelism(16))\n\n .map_cf_options(|cf| cf.optimize_level_style_compaction(512 * 1024 * 1024));\n\n\n\n // open DB\n\n let db = DB::open(&options, DB_PATH)?;\n\n\n\n // Put key-value\n\n db.put(WriteOptions::default_instance(), b\"key1\", b\"value\")?;\n\n\n\n // get value\n\n let value = db.get(ReadOptions::default_instance(), b\"key1\")?;\n\n assert_eq!(value, b\"value\");\n\n\n\n // atomically apply a set of updates\n\n {\n\n let mut batch = WriteBatch::default();\n", "file_path": "examples/simple.rs", "rank": 6, "score": 154445.65015397637 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let options = Options::default().map_db_options(|db_opt| db_opt.max_open_files(-1));\n\n\n\n let secondary_path = \"/tmp/rocksdb_secondary\";\n\n\n\n let db = DB::open_as_secondary(&options, DB_PATH, secondary_path)?;\n\n\n\n println!(\"db => {:?}\", db);\n\n\n\n db.try_catch_up_with_primary()?;\n\n\n\n println!(\"get => {:?}\", db.get(ReadOptions::default_instance(), b\"key2\"));\n\n\n\n println!(\n\n \"write => {:?}\",\n\n db.put(WriteOptions::default_instance(), b\"key3\", b\"key4\")\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/secondary.rs", "rank": 7, "score": 154445.65015397637 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let opts = Options::default();\n\n\n\n let db_path = env::args().skip(1).next().expect(\"usage: ./dumper XXXX\");\n\n\n\n let cfs = DB::list_column_families(&opts, &db_path).unwrap();\n\n let (db, cfs) = DB::open_for_readonly_with_column_families(&DBOptions::default(), &db_path, cfs, false)?;\n\n println!(\"DB => {:?}\", db);\n\n\n\n for cf in &cfs {\n\n println!(\"{:?}\", cf);\n\n let meta = db.get_column_family_metadata(cf);\n\n println!(\"{:?}\", meta);\n\n let it = 
cf.new_iterator(&ReadOptions::default().pin_data(true));\n\n for (k, val) in it {\n\n println!(r#\" \"{}\" => \"{}\"\"#, escape(k), escape(val));\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/dumper.rs", "rank": 8, "score": 154445.65015397637 }, { "content": "pub fn load_latest_options(path: &str) -> Result<(DBOptions, Vec<ColumnFamilyDescriptor>)> {\n\n let cpath = CString::new(path).unwrap();\n\n let db_opt = DBOptions::default();\n\n let mut cf_descs_len = 0_usize;\n\n let mut status = ptr::null_mut();\n\n let mut cf_descs: Vec<ColumnFamilyDescriptor> = Vec::new();\n\n\n\n let c_cf_descs =\n\n unsafe { ll::rocks_load_latest_options(cpath.as_ptr(), db_opt.raw(), &mut cf_descs_len, &mut status) };\n\n if let Err(error) = Error::from_ll(status) {\n\n return Err(error);\n\n }\n\n for i in 0..cf_descs_len {\n\n let c_cf_desc = unsafe { *c_cf_descs.offset(i as _) };\n\n let name = unsafe { CStr::from_ptr(ll::rocks_column_family_descriptor_get_name(c_cf_desc)) };\n\n let cfopt =\n\n unsafe { ColumnFamilyOptions::from_ll(ll::rocks_column_family_descriptor_get_cfoptions(c_cf_desc)) };\n\n cf_descs.push(ColumnFamilyDescriptor::new(\n\n name.to_str().expect(\"non-utf8 cf name\"),\n\n cfopt,\n", "file_path": "src/utilities.rs", "rank": 9, "score": 154289.5588664529 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n const DB_PATH: &str = \"/tmp/rocksmergetest\";\n\n\n\n let options = Options::default()\n\n .map_db_options(|opt| opt.create_if_missing(true))\n\n .map_cf_options(|opt| opt.merge_operator(Box::new(MyMerge)).compaction_filter(&*MY_FILTER));\n\n\n\n let db = DB::open(&options, DB_PATH)?;\n\n\n\n let wopts = WriteOptions::default_instance();\n\n db.merge(wopts, b\"0\", b\"bad\")?;\n\n db.merge(wopts, b\"1\", b\"data1\")?;\n\n db.merge(wopts, b\"1\", b\"bad\")?;\n\n db.merge(wopts, b\"1\", b\"data2\")?;\n\n db.merge(wopts, b\"1\", b\"bad\")?;\n\n db.merge(wopts, b\"3\", b\"data3\")?;\n\n\n\n 
db.compact_range(&CompactRangeOptions::default(), ..)?;\n\n\n\n println!(\"{:?}\", &*MY_FILTER);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/compaction_filter.rs", "rank": 10, "score": 151221.63849681054 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // open DB\n\n let options = Options::default().map_db_options(|db_opt| db_opt.create_if_missing(true));\n\n\n\n let db = DB::open(&options, DB_PATH).map_err(|err| {\n\n eprintln!(\n\n \"You should delete the {:?} directory before running this example.\",\n\n DB_PATH\n\n );\n\n err\n\n })?;\n\n\n\n // create column family\n\n let cf = db.create_column_family(&ColumnFamilyOptions::default(), \"new_cf\")?;\n\n\n\n // close DB\n\n drop(cf);\n\n drop(db);\n\n\n\n // open DB with two column families\n", "file_path": "examples/column_families.rs", "rank": 11, "score": 151221.63849681054 }, { "content": "fn serialize(value: u64) -> Vec<u8> {\n\n value.to_be_bytes().to_vec()\n\n}\n\n\n\nimpl AssociativeMergeOperator for UInt64AddOperator {\n\n fn merge(&self, key: &[u8], existing_value: Option<&[u8]>, value: &[u8], _logger: &Logger) -> Option<Vec<u8>> {\n\n println!(\n\n \"merge: key = {:?} existing_value = {:?} value = {:?}\",\n\n key, existing_value, value\n\n );\n\n // assuming 0 if no existing value\n\n let existing = existing_value.map(|raw| deserialize(raw)).unwrap_or_default();\n\n let oper = deserialize(value);\n\n\n\n let new = existing + oper;\n\n return Some(serialize(new));\n\n }\n\n}\n\n\n\npub struct MergeBasedCounters {\n", "file_path": "examples/counters.rs", "rank": 12, "score": 150056.3520236221 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // Optimize RocksDB. 
This is the easiest way to get RocksDB to perform well\n\n // NOTE: Is rust, Options is splited into 2 parts.\n\n let options = Options::default()\n\n .map_db_options(|db| db.create_if_missing(true).increase_parallelism(8))\n\n .map_cf_options(|cf| cf.optimize_level_style_compaction(512 * 1024 * 1024));\n\n\n\n // open DB\n\n let db = DB::open(&options, DB_PATH)?;\n\n println!(\"db => {:?}\", db);\n\n\n\n // Put key-value\n\n db.put(WriteOptions::default_instance(), b\"key1\", b\"value\")?;\n\n\n\n let seq = db.get_latest_sequence_number();\n\n println!(\"latest seq = {}\", seq);\n\n\n\n db.put(WriteOptions::default_instance(), b\"key1\", b\"xxxxxxxxxx\")?;\n\n db.put(WriteOptions::default_instance(), b\"key2\", b\"xxasdfxxxx\")?;\n\n db.put(WriteOptions::default_instance(), b\"key3\", b\"xxxagaasdxxxx\")?;\n", "file_path": "examples/iter_start_seqnum.rs", "rank": 13, "score": 148174.18431191935 }, { "content": "#[test]\n\nfn test_cf_lifetime() {\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n\n\n let opt = DBOptions::default().create_if_missing(true);\n\n\n\n let mut cf_handle = None;\n\n {\n\n let ret = DB::open_with_column_families(\n\n &opt,\n\n tmp_dir.path().to_str().unwrap(),\n\n vec![ColumnFamilyDescriptor::default()],\n\n );\n\n assert!(ret.is_ok(), \"err => {:?}\", ret);\n\n println!(\"cfs => {:?}\", ret);\n\n\n\n if let Ok((_db, mut cfs)) = ret {\n\n let cf = cfs.pop().unwrap();\n\n println!(\"cf name => {:?} id => {}\", cf.name(), cf.id());\n\n cf_handle = Some(cf);\n\n }\n\n }\n\n println!(\"db lifetime ends\");\n\n println!(\"cf name => {:?}\", cf_handle.unwrap().name());\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 14, "score": 115122.49627158631 }, { "content": "#[test]\n\nfn test_list_cfs() {\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let path = tmp_dir.path().to_str().unwrap();\n\n\n\n {\n\n let opt = Options::default().map_db_options(|opt| opt.create_if_missing(true));\n\n let db = DB::open(&opt, 
path);\n\n assert!(db.is_ok());\n\n\n\n let db = db.unwrap();\n\n let ret = db.create_column_family(&ColumnFamilyOptions::default(), \"cf1\");\n\n assert!(ret.is_ok());\n\n\n\n let ret = db.create_column_family(&ColumnFamilyOptions::default(), \"cf2\");\n\n assert!(ret.is_ok());\n\n }\n\n\n\n let opt = Options::default();\n\n let ret = DB::list_column_families(&opt, path);\n\n assert!(ret.is_ok());\n", "file_path": "tests/db.rs", "rank": 15, "score": 115122.49627158631 }, { "content": "#[test]\n\nfn test_open_for_readonly() {\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let path = tmp_dir.path().to_str().unwrap();\n\n\n\n {\n\n let opt = Options::default().map_db_options(|opt| opt.create_if_missing(true));\n\n let db = DB::open(&opt, path);\n\n assert!(db.is_ok());\n\n }\n\n\n\n let db = DB::open_for_readonly(&Options::default(), path, false);\n\n assert!(db.is_ok());\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 16, "score": 115122.49627158631 }, { "content": "#[test]\n\nfn test_db_get() {\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let path = tmp_dir.path().to_str().unwrap();\n\n\n\n {\n\n let opt = Options::default().map_db_options(|dbopt| dbopt.create_if_missing(true));\n\n\n\n let db = DB::open(&opt, path);\n\n assert!(db.is_ok(), \"err => {:?}\", db.as_ref().unwrap_err());\n\n let db = db.unwrap();\n\n let _ = db.put(&WriteOptions::default(), b\"name\", b\"BH1XUW\");\n\n }\n\n\n\n let db = DB::open(Options::default(), path).unwrap();\n\n let val = db.get(&ReadOptions::default(), b\"name\");\n\n assert_eq!(val.unwrap().as_ref(), b\"BH1XUW\");\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 17, "score": 115122.49627158631 }, { "content": "#[test]\n\nfn test_open_cf() {\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n\n\n let opt = DBOptions::default().create_if_missing(true);\n\n\n\n let ret = DB::open_with_column_families(\n\n &opt,\n\n tmp_dir.path().to_str().unwrap(),\n\n 
vec![ColumnFamilyDescriptor::default()],\n\n );\n\n assert!(ret.is_ok(), \"err => {:?}\", ret);\n\n println!(\"cfs => {:?}\", ret);\n\n\n\n if let Ok((_db, cfs)) = ret {\n\n let cf = &cfs[0];\n\n println!(\"cf name => {:?} id => {}\", cf.name(), cf.id());\n\n }\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 18, "score": 115122.49627158631 }, { "content": "/// RocksDB version.\n\npub fn version() -> Version {\n\n unsafe {\n\n Version {\n\n major: ll::rocks_version_major() as _,\n\n minor: ll::rocks_version_minor() as _,\n\n patch: ll::rocks_version_patch() as _,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 19, "score": 112754.71181188193 }, { "content": "#[test]\n\nfn test_ingest_sst_file() {\n\n use rocks::sst_file_writer::SstFileWriter;\n\n\n\n let sst_dir = ::tempdir::TempDir::new_in(\".\", \"rocks.sst\").unwrap();\n\n\n\n let writer = SstFileWriter::builder().build();\n\n writer.open(sst_dir.path().join(\"2333.sst\")).unwrap();\n\n for i in 0..999 {\n\n let key = format!(\"B{:05}\", i);\n\n let value = format!(\"ABCDEFGH{:03}IJKLMN\", i);\n\n writer.put(key.as_bytes(), value.as_bytes()).unwrap();\n\n }\n\n let info = writer.finish().unwrap();\n\n assert_eq!(info.num_entries(), 999);\n\n\n\n let tmp_db_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_db_dir,\n", "file_path": "tests/db.rs", "rank": 20, "score": 112471.36237509828 }, { "content": "#[test]\n\nfn test_key_may_exist() {\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n db.put(&WriteOptions::default(), b\"name\", b\"value\").unwrap();\n\n\n\n assert!(db.key_may_exist(&ReadOptions::default(), b\"name\"));\n\n assert!(!db.key_may_exist(&ReadOptions::default(), b\"name2\"))\n\n}\n\n\n", "file_path": "tests/db.rs", 
"rank": 21, "score": 112471.36237509828 }, { "content": "#[test]\n\nfn flush() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.disable_auto_compactions(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(*db.get_latest_sequence_number(), 0);\n\n\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.put(&Default::default(), b\"a\", b\"1\").is_ok());\n\n assert!(db.put(&Default::default(), b\"b\", b\"2\").is_ok());\n\n assert!(db.put(&Default::default(), b\"c\", b\"3\").is_ok());\n\n\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n assert!(db.sync_wal().is_ok());\n\n\n\n // 5th transaction\n\n assert_eq!(*db.get_latest_sequence_number(), 4);\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 22, "score": 110491.08676182656 }, { "content": "#[test]\n\nfn it_works() {\n\n use rocks::advanced_options::CompactionPri;\n\n\n\n let tmp_dir = TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let path = tmp_dir.path().to_str().unwrap();\n\n\n\n // staircase style config\n\n let opt = Options::default()\n\n .map_db_options(|dbopt| dbopt.create_if_missing(true))\n\n .map_cf_options(|cfopt| cfopt.compaction_pri(CompactionPri::MinOverlappingRatio))\n\n .optimize_for_small_db();\n\n let db = DB::open(&opt, path);\n\n assert!(db.is_ok(), \"err => {:?}\", db);\n\n let db = db.unwrap();\n\n let cfhandle = db.create_column_family(&ColumnFamilyOptions::default(), \"lock\");\n\n println!(\"cf => {:?}\", cfhandle);\n\n\n\n assert!(db.name().contains(\"rocks\"));\n\n\n\n // FIXME: missing on static build?\n\n // assert!(db.get_info_log_list().is_ok());\n\n // assert!(db.get_info_log_list().unwrap().contains(&\"LOG\".to_string()));\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 23, "score": 110491.08676182656 }, { "content": "#[test]\n\nfn 
livemetadata() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n assert!(db.disable_file_deletions().is_ok());\n\n let meta = db.get_live_files_metadata();\n\n assert_eq!(meta.len(), 0);\n\n\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n let meta = db.get_live_files_metadata();\n\n assert_eq!(meta.len(), 1);\n\n assert_eq!(meta[0].level, 0);\n\n\n", "file_path": "tests/db.rs", "rank": 24, "score": 110491.08676182656 }, { "content": "#[test]\n\nfn compact_files() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.disable_auto_compactions(true)), // disable\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n assert!(db\n\n .put(&Default::default(), b\"long-key-2\", vec![b'A'; 2 * 1024].as_ref())\n\n .is_ok());\n\n\n\n for i in 0..10 {\n\n let key = format!(\"k{}\", i);\n\n let val = format!(\"v{}\", i * 10);\n", "file_path": "tests/db.rs", "rank": 25, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn compact_range() {\n\n let s = b\"123123123\";\n\n let e = b\"asdfasfasfasf\";\n\n\n\n let _: ::std::ops::Range<&[u8]> = s.as_ref()..e.as_ref();\n\n\n\n let tmp_db_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n\n\n let opt = Options::default().map_db_options(|dbopt| dbopt.create_if_missing(true));\n\n\n\n let db = DB::open(opt, &tmp_db_dir).unwrap();\n\n\n\n let _ = db.put(&WriteOptions::default(), b\"name\", b\"BH1XUW\").unwrap();\n\n for i in 
0..100 {\n\n let key = format!(\"test2-key-{}\", i);\n\n let val = format!(\"rocksdb-value-{}\", i * 10);\n\n\n\n db.put(&WriteOptions::default(), key.as_bytes(), val.as_bytes())\n\n .unwrap();\n\n\n", "file_path": "tests/db.rs", "rank": 26, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn change_options() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.disable_auto_compactions(true)), // disable\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n let default_cf = db.default_column_family();\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n assert!(db\n\n .put(&Default::default(), b\"long-key-2\", vec![b'A'; 2 * 1024].as_ref())\n\n .is_ok());\n\n\n\n let new_opt: HashMap<&str, &str> = [(\"base_background_compactions\", \"6\"), (\"stats_dump_period_sec\", \"10\")] // dump every 10s\n\n .iter()\n", "file_path": "tests/db.rs", "rank": 27, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn misc_functions() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.disable_auto_compactions(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.put(&Default::default(), b\"a\", b\"1\").is_ok());\n\n assert!(db.put(&Default::default(), b\"b\", b\"2\").is_ok());\n\n assert!(db.put(&Default::default(), b\"c\", b\"3\").is_ok());\n\n\n\n assert!(db.compact_range(&Default::default(), ..).is_ok());\n\n\n\n assert!(db.pause_background_work().is_ok());\n", "file_path": "tests/db.rs", "rank": 28, "score": 107106.3107707264 }, { "content": 
"#[test]\n\nfn multi_get() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n assert!(db.put(&Default::default(), b\"a\", b\"1\").is_ok());\n\n assert!(db.put(&Default::default(), b\"b\", b\"2\").is_ok());\n\n assert!(db.put(&Default::default(), b\"c\", b\"3\").is_ok());\n\n assert!(db.put(&Default::default(), b\"long-key\", b\"long-value\").is_ok());\n\n assert!(db.put(&Default::default(), b\"e\", b\"5\").is_ok());\n\n assert!(db.put(&Default::default(), b\"f\", b\"6\").is_ok());\n\n\n\n assert!(db.compact_range(&Default::default(), ..).is_ok());\n\n\n\n let ret = db.multi_get(\n\n &ReadOptions::default(),\n\n &[b\"a\", b\"b\", b\"c\", b\"f\", b\"long-key\", b\"non-exist\"],\n\n );\n\n\n\n assert_eq!(ret[0].as_ref().unwrap(), b\"1\".as_ref());\n\n assert_eq!(ret[1].as_ref().unwrap(), b\"2\".as_ref());\n\n assert_eq!(ret[2].as_ref().unwrap(), b\"3\".as_ref());\n\n assert_eq!(ret[3].as_ref().unwrap(), b\"6\".as_ref());\n\n assert_eq!(ret[4].as_ref().unwrap(), b\"long-value\".as_ref());\n\n assert!(ret[5].as_ref().unwrap_err().is_not_found());\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 29, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn db_paths() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let dir1 = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let dir2 = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let wal_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n\n\n let opt = Options::default().map_db_options(|dbopt| {\n\n dbopt\n\n .create_if_missing(true)\n\n .db_paths(vec![&dir1.path(), &dir2.path()]) /* only has sst file */\n\n .wal_dir(&wal_dir)\n\n });\n\n\n\n let db = DB::open(opt, &tmp_dir);\n\n if db.is_err() {\n\n println!(\"db error\");\n\n println!(\"err => {:?}\", db);\n\n return;\n\n }\n\n let db = 
db.unwrap();\n", "file_path": "tests/db.rs", "rank": 30, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn get_prop() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n\n\n let cf1 = db.create_column_family(&Default::default(), \"db1\").unwrap();\n\n\n\n assert!(db.compact_range(&Default::default(), ..).is_ok());\n\n\n\n let snap = db.get_snapshot();\n\n assert_eq!(db.get_property(\"rocksdb.num-snapshots\"), Some(\"1\".to_string()));\n\n\n\n // dump status\n", "file_path": "tests/db.rs", "rank": 31, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn approximate_sizes() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.disable_auto_compactions(true)), // disable\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n let default_cf = db.default_column_family();\n\n\n\n assert!(db\n\n .put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n assert!(db\n\n .put(&Default::default(), b\"long-key-2\", vec![b'A'; 2 * 1024].as_ref())\n\n .is_ok());\n\n\n\n let sizes = db.get_approximate_sizes(&default_cf, &[&b\"long-key\"[..]..&b\"long-key-\"[..]]);\n", "file_path": "tests/db.rs", "rank": 32, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn test_version() {\n\n assert!(version().major >= 5);\n\n println!(\"version = {}\", version());\n\n}\n", "file_path": "src/version.rs", "rank": 33, "score": 107106.3107707264 }, { "content": "#[test]\n\nfn test_compression_types() {\n\n let types = get_supported_compressions();\n\n // [ZlibCompression, 
SnappyCompression, LZ4HCCompression, LZ4Compression, BZip2Compression,\n\n // NoCompression]\n\n assert!(types.len() >= 1);\n\n assert!(types.contains(&CompressionType::NoCompression));\n\n}\n", "file_path": "src/convenience.rs", "rank": 34, "score": 103973.13090575646 }, { "content": "#[test]\n\nfn get_properties_of_all_tables() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\"\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.disable_auto_compactions(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n for i in 0..10 {\n\n let key = format!(\"k{}\", i);\n\n let val = format!(\"v{}\", i * i);\n\n\n\n db.put(WriteOptions::default_instance(), key.as_bytes(), val.as_bytes())\n\n .unwrap();\n\n\n\n if i % 2 == 0 {\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n }\n", "file_path": "tests/db.rs", "rank": 35, "score": 103973.13090575646 }, { "content": "#[test]\n\nfn delete_files_in_range() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\"\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n // NOTE: delete_files_in_range() requires auto compaction\n\n // .map_cf_options(|cf| cf.disable_auto_compactions(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n // will have 10 sst file\n\n for i in 0..10 {\n\n let key = format!(\"k{}\", i);\n\n let val = format!(\"v{}\", i * i);\n\n\n\n db.put(WriteOptions::default_instance(), key.as_bytes(), val.as_bytes())\n\n .unwrap();\n\n\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n }\n", "file_path": "tests/db.rs", "rank": 36, "score": 103973.13090575646 }, { "content": "#[test]\n\nfn list_live_files() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n assert!(db\n\n 
.put(&Default::default(), b\"long-key\", vec![b'A'; 1024 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n assert!(db\n\n .put(&Default::default(), b\"long-key-2\", vec![b'A'; 2 * 1024].as_ref())\n\n .is_ok());\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n\n\n if let Ok((_size, files)) = db.get_live_files(false) {\n\n assert!(files.contains(&\"/CURRENT\".to_string()));\n\n } else {\n\n assert!(false, \"get_live_files fails\");\n\n }\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 37, "score": 103973.13090575646 }, { "content": "#[test]\n\nfn column_family_meta() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n for i in 0..10 {\n\n let key = format!(\"k{}\", i);\n\n let val = format!(\"v{}\", i * 10);\n\n\n\n db.put(&WriteOptions::default(), key.as_bytes(), val.as_bytes())\n\n .unwrap();\n\n\n\n // 2 keys into a sst\n\n if i % 2 == 0 {\n\n assert!(db.flush(&FlushOptions::default().wait(true)).is_ok());\n\n }\n\n\n", "file_path": "tests/db.rs", "rank": 38, "score": 103973.13090575646 }, { "content": "#[test]\n\nfn multi_get_cf() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n let def = db.default_column_family();\n\n let cf1 = db.create_column_family(&Default::default(), \"db1\").unwrap();\n\n let cf2 = db.create_column_family(&Default::default(), \"db2\").unwrap();\n\n let cf3 = db.create_column_family(&Default::default(), \"db3\").unwrap();\n\n let cf4 = db.create_column_family(&Default::default(), \"db4\").unwrap();\n\n\n\n // via DB api\n\n assert!(db.put_cf(&WriteOptions::default(), &def, b\"AA\", b\"aa\").is_ok());\n\n 
assert!(db.put_cf(&WriteOptions::default(), &cf1, b\"BB\", b\"bb\").is_ok());\n\n assert!(db.put_cf(&WriteOptions::default(), &cf2, b\"CC\", b\"cc\").is_ok());\n\n assert!(db.put_cf(&WriteOptions::default(), &cf3, b\"DD\", b\"dd\").is_ok());\n\n assert!(db.put_cf(&WriteOptions::default(), &cf4, b\"EE\", b\"ee\").is_ok());\n", "file_path": "tests/db.rs", "rank": 39, "score": 103973.13090575646 }, { "content": "#[test]\n\nfn key_may_exist() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true)),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n assert!(db.put(&Default::default(), b\"long-key\", b\"long-value\").is_ok());\n\n assert!(db.compact_range(&Default::default(), ..).is_ok());\n\n\n\n assert!(db.key_may_exist(&ReadOptions::default(), b\"long-key\"));\n\n assert!(!db.key_may_exist(&ReadOptions::default(), b\"long-key-not-exist\"));\n\n\n\n let (found, maybe_val) = db.key_may_get(&ReadOptions::default(), b\"long-key\");\n\n assert!(found);\n\n // it depends, Some/None are all OK\n\n let _ = maybe_val;\n\n\n\n let (found, maybe_val) = db.key_may_get(&ReadOptions::default(), b\"not-exist\");\n\n assert!(!found);\n\n assert!(!maybe_val.is_some());\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 40, "score": 103973.13090575646 }, { "content": "/// get current perf stats level for current thread\n\npub fn get_perf_level() -> PerfLevel {\n\n unsafe { mem::transmute(ll::rocks_get_perf_level()) }\n\n}\n\n\n\n\n", "file_path": "src/perf_level.rs", "rank": 41, "score": 102242.41000018407 }, { "content": "struct rocks_envoptions_t {\n\n EnvOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 42, "score": 101147.26423393471 }, { "content": "struct rocks_logger_t {\n\n shared_ptr<Logger> rep;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 43, "score": 101147.26423393471 }, { "content": "#[test]\n\nfn test_perf_level() {\n\n 
set_perf_level(PerfLevel::Disable);\n\n assert_eq!(get_perf_level(), PerfLevel::Disable);\n\n\n\n set_perf_level(PerfLevel::EnableTimeExceptForMutex);\n\n assert_eq!(get_perf_level(), PerfLevel::EnableTimeExceptForMutex);\n\n}\n", "file_path": "src/perf_level.rs", "rank": 44, "score": 101064.50004739712 }, { "content": "#[test]\n\nfn test_persistent_cache() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\"\", \"rocks\").unwrap();\n\n // let logger = Env::default_instance()\n\n // .create_logger(tmp_dir.path().join(\"test.logfiles\"))\n\n // .unwrap();\n\n // NOTE: from RocksdB, size should be big enough\n\n let pcache = PersistentCache::new(Env::default_instance(), tmp_dir.path(), 1 << 30, None, true).unwrap();\n\n\n\n assert!(format!(\"{:?}\", pcache).contains(\"is_compressed: 1\"));\n\n}\n", "file_path": "src/persistent_cache.rs", "rank": 45, "score": 101064.50004739712 }, { "content": "#[test]\n\nfn get_sorted_wal_files() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| {\n\n db.create_if_missing(true)\n\n .db_write_buffer_size(2 << 20) // 2MB per wal log\n\n .wal_ttl_seconds(1000)\n\n }),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n for i in 0..10 {\n\n assert!(db\n\n .put(\n\n &Default::default(),\n\n format!(\"key{}\", i).as_bytes(),\n\n format!(\"val{:01000000}\", i).as_bytes()\n\n ) // 1MB value\n\n .is_ok());\n\n }\n\n let files = db.get_sorted_wal_files();\n\n assert!(files.is_ok());\n\n assert!(files.unwrap().len() > 2);\n\n}\n\n\n", "file_path": "tests/db.rs", "rank": 46, "score": 101064.50004739712 }, { "content": "struct rocks_env_t {\n\n Env* rep;\n\n bool is_default;\n\n};\n\n\n\n/* snapshot*/\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 47, "score": 100996.04327525519 }, { "content": "struct cxx_string_vector_t {\n\n std::vector<std::string> rep;\n\n};\n\n\n\n// std::string\n\ntypedef struct cxx_string_t cxx_string_t;\n\n\n\nstatic bool 
SaveError(rocks_status_t** status, const Status&& s) {\n\n if (s.ok()) {\n\n *status = nullptr;\n\n return false;\n\n } else {\n\n *status = new rocks_status_t{std::move(s)};\n\n return true;\n\n }\n\n}\n\n\n\nstatic char* CopyString(const std::string& str) {\n\n char* result = reinterpret_cast<char*>(malloc(sizeof(char) * str.size()));\n\n memcpy(result, str.data(), sizeof(char) * str.size());\n\n return result;\n\n}\n\n\n\n/* slice */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 48, "score": 98429.27862034197 }, { "content": "struct rocks_raw_filterpolicy_t {\n\n shared_ptr<const FilterPolicy> rep;\n\n};\n\n\n\n/* cache */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 49, "score": 98429.27862034197 }, { "content": "struct rocks_thread_status_t {\n\n ThreadStatus rep;\n\n};\n\n\n\n/* persistent_cache */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 50, "score": 98383.53112338246 }, { "content": "/// Get all supported compression type as a list\n\npub fn get_supported_compressions() -> Vec<CompressionType> {\n\n unsafe {\n\n let mut n = 0;\n\n let ptr = ll::rocks_get_supported_compressions(&mut n);\n\n let mut ret = Vec::with_capacity(n);\n\n\n\n for i in 0..n {\n\n ret.push(mem::transmute(*ptr.offset(i as isize)));\n\n }\n\n ll::rocks_get_supported_compressions_destroy(ptr);\n\n ret\n\n }\n\n}\n\n\n", "file_path": "src/convenience.rs", "rank": 51, "score": 97940.26820436963 }, { "content": "struct rocks_key_version_collection_t {\n\n std::vector<KeyVersion> rep;\n\n};\n\n\n\n/* listener */\n\n/*\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 52, "score": 95889.28165732304 }, { "content": "struct rocks_table_props_collection_t {\n\n // std::unordered_map<std::string, std::shared_ptr<const TableProperties>>\n\n TablePropertiesCollection rep;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 53, "score": 95889.28165732304 }, { "content": "/// set the perf stats level for current thread\n\npub fn set_perf_level(level: 
PerfLevel) {\n\n unsafe {\n\n ll::rocks_set_perf_level(mem::transmute(level));\n\n }\n\n}\n\n\n\n\n", "file_path": "src/perf_level.rs", "rank": 54, "score": 95728.11893743074 }, { "content": "struct rocks_external_sst_file_info_t {\n\n ExternalSstFileInfo rep;\n\n};\n\n\n\n/* compaction_filter */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 55, "score": 93533.93216479238 }, { "content": "struct rocks_user_collected_props_iter_t {\n\n UserCollectedProperties::const_iterator rep;\n\n const UserCollectedProperties::const_iterator cend;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 56, "score": 93521.63018221647 }, { "content": "struct rocks_table_props_collection_iter_t {\n\n TablePropertiesCollection::const_iterator rep;\n\n const TablePropertiesCollection::const_iterator cend;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 57, "score": 93521.63018221647 }, { "content": "fn main() {\n\n let db = DB::open(\n\n Options::default()\n\n .map_db_options(|db| db.create_if_missing(true))\n\n .map_cf_options(|cf| cf.associative_merge_operator(Box::new(UInt64AddOperator))),\n\n DB_PATH,\n\n )\n\n .unwrap();\n\n\n\n let counters = MergeBasedCounters::new(db);\n\n // counters.remove(\"a\");\n\n counters.add(\"a\", 5);\n\n println!(\"val => {:?}\", counters.get(\"a\"));\n\n // counters.set(\"a\", 100);\n\n // println!(\"val => {:?}\", counters.get(\"a\"));\n\n}\n", "file_path": "examples/counters.rs", "rank": 58, "score": 66624.78743505891 }, { "content": "fn main() {\n\n let opt = Options::default().map_db_options(|db_opt| db_opt.create_if_missing(true));\n\n let db = DB::open(opt, \"./data\").unwrap();\n\n\n\n let mut wb = WriteBatch::new();\n\n\n\n for i in 0..1000 {\n\n wb.put(format!(\"{:3}-key\", i).as_bytes(), format!(\"value-{:03}\", i).as_bytes());\n\n }\n\n\n\n println!(\"wb => {:?}\", wb);\n\n\n\n let _ = db.write(WriteOptions::default_instance(), &wb).unwrap();\n\n\n\n println!(\n\n \"db[042-key] => {:?}\",\n\n 
db.get(ReadOptions::default_instance(), b\"042-key\")\n\n );\n\n\n\n // pin_data pins iterator key\n", "file_path": "examples/iterator.rs", "rank": 59, "score": 66624.78743505891 }, { "content": "fn main() {\n\n println!(\"RocksDB: {}\", rocksdb::version());\n\n println!(\"Compression Supported:\");\n\n let mut compressions = rocks::convenience::get_supported_compressions();\n\n compressions.sort();\n\n for compression in compressions {\n\n println!(\" - {:?}\", compression);\n\n }\n\n}\n", "file_path": "examples/it-works.rs", "rank": 60, "score": 66624.78743505891 }, { "content": "fn main() {\n\n let opt = Options::default().map_db_options(|db_opt| db_opt.create_if_missing(true));\n\n let db = DB::open(opt, \"./data\").unwrap();\n\n\n\n assert!(db.put(WriteOptions::default_instance(), b\"hello\", b\"world\").is_ok());\n\n match db.get(ReadOptions::default_instance(), b\"hello\") {\n\n Ok(ref value) => println!(\"hello: {:?}\", value),\n\n Err(e) => eprintln!(\"error: {}\", e),\n\n }\n\n let _ = db.delete(WriteOptions::default_instance(), b\"hello\").unwrap();\n\n}\n", "file_path": "examples/hello.rs", "rank": 61, "score": 66624.78743505891 }, { "content": "#[derive(PartialEq, Eq)]\n\nenum CacheType {\n\n LRU,\n\n Clock,\n\n}\n\n\n\npub struct CacheBuilder {\n\n type_: CacheType,\n\n capacity: usize,\n\n num_shard_bits: i32,\n\n strict_capacity_limit: bool,\n\n high_pri_pool_ratio: f64,\n\n}\n\n\n\nimpl CacheBuilder {\n\n /// Create a new cache with a fixed size capacity. The cache is sharded\n\n /// to 2^num_shard_bits shards, by hash of the key. The total capacity\n\n /// is divided and evenly assigned to each shard. If strict_capacity_limit\n\n /// is set, insert to the cache will fail when cache is full. 
User can also\n\n /// set percentage of the cache reserves for high priority entries via\n\n /// high_pri_pool_pct.\n", "file_path": "src/cache.rs", "rank": 62, "score": 65977.07720043333 }, { "content": "fn main() {\n\n if cfg!(not(target_pointer_width = \"64\")) {\n\n panic!(\"only 64 bit system supported\");\n\n }\n\n\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n println!(\"cargo:rerun-if-changed=src/c.rs\");\n\n println!(\"cargo:rerun-if-changed=src/lib.rs\");\n\n\n\n imp::build();\n\n\n\n let mut build = ::cc::Build::new();\n\n\n\n #[cfg(feature = \"static-link\")]\n\n build.include(\"rocksdb/include\");\n\n\n\n #[cfg(unix)]\n\n {\n\n build.flag(\"-std=c++14\");\n\n build.flag(\"-fno-rtti\");\n", "file_path": "rocks-sys/build.rs", "rank": 63, "score": 64933.063794688365 }, { "content": "fn main() {\n\n let tmp_dir = ::tempdir::TempDir::new_in(\".\", \"rocks\").unwrap();\n\n let db = DB::open(\n\n Options::default().map_db_options(|db| db.create_if_missing(true).add_listener(MyEventListener::default())),\n\n &tmp_dir,\n\n )\n\n .unwrap();\n\n\n\n for i in 0..100 {\n\n let key = format!(\"test2-key-{}\", i);\n\n let val = format!(\"rocksdb-value-{}\", i * 10);\n\n\n\n db.put(&WriteOptions::default(), key.as_bytes(), val.as_bytes())\n\n .unwrap();\n\n\n\n if i % 6 == 0 {\n\n db.flush(&FlushOptions::default().wait(true)).unwrap();\n\n }\n\n if i % 36 == 0 {\n\n db.compact_range(&CompactRangeOptions::default(), ..).unwrap();\n", "file_path": "examples/event_listener.rs", "rank": 64, "score": 64933.063794688365 }, { "content": "#[derive(Default)]\n\nstruct MyEventListener {\n\n flush_completed_called: usize,\n\n flush_begin_called: usize,\n\n table_file_deleted_called: usize,\n\n compaction_completed_called: usize,\n\n table_file_created_called: usize,\n\n table_file_creation_started_called: usize,\n\n on_memtable_sealed_called: usize,\n\n on_external_file_ingested_called: usize,\n\n on_column_family_handle_deletion_started_called: usize,\n\n}\n\n\n\nimpl 
Drop for MyEventListener {\n\n fn drop(&mut self) {\n\n assert!(self.flush_begin_called > 0);\n\n assert!(self.flush_completed_called > 0);\n\n assert!(self.table_file_deleted_called > 0);\n\n assert!(self.compaction_completed_called > 0);\n\n assert!(self.table_file_created_called > 0);\n\n assert!(self.table_file_creation_started_called > 0);\n", "file_path": "examples/event_listener.rs", "rank": 65, "score": 63264.84423830938 }, { "content": "struct rocks_snapshot_t {\n\n const Snapshot* rep;\n\n};\n\n\n\n/* iterator */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 66, "score": 61806.659306720954 }, { "content": "struct rocks_iterator_t {\n\n Iterator* rep;\n\n};\n\n\n\n/* write_batch */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 67, "score": 61806.659306720954 }, { "content": "struct rocks_logfiles_t {\n\n VectorLogPtr rep;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 68, "score": 61806.659306720954 }, { "content": "struct rocks_writeoptions_t {\n\n WriteOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 69, "score": 61806.659306720954 }, { "content": "struct rocks_ratelimiter_t {\n\n shared_ptr<RateLimiter> rep;\n\n};\n\n\n\n/* env */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 70, "score": 61806.659306720954 }, { "content": "struct rocks_dboptions_t {\n\n DBOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 71, "score": 61806.659306720954 }, { "content": "struct rocks_options_t {\n\n Options rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 72, "score": 61806.659306720954 }, { "content": "struct rocks_cache_t {\n\n shared_ptr<Cache> rep;\n\n};\n\n\n\n/* sst_file_writer */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 73, "score": 61806.659306720954 }, { "content": "struct rocks_flushoptions_t {\n\n FlushOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 74, "score": 61806.659306720954 }, { "content": "struct 
rocks_db_t {\n\n DB* rep;\n\n};\n\n\n\n/* options */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 75, "score": 61806.659306720954 }, { "content": "struct rocks_livefiles_t {\n\n std::vector<LiveFileMetaData> rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 76, "score": 61806.659306720954 }, { "content": "struct rocks_status_t {\n\n Status rep;\n\n\n\n // rocks_status_t(const Status st) noexcept : rep(st) {}\n\n rocks_status_t() : rep(Status()) {}\n\n rocks_status_t(const Status&& st) noexcept : rep(std::move(st)) {}\n\n};\n\n\n\n/* aux */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 77, "score": 61806.659306720954 }, { "content": "struct rocks_dbpath_t {\n\n DbPath rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 78, "score": 61806.659306720954 }, { "content": "struct rocks_statistics_t {\n\n shared_ptr<Statistics> rep;\n\n};\n\ntypedef struct rocks_histogram_data_t rocks_histogram_data_t;\n\n\n\n/* metadata */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 79, "score": 61806.659306720954 }, { "content": "struct rocks_readoptions_t {\n\n ReadOptions rep;\n\n Slice lower_bound;\n\n Slice upper_bound; // hold variable to set pointer to in ReadOptions\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 80, "score": 61806.659306720954 }, { "content": "struct rocks_cfoptions_t {\n\n ColumnFamilyOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 81, "score": 61806.659306720954 }, { "content": "struct rocks_writebatch_t {\n\n std::unique_ptr<WriteBatch> rep;\n\n};\n\ntypedef struct rocks_raw_writebatch_t rocks_raw_writebatch_t;\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 82, "score": 61806.659306720954 }, { "content": "/// A `Comparator` object provides a total order across slices that are\n\n/// used as keys in an sstable or a database. 
A `Comparator` implementation\n\n/// must be thread-safe since rocksdb may invoke its methods concurrently\n\n/// from multiple threads.\n\npub trait Comparator {\n\n /// Three-way comparison. Returns value:\n\n ///\n\n /// - `< 0 iff \"a\" < \"b\"`,\n\n /// - `== 0 iff \"a\" == \"b\"`,\n\n /// - `> 0 iff \"a\" > \"b\"`\n\n fn compare(&self, a: &[u8], b: &[u8]) -> Ordering;\n\n\n\n /// Compares two slices for equality. The following invariant should always\n\n /// hold (and is the default implementation):\n\n ///\n\n /// > `Equal(a, b) iff Compare(a, b) == 0`\n\n ///\n\n /// Overwrite only if equality comparisons can be done more efficiently than\n\n /// three-way comparisons.\n\n fn equal(&self, a: &[u8], b: &[u8]) -> bool {\n\n self.compare(a, b) == Ordering::Equal\n\n }\n\n /// The name of the comparator. Used to check for comparator\n\n /// mismatches (i.e., a DB created with one comparator is\n", "file_path": "src/comparator.rs", "rank": 83, "score": 61161.41816981001 }, { "content": "struct rocks_persistent_cache_t {\n\n std::shared_ptr<PersistentCache> rep;\n\n};\n\n\n\n#ifdef __cplusplus\n\n}\n\n#endif\n\n\n\n#endif /* __RUST_ROCSK_SYS_H____ */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 84, "score": 60454.260346402145 }, { "content": "struct rocks_compaction_options_t {\n\n CompactionOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 85, "score": 60454.260346402145 }, { "content": "struct rocks_dump_options_t {\n\n DumpOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 86, "score": 60454.260346402145 }, { "content": "struct rocks_undump_options_t {\n\n UndumpOptions rep;\n\n};\n\n\n\n/* iostats_context */\n\ntypedef struct rocks_iostats_context_t rocks_iostats_context_t;\n\n\n\n/* perf_context */\n\ntypedef struct rocks_perf_context_t rocks_perf_context_t;\n\n\n\n/* statistics */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 87, "score": 60454.260346402145 }, { "content": "struct 
rocks_compactrange_options_t {\n\n CompactRangeOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 88, "score": 60454.260346402145 }, { "content": "struct rocks_table_props_t {\n\n std::shared_ptr<const TableProperties> rep;\n\n};\n\n\n\n// std::map<std::string, std::string>*\n\n// ie. UserCollectedProperties*\n\ntypedef struct rocks_user_collected_props_t rocks_user_collected_props_t;\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 89, "score": 60454.260346402145 }, { "content": "struct rocks_pinnable_slice_t {\n\n PinnableSlice rep;\n\n};\n\n\n\n/* db */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 90, "score": 60454.260346402145 }, { "content": "struct rocks_ingestexternalfile_options_t {\n\n IngestExternalFileOptions rep;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 91, "score": 60454.260346402145 }, { "content": "/// `EventListener` class contains a set of call-back functions that will\n\n/// be called when specific RocksDB event happens such as flush. It can\n\n/// be used as a building block for developing custom features such as\n\n/// stats-collector or external compaction algorithm.\n\n///\n\n/// Note that call-back functions should not run for an extended period of\n\n/// time before the function returns, otherwise RocksDB may be blocked.\n\n/// For example, it is not suggested to do `DB::CompactFiles()` (as it may\n\n/// run for a long while) or issue many of `DB::Put()` (as Put may be blocked\n\n/// in certain cases) in the same thread in the `EventListener` callback.\n\n/// However, doing `DB::CompactFiles()` and `DB::Put()` in another thread is\n\n/// considered safe.\n\n///\n\n/// [Threading] All `EventListener` callback will be called using the\n\n/// actual thread that involves in that specific event. 
For example, it\n\n/// is the RocksDB background flush thread that does the actual flush to\n\n/// call `EventListener::OnFlushCompleted()`.\n\n///\n\n/// [Locking] All `EventListener` callbacks are designed to be called without\n\n/// the current thread holding any DB mutex. This is to prevent potential\n\n/// deadlock and performance issue when using EventListener callback\n\n/// in a complex way. However, all `EventListener` call-back functions\n\n/// should not run for an extended period of time before the function\n\n/// returns, otherwise RocksDB may be blocked. For example, it is not\n\n/// suggested to do `DB::CompactFiles()` (as it may run for a long while)\n\n/// or issue many of `DB::Put()` (as Put may be blocked in certain cases)\n\n/// in the same thread in the `EventListener` callback. However, doing\n\n/// `DB::CompactFiles()` and `DB::Put()` in a thread other than the\n\n/// EventListener callback thread is considered safe.\n\n///\n\n/// FIXME: how to hold CFHandle ref\n\npub trait EventListener {\n\n /// A call-back function to RocksDB which will be called whenever a\n\n /// registered RocksDB flushes a file. The default implementation is\n\n /// no-op.\n\n ///\n\n /// Note that the this function must be implemented in a way such that\n\n /// it should not run for an extended period of time before the function\n\n /// returns. Otherwise, RocksDB may be blocked.\n\n fn on_flush_completed(&mut self, db: &DBRef, flush_job_info: &FlushJobInfo) {}\n\n\n\n /// A call-back function to RocksDB which will be called before a\n\n /// RocksDB starts to flush memtables. The default implementation is\n\n /// no-op.\n\n ///\n\n /// Note that the this function must be implemented in a way such that\n\n /// it should not run for an extended period of time before the function\n\n /// returns. 
Otherwise, RocksDB may be blocked.\n\n fn on_flush_begin(&mut self, db: &DBRef, flush_job_info: &FlushJobInfo) {}\n\n\n\n /// A call-back function for RocksDB which will be called whenever\n", "file_path": "src/listener.rs", "rank": 92, "score": 59591.48179939044 }, { "content": "pub trait AsCompactRange {\n\n fn start_key(&self) -> *const u8 {\n\n ptr::null()\n\n }\n\n\n\n fn start_key_len(&self) -> usize {\n\n 0\n\n }\n\n\n\n fn end_key(&self) -> *const u8 {\n\n ptr::null()\n\n }\n\n\n\n fn end_key_len(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n\nimpl<'a> AsCompactRange for ops::RangeInclusive<&'a [u8]> {\n\n fn start_key(&self) -> *const u8 {\n", "file_path": "src/db.rs", "rank": 93, "score": 59576.02973161325 }, { "content": "struct rocks_plain_table_options_t {\n\n PlainTableOptions rep;\n\n};\n\n\n\n/* filter_policy */\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 94, "score": 59192.30342434648 }, { "content": "struct rocks_cuckoo_table_options_t {\n\n CuckooTableOptions rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 95, "score": 59192.30342434648 }, { "content": "struct rocks_fifo_compaction_options_t {\n\n CompactionOptionsFIFO rep;\n\n};\n\n\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 96, "score": 59192.30342434648 }, { "content": "struct rocks_column_family_handle_t {\n\n ColumnFamilyHandle* rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 97, "score": 59192.30342434648 }, { "content": "struct rocks_sst_file_writer_t {\n\n SstFileWriter* rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 98, "score": 59192.30342434648 }, { "content": "struct rocks_column_family_descriptor_t {\n\n ColumnFamilyDescriptor rep;\n\n};\n", "file_path": "rocks-sys/rocks/ctypes.hpp", "rank": 99, "score": 59192.30342434648 } ]
Rust
game-core/src/system/ally.rs
TheBlueSmoke/gameoff
b3619817c9c2af750c0eda2fad3e441692852261
use amethyst::{ core::cgmath::{InnerSpace, Vector2}, core::Transform, ecs::{Entities, Join, Read, ReadStorage, System, WriteStorage}, renderer::{SpriteRender, Transparent}, }; use config::GameoffConfig; use crate::component::{Ally, Animation, Motion, Player}; use rand::distributions::{Distribution, Uniform}; pub struct Movement; impl<'s> System<'s> for Movement { type SystemData = ( ReadStorage<'s, Ally>, WriteStorage<'s, Motion>, ReadStorage<'s, Transform>, ReadStorage<'s, Player>, Read<'s, GameoffConfig>, Entities<'s>, ); fn run( &mut self, (allies, mut motions, transforms, players, config, entities): Self::SystemData, ) { let mut rng = rand::thread_rng(); let zero_distance_dist = Uniform::new(0.5, 1.0); let p_transform = { let (t, _) = (&transforms, &players) .join() .next() .expect("no player found"); (t.clone()) }; for (_, motion, transform1, entity1) in (&allies, &mut motions, &transforms, &entities).join() { let d = (p_transform.translation - transform1.translation).truncate(); let m = d.magnitude().abs(); let mut pv = d.normalize(); if m < config.ally.follow_distance { pv *= 0.0; } else if (m > config.ally.follow_distance) && (m < config.ally.max_distance) { pv *= (1.0 + (m - config.ally.follow_distance) / (config.ally.max_distance - config.ally.follow_distance)) * config.speed; } else if m > config.ally.max_distance { pv *= 2.0 * config.speed; } let mut av = Vector2::new(0.0, 0.0); for (_, transform2, entity2) in (&allies, &transforms, &entities).join() { if entity1 != entity2 { let d = (transform1.translation - transform2.translation).truncate(); let m = d.magnitude().abs(); let mut v = if m == 0.0 { Vector2::new( zero_distance_dist.sample(&mut rng), zero_distance_dist.sample(&mut rng), ).normalize() } else { d.normalize() }; if m < config.ally.min_distance { v *= (config.ally.min_distance - m) / config.ally.min_distance * 5.0 * config.speed; } else if (m > config.ally.follow_distance) && (m < config.ally.max_distance) { v *= ((m - 
config.ally.follow_distance) / (config.ally.max_distance - config.ally.follow_distance)) * config.speed; } av += v; } } motion.vel = pv + av; } } } pub struct Grouper; impl<'s> System<'s> for Grouper { type SystemData = ( ReadStorage<'s, Ally>, ReadStorage<'s, Transform>, WriteStorage<'s, Motion>, ReadStorage<'s, Player>, Entities<'s>, ); fn run(&mut self, (allies, transforms, mut motions, players, entities): Self::SystemData) { let p_transform = { let (t, _) = (&transforms, &players) .join() .next() .expect("no player found"); (t.clone()) }; let mut merged = vec![]; for (_ally, transform, e, _) in (&allies, &transforms, &*entities, !&motions).join() { let d = p_transform.translation - transform.translation; let m = d.truncate().magnitude(); let merge_dist = 32.0 * 1.0; if m < merge_dist { merged.push(e); } } for entity in merged { let _ = motions.insert(entity, Motion::default()); } } } pub struct Spawner; impl<'s> System<'s> for Spawner { type SystemData = ( ReadStorage<'s, Player>, ReadStorage<'s, Motion>, Read<'s, crate::load::LoadedTextures>, WriteStorage<'s, Transform>, WriteStorage<'s, Ally>, WriteStorage<'s, SpriteRender>, WriteStorage<'s, Transparent>, Entities<'s>, WriteStorage<'s, Animation>, ); fn run( &mut self, ( players, motions, textures, mut transforms, mut allies, mut sprites, mut transparent, entities, mut animation, ): Self::SystemData, ) { let count = (&allies, !&motions).join().count(); if count < 5 { let mut ally_positions = vec![]; let range = Uniform::new_inclusive(-5.0 * 32.0, 5.0 * 32.0); let mut rng = rand::thread_rng(); for (_, transform) in (&players, &mut transforms).join() { let mut pos = Transform::default(); pos.scale.x = 0.5; pos.scale.y = 0.5; pos.translation.x = transform.translation.x + range.sample(&mut rng); pos.translation.y = transform.translation.y + range.sample(&mut rng); ally_positions.push(pos); } for pos in ally_positions { let sprite = SpriteRender { sprite_sheet: textures.textures["FRONT.png"].clone(), 
sprite_number: 1, flip_horizontal: false, flip_vertical: false, }; let anim = Animation { total_frames: 8, max_count_till_next_frame: 0.1, frame_life_time_count: 0.1, current_frame: 0, }; entities .build_entity() .with(pos, &mut transforms) .with(Ally::default(), &mut allies) .with(sprite, &mut sprites) .with(Transparent, &mut transparent) .with(anim, &mut animation) .build(); } } } }
use amethyst::{ core::cgmath::{InnerSpace, Vector2}, core::Transform, ecs::{Entities, Join, Read, ReadStorage, System, WriteStorage}, renderer::{SpriteRender, Transparent}, }; use config::GameoffConfig; use crate::component::{Ally, Animation, Motion, Player}; use rand::distributions::{Distribution, Uniform}; pub struct Movement; impl<'s> System<'s> for Movement { type SystemData = ( ReadStorage<'s, Ally>, WriteStorage<'s, Motion>, ReadStorage<'s, Transform>, ReadStorage<'s, Player>, Read<'s, GameoffConfig>, Entities<'s>, ); fn run( &mut self, (allies, mut motions, transforms, players, config, entities): Self::SystemData, ) { let mut rng = rand::thread_rng(); let zero_distance_dist = Uniform::new(0.5, 1.0); let p_transform = { let (t, _) = (&transforms, &players) .join() .next() .expect("no player found"); (t.clone()) }; for (_, motion, transform1, entity1) in (&allies, &mut motions, &transforms, &entities).join() { let d = (p_transform.translation - transform1.translation).truncate(); let m = d.magnitude().abs(); let mut pv = d.normalize(); if m < config.ally.follow_distance { pv *= 0.0; } else if (m > config.ally.follow_distance) && (m < config.ally.max_distance) { pv *= (1.0 + (m - config.ally.follow_distance) / (config.ally.max_distance - config.ally.follow_distance)) * config.speed; } else if m > config.ally.max_distance { pv *= 2.0 * config.speed; } let mut av = Vector2::new(0.0, 0.0); for (_, transform2, entity2) in (&allies, &transforms, &entities).join() { if entity1 != entity2 { let d = (transform1.translation - transform2.translation).truncate(); let m = d.magnitude().abs(); let mut v = if m == 0.0 { Vector2::new( zero_distance_dist.sample(&mut rng), zero_distance_dist.sample(&mut rng), ).normalize() } else { d.normalize() }; if m < config.ally.min_distance { v *= (config.ally.min_distance - m) / config.ally.min_distance * 5.0 * config.speed; } else if (m > config.ally.follow_distance) && (m < config.ally.max_distance) { v *= ((m - 
config.ally.follow_distance) / (config.ally.max_distance - config.ally.follow_distance)) * config.speed; } av += v; } } motion.vel = pv + av; } } } pub struct Grouper; impl<'s> System<'s> for Grouper { type SystemData = ( ReadStorage<'s, Ally>, ReadStorage<'s, Transform>, WriteStorage<'s, Motion>, ReadStorage<'s, Player>, Entities<'s>, ); fn run(&mut self, (allies, transforms, mut motions, players, entities): Self::SystemData) { let p_transform = { let (t, _) = (&transforms, &players) .join() .next() .expect("no player found"); (t.clone()) }; let mut merged = vec![]; for (_ally, transform, e, _) in (&allies, &transforms, &*entities, !&motions).join() { let d = p_transform.translation - transform.translation; let m = d.truncate().magnitude(); let merge_dist = 32.0 * 1.0; if m < merge_dist { merged.push(e); } } for entity in merged { let _ = motions.insert(entity, Motion::default()); } } } pub struct Spawner; impl<'s> System<'s> for Spawner { type SystemData = ( ReadStorage<'s, Player>, ReadStorage<'s, Motion>, Read<'s, crate::load::LoadedTextures>, WriteStorage<'s, Transform>, WriteStorage<'s, Ally>, WriteStorage<'s, SpriteRender>, WriteStorage<'s, Transparent>, Entities<'s>, WriteStorage<'s, Animation>, ); fn run( &mut self, ( players, motions, textures, mut transforms, mut allies, mut sprites, mut transparent, entities, mut animation, ): Self::SystemData, ) { let count = (&allies, !&motions).join().count(); if count < 5 { let mut ally_positions = vec![]; let range = Uniform::new_inclusive(-5.0 * 32.0, 5.0 * 32.0); let mut rng = rand::thread_rng(); for (_, transform) in (&players, &mut transforms).join() { let mut pos = Transform::default(); pos.scale.x = 0.5; pos.scale.y = 0.5; pos.translation.x = transform.translation.x + range.sample(&mut rng); pos.translation.y = transform.translation.y + range.sample(&mut rng);
priteRender { sprite_sheet: textures.textures["FRONT.png"].clone(), sprite_number: 1, flip_horizontal: false, flip_vertical: false, }; let anim = Animation { total_frames: 8, max_count_till_next_frame: 0.1, frame_life_time_count: 0.1, current_frame: 0, }; entities .build_entity() .with(pos, &mut transforms) .with(Ally::default(), &mut allies) .with(sprite, &mut sprites) .with(Transparent, &mut transparent) .with(anim, &mut animation) .build(); } } } }
ally_positions.push(pos); } for pos in ally_positions { let sprite = S
function_block-random_span
[ { "content": "pub fn run() -> amethyst::Result<()> {\n\n let root = format!(\"{}/resources\", application_root_dir());\n\n let display_config = DisplayConfig::load(format!(\"{}/display_config.ron\", root));\n\n let gameoff_config = config::GameoffConfig::load(format!(\"{}/config.ron\", root));\n\n let pipe = Pipeline::build().with_stage(\n\n Stage::with_backbuffer()\n\n .clear_target([0.1, 0.1, 0.1, 1.0], 1.0)\n\n .with_pass(DrawSprite::new().with_transparency(\n\n ColorMask::all(),\n\n ALPHA,\n\n Some(DepthMode::LessEqualWrite), // Tells the pipeline to respect sprite z-depth\n\n )),\n\n );\n\n\n\n let game_data = GameDataBuilder::default()\n\n .with_bundle(TransformBundle::new())?\n\n .with_bundle(\n\n InputBundle::<String, String>::new()\n\n .with_bindings_from_file(format!(\"{}/input.ron\", root))?,\n\n )?.with(\n", "file_path": "game-core/src/lib.rs", "rank": 0, "score": 129083.25466209007 }, { "content": "pub fn load_map_sprites(world: &mut World) {\n\n let fname = format!(\"{}/resources/testmap.tmx\", application_root_dir());\n\n let file = Path::new(&fname);\n\n let map = tiled::parse_file(&file).unwrap();\n\n\n\n let tileset = &map.tilesets[0];\n\n let image = &tileset.images[0];\n\n\n\n let sprite_cords = |sprite_id| {\n\n let width = image.width as u32 - 2 * tileset.margin + tileset.spacing;\n\n let cols = width / (tileset.tile_width + tileset.spacing);\n\n\n\n let col = sprite_id % cols;\n\n let row = sprite_id / cols;\n\n\n\n let left = tileset.margin + tileset.tile_width * col + tileset.spacing * col;\n\n let top = tileset.margin + tileset.tile_height * row + tileset.spacing * row;\n\n\n\n (left, top)\n\n };\n", "file_path": "game-core/src/map.rs", "rank": 1, "score": 106408.81502116645 }, { "content": "pub fn init_camera(world: &mut World, parent: Entity) {\n\n let mut transform = {\n\n let transforms = world.read_storage::<Transform>();\n\n transforms.get(parent).unwrap().clone()\n\n };\n\n\n\n world.register::<CameraOrtho>();\n\n\n\n 
transform.translation.z = 2.0;\n\n transform.translation.x -= 256.0;\n\n transform.translation.y -= 256.0;\n\n transform.scale.x = 512.0;\n\n transform.scale.y = 512.0;\n\n\n\n world\n\n .create_entity()\n\n .with(CameraOrtho::normalized(CameraNormalizeMode::Contain))\n\n .with(Camera::standard_2d())\n\n .with(transform)\n\n .build();\n\n}\n", "file_path": "game-core/src/state/menu.rs", "rank": 2, "score": 97078.10055853189 }, { "content": "/// Loads texture into world and returns texture id.\n\npub fn texture(world: &mut World, png_path: &str) -> u64 {\n\n let texture_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n let texture_storage = world.read_resource::<AssetStorage<Texture>>();\n\n loader.load(\n\n png_path,\n\n PngFormat,\n\n TextureMetadata::srgb_scale(),\n\n (),\n\n &texture_storage,\n\n )\n\n };\n\n\n\n let mut material_texture_set = world.write_resource::<MaterialTextureSet>();\n\n let texture_id = material_texture_set.len() as u64;\n\n material_texture_set.insert(texture_id, texture_handle);\n\n\n\n texture_id\n\n}\n", "file_path": "game-core/src/load.rs", "rank": 3, "score": 95292.86349700287 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct AmethystSprite {\n\n x: f64,\n\n y: f64,\n\n width: f64,\n\n height: f64,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 4, "score": 88898.38692349599 }, { "content": "pub fn sprite_sheet(world: &mut World, png_path: &str, ron_path: &str) -> SpriteSheetHandle {\n\n let texture_id = super::load::texture(world, png_path);\n\n\n\n let loader = world.read_resource::<Loader>();\n\n let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();\n\n let handle = loader.load(\n\n ron_path,\n\n SpriteSheetFormat,\n\n texture_id,\n\n (),\n\n &sprite_sheet_store,\n\n );\n\n\n\n let mut my = world.write_resource::<LoadedTextures>();\n\n let old_val = my.textures.insert(png_path.into(), handle.clone());\n\n assert!(old_val.is_none());\n\n\n\n handle\n\n}\n\n\n", 
"file_path": "game-core/src/load.rs", "rank": 5, "score": 88497.04387147399 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct AmethystSheet {\n\n spritesheet_width: f64,\n\n spritesheet_height: f64,\n\n sprites: Vec<AmethystSprite>,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 6, "score": 64257.651295513315 }, { "content": "use amethyst::{\n\n core::cgmath::{InnerSpace, Vector2},\n\n core::{timing::Time, Transform},\n\n ecs::{Join, Read, System, WriteStorage},\n\n};\n\nuse crate::component::Motion;\n\n\n\npub struct Movement;\n\n\n\nimpl<'s> System<'s> for Movement {\n\n type SystemData = (\n\n WriteStorage<'s, Motion>,\n\n WriteStorage<'s, Transform>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (mut motions, mut transforms, time): Self::SystemData) {\n\n for (motion, transform) in (&mut motions, &mut transforms).join() {\n\n let delta = time.delta_seconds();\n\n let distance = motion.vel * delta + 0.5 * motion.acc * delta.powf(2.0); // d = v*t + (a*t^2)/2\n", "file_path": "game-core/src/system/motion.rs", "rank": 7, "score": 46190.476434450444 }, { "content": "use amethyst::{\n\n core::timing::Time,\n\n ecs::{Entities, Join, Read, System, WriteStorage},\n\n renderer::SpriteRender,\n\n};\n\nuse crate::component::Animation;\n\n\n\npub struct Frame;\n\n\n\nimpl<'s> System<'s> for Frame {\n\n type SystemData = (\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, Animation>,\n\n Entities<'s>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (mut sprite_render, mut animation, entities, time): Self::SystemData) {\n\n for (_, animation, sprite_render) in (&entities, &mut animation, &mut sprite_render).join()\n\n {\n\n animation.frame_update(sprite_render, time.delta_seconds());\n\n }\n\n }\n\n}\n", "file_path": "game-core/src/system/animation.rs", "rank": 8, "score": 46189.555895548496 }, { "content": " if let Some(min_vel) = motion.min_vel {\n\n if distance.magnitude2() < (min_vel * delta).powf(2.0)\n\n || 
motion.vel.dot(motion.vel + motion.acc * delta) < 0.0\n\n {\n\n distance.normalize_to(min_vel * delta);\n\n motion.vel = motion.vel.normalize_to(min_vel);\n\n motion.acc = Vector2 { x: 0.0, y: 0.0 };\n\n motion.min_vel = None;\n\n }\n\n } else if let Some(max_vel) = motion.max_vel {\n\n if distance.magnitude2() > (max_vel * delta).powf(2.0) {\n\n distance.normalize_to(max_vel * delta);\n\n motion.vel = motion.vel.normalize_to(max_vel);\n\n motion.acc = Vector2 { x: 0.0, y: 0.0 };\n\n motion.max_vel = None;\n\n }\n\n }\n\n motion.vel += motion.acc * delta; // vo = vi + a*t\n\n transform.translation += distance.extend(0.0);\n\n }\n\n }\n\n}\n", "file_path": "game-core/src/system/motion.rs", "rank": 9, "score": 46171.60579193264 }, { "content": "use amethyst::{\n\n core::cgmath::{InnerSpace, Vector2},\n\n core::Transform,\n\n ecs::{Entities, Join, Read, ReadStorage, System, WriteStorage},\n\n input::InputHandler,\n\n renderer::{SpriteRender, Transparent},\n\n};\n\nuse crate::component::{Animation, Enemy, Motion, Player, Projectile};\n\nuse rand::distributions::{Distribution, Uniform};\n\n\n\npub struct Movement;\n\n\n\nimpl<'s> System<'s> for Movement {\n\n type SystemData = (\n\n WriteStorage<'s, Player>,\n\n WriteStorage<'s, Transform>,\n\n Read<'s, InputHandler<String, String>>,\n\n Option<Read<'s, crate::map::PassableTiles>>,\n\n );\n\n\n", "file_path": "game-core/src/system/player.rs", "rank": 13, "score": 45611.85513543618 }, { "content": " mut transparent,\n\n mut animations,\n\n entities,\n\n input,\n\n ): Self::SystemData,\n\n ) {\n\n let mut bubble_transform = None;\n\n let mut bubble_dir = None;\n\n for (player, p_transform) in (&players, &transforms).join() {\n\n for (enemy, e_transform, enemy_entity) in (&mut enemies, &transforms, &*entities).join()\n\n {\n\n if input.action_is_down(\"jump\") == Some(true) {\n\n bubble_transform = Some(p_transform.clone());\n\n\n\n let range = Uniform::new_inclusive(-5.0 * 32.0, 5.0 * 32.0);\n\n let mut rng = 
rand::thread_rng();\n\n let perp = Vector2 {\n\n x: player.last_direction.y,\n\n y: -player.last_direction.x,\n\n };\n", "file_path": "game-core/src/system/player.rs", "rank": 16, "score": 45608.55205766159 }, { "content": " Read<'s, crate::load::LoadedTextures>,\n\n WriteStorage<'s, Projectile>,\n\n WriteStorage<'s, Motion>,\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, Transparent>,\n\n WriteStorage<'s, Animation>,\n\n Entities<'s>,\n\n Read<'s, InputHandler<String, String>>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (\n\n players,\n\n mut enemies,\n\n mut transforms,\n\n textures,\n\n mut projectiles,\n\n mut motions,\n\n mut sprites,\n", "file_path": "game-core/src/system/player.rs", "rank": 20, "score": 45605.179653978776 }, { "content": " let perp = perp.normalize_to(range.sample(&mut rng));\n\n\n\n bubble_dir = Some(player.last_direction.normalize_to(32.0 * 23.0) + perp);\n\n }\n\n\n\n if e_transform.translation.x < p_transform.translation.x\n\n && e_transform.translation.y < p_transform.translation.y\n\n {\n\n if enemy.hp > 0 {\n\n enemy.hp -= 1;\n\n } else {\n\n let _r = entities.delete(enemy_entity);\n\n }\n\n }\n\n }\n\n }\n\n\n\n if let Some(transform) = bubble_transform {\n\n let sprite = SpriteRender {\n\n sprite_sheet: textures.textures[\"bubble.png\"].clone(),\n", "file_path": "game-core/src/system/player.rs", "rank": 22, "score": 45601.93593274888 }, { "content": " .build_entity()\n\n .with(transform, &mut transforms)\n\n .with(Projectile, &mut projectiles)\n\n .with(motion, &mut motions)\n\n .with(sprite, &mut sprites)\n\n .with(Transparent, &mut transparent)\n\n .with(anim, &mut animations)\n\n .build();\n\n }\n\n }\n\n}\n", "file_path": "game-core/src/system/player.rs", "rank": 23, "score": 45601.44450844723 }, { "content": " fn run(&mut self, (mut players, mut transforms, input, passable): Self::SystemData) {\n\n if let Some(passable) = passable {\n\n let x_move = input.axis_value(\"entity_x\").unwrap();\n\n let y_move = 
input.axis_value(\"entity_y\").unwrap();\n\n\n\n for (player, transform) in (&mut players, &mut transforms).join() {\n\n if x_move != 0.0 || y_move != 0.0 {\n\n player.last_direction = Vector2 {\n\n x: x_move as f32,\n\n y: y_move as f32,\n\n };\n\n }\n\n\n\n let goal_x = transform.translation.x + x_move as f32 * 5.0;\n\n let goal_y = transform.translation.y + y_move as f32 * 5.0;\n\n\n\n let tile_y = (goal_y as u32 / 32) as usize;\n\n let tile_x = (goal_x as u32 / 32) as usize;\n\n\n\n if *passable\n", "file_path": "game-core/src/system/player.rs", "rank": 25, "score": 45599.64345833361 }, { "content": " .tile_matrix\n\n .get(tile_y)\n\n .and_then(|row| row.get(tile_x))\n\n .unwrap_or(&false)\n\n {\n\n transform.translation.x = goal_x;\n\n transform.translation.y = goal_y;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct Attack;\n\n\n\nimpl<'s> System<'s> for Attack {\n\n type SystemData = (\n\n ReadStorage<'s, Player>,\n\n WriteStorage<'s, Enemy>,\n\n WriteStorage<'s, Transform>,\n", "file_path": "game-core/src/system/player.rs", "rank": 26, "score": 45595.10415126881 }, { "content": " sprite_number: 0,\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let anim = Animation {\n\n total_frames: 2,\n\n max_count_till_next_frame: 0.5,\n\n frame_life_time_count: 0.5,\n\n current_frame: 0,\n\n };\n\n\n\n let motion = Motion {\n\n vel: bubble_dir.unwrap(),\n\n acc: bubble_dir.unwrap() * -2.0,\n\n min_vel: Some(32.0),\n\n max_vel: None,\n\n };\n\n\n\n entities\n", "file_path": "game-core/src/system/player.rs", "rank": 27, "score": 45594.90302779055 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Piskelwh {\n\n w: u32,\n\n h: u32,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 28, "score": 40606.77303894779 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Piskelxywh {\n\n x: u32,\n\n y: u32,\n\n w: u32,\n\n h: u32,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 29, "score": 40606.77303894779 }, { 
"content": "#[derive(Debug, Deserialize)]\n\nstruct PiskelMeta {\n\n app: String,\n\n version: String,\n\n image: String,\n\n format: String,\n\n size: Piskelwh,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 30, "score": 39616.915667530644 }, { "content": "fn main() {\n\n if let Err(env::VarError::NotPresent) = env::var(\"RUST_LOG\") {\n\n env::set_var(\"RUST_LOG\", \"debug,gfx_device_gl=warn,amethyst_assets=warn\");\n\n }\n\n\n\n env_logger::init();\n\n\n\n game_core::run().unwrap();\n\n}\n", "file_path": "game-main/src/main.rs", "rank": 31, "score": 39616.915667530644 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct PiskelSheet {\n\n frames: HashMap<String, PiskelImage>,\n\n meta: PiskelMeta,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 32, "score": 39616.915667530644 }, { "content": "#[allow(non_snake_case)]\n\n#[derive(Debug, Deserialize)]\n\nstruct PiskelImage {\n\n frame: Piskelxywh,\n\n rotated: bool,\n\n trimmed: bool,\n\n spriteSourceSize: Piskelxywh,\n\n sourceSize: Piskelwh,\n\n}\n\n\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 33, "score": 39616.915667530644 }, { "content": "fn main() -> Result<(), Error> {\n\n // Open the file in read-only mode.\n\n let path = std::env::args().nth(1).unwrap();\n\n let file = File::open(path)?;\n\n\n\n // Read the JSON contents of the file as an instance of `User`.\n\n let u: PiskelSheet = serde_json::from_reader(file)?;\n\n\n\n let mut out = AmethystSheet {\n\n spritesheet_width: f64::from(u.meta.size.w),\n\n spritesheet_height: f64::from(u.meta.size.h),\n\n sprites: vec![],\n\n };\n\n\n\n for sprite in u.frames.values() {\n\n out.sprites.push(AmethystSprite {\n\n x: f64::from(sprite.frame.x),\n\n y: f64::from(sprite.frame.y),\n\n width: f64::from(sprite.frame.w),\n\n height: f64::from(sprite.frame.h),\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 34, "score": 34597.36319837881 }, { "content": "pub struct GameoffConfig {\n\n pub ally: Ally,\n\n pub 
speed: f32,\n\n}\n\n\n\nimpl Default for GameoffConfig {\n\n fn default() -> Self {\n\n GameoffConfig {\n\n speed: 0.0,\n\n ally: Ally::default(),\n\n }\n\n }\n\n}\n", "file_path": "game-core/src/config.rs", "rank": 35, "score": 25730.81660706131 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Ally {\n\n pub follow_distance: f32,\n\n pub max_distance: f32,\n\n pub min_distance: f32,\n\n}\n\n\n\nimpl Default for Ally {\n\n fn default() -> Self {\n\n Ally {\n\n follow_distance: 0.0,\n\n max_distance: 0.0,\n\n min_distance: 0.0,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n", "file_path": "game-core/src/config.rs", "rank": 36, "score": 25727.856290912176 }, { "content": "use amethyst::{\n\n ecs::{Component, DenseVecStorage},\n\n renderer::SpriteRender,\n\n};\n\n\n\npub struct Animation {\n\n pub total_frames: usize,\n\n pub max_count_till_next_frame: f32, // These are in seconds\n\n pub frame_life_time_count: f32, // These are in seconds\n\n pub current_frame: usize,\n\n}\n\n\n\nimpl Default for Animation {\n\n fn default() -> Self {\n\n Self {\n\n total_frames: 0,\n\n max_count_till_next_frame: 0.0,\n\n frame_life_time_count: 0.0,\n\n current_frame: 0,\n\n }\n", "file_path": "game-core/src/component/animation.rs", "rank": 37, "score": 24661.13285283458 }, { "content": "use amethyst::{\n\n core::cgmath::Vector2,\n\n ecs::{Component, DenseVecStorage},\n\n};\n\n\n\npub struct Motion {\n\n pub vel: Vector2<f32>,\n\n pub acc: Vector2<f32>,\n\n pub max_vel: Option<f32>,\n\n pub min_vel: Option<f32>,\n\n}\n\n\n\nimpl Default for Motion {\n\n fn default() -> Self {\n\n Self {\n\n vel: Vector2 { x: 0.0, y: 0.0 },\n\n acc: Vector2 { x: 0.0, y: 0.0 },\n\n max_vel: None,\n\n min_vel: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Component for Motion {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n", "file_path": "game-core/src/component/motion.rs", "rank": 38, "score": 24659.58443363412 }, { 
"content": " }\n\n}\n\n\n\nimpl Component for Animation {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\nimpl Animation {\n\n pub fn frame_update(&mut self, sprite_render: &mut SpriteRender, seconds: f32) {\n\n if self.frame_life_time_count > 0.0 {\n\n self.frame_life_time_count -= seconds;\n\n } else {\n\n self.frame_life_time_count = self.max_count_till_next_frame;\n\n self.current_frame = (self.current_frame + 1) % self.total_frames;\n\n }\n\n\n\n sprite_render.sprite_number = self.current_frame;\n\n }\n\n}\n", "file_path": "game-core/src/component/animation.rs", "rank": 39, "score": 24659.54852647544 }, { "content": "use amethyst::ecs::{Component, HashMapStorage};\n\nuse amethyst::{\n\n core::cgmath::Vector2,\n\n core::Transform,\n\n ecs::Entity,\n\n prelude::*,\n\n renderer::{SpriteRender, SpriteSheetHandle, Transparent},\n\n};\n\nuse crate::component::Animation;\n\n\n\n#[derive(Debug)]\n\npub struct Player {\n\n pub hp: u32,\n\n pub num_allies: u32,\n\n pub last_direction: Vector2<f32>,\n\n}\n\n\n\nimpl Default for Player {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "game-core/src/component/player.rs", "rank": 40, "score": 24086.732737081908 }, { "content": " hp: 10,\n\n num_allies: 0,\n\n last_direction: Vector2 { x: 1.0, y: 1.0 },\n\n }\n\n }\n\n}\n\n\n\nimpl Component for Player {\n\n type Storage = HashMapStorage<Self>;\n\n}\n\n\n\nimpl Player {\n\n pub fn new(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity {\n\n let mut transform = Transform::default();\n\n transform.translation.x = 32.0 * 70.0;\n\n transform.translation.y = 32.0 * 50.0;\n\n\n\n let sprite = SpriteRender {\n\n sprite_sheet: sprite_sheet.clone(),\n\n sprite_number: 1,\n", "file_path": "game-core/src/component/player.rs", "rank": 41, "score": 24081.59078089831 }, { "content": "use amethyst::ecs::{Component, DenseVecStorage};\n\n\n\npub struct Ally {\n\n pub hp: u32,\n\n}\n\n\n\nimpl Default for Ally {\n\n fn default() -> Self {\n\n Self { hp: 10 }\n\n 
}\n\n}\n\n\n\nimpl Component for Ally {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n", "file_path": "game-core/src/component/ally.rs", "rank": 42, "score": 24079.027956289818 }, { "content": " flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let anim = Animation {\n\n total_frames: 8,\n\n max_count_till_next_frame: 0.5,\n\n frame_life_time_count: 0.5,\n\n current_frame: 0,\n\n };\n\n\n\n world\n\n .create_entity()\n\n .with(transform)\n\n .with(Player::default())\n\n .with(sprite)\n\n .with(Transparent)\n\n .with(anim)\n\n .build()\n\n }\n\n}\n", "file_path": "game-core/src/component/player.rs", "rank": 43, "score": 24077.67862719153 }, { "content": "\n\n fn run(\n\n &mut self,\n\n (\n\n players,\n\n textures,\n\n mut transforms,\n\n mut enemies,\n\n mut motions,\n\n mut sprites,\n\n mut transparent,\n\n entities,\n\n mut animation,\n\n passable,\n\n ): Self::SystemData,\n\n ) {\n\n let count = (&enemies).join().count();\n\n\n\n if let Some(passable) = passable {\n\n if count < 5 {\n", "file_path": "game-core/src/system/enemy.rs", "rank": 44, "score": 21552.14089870348 }, { "content": "use amethyst::core::cgmath::InnerSpace;\n\nuse amethyst::core::cgmath::Vector2;\n\nuse amethyst::renderer::Camera;\n\nuse amethyst::{\n\n core::Transform,\n\n ecs::{Join, ReadStorage, System, WriteStorage},\n\n};\n\nuse crate::component::Player;\n\n\n\npub struct Movement;\n\n\n\nimpl<'s> System<'s> for Movement {\n\n type SystemData = (\n\n ReadStorage<'s, Player>,\n\n ReadStorage<'s, Camera>,\n\n WriteStorage<'s, Transform>,\n\n );\n\n\n\n fn run(&mut self, (players, cameras, mut transforms): Self::SystemData) {\n\n let mut player_translation = Vector2 { x: 0.0, y: 0.0 };\n", "file_path": "game-core/src/system/camera.rs", "rank": 45, "score": 21552.101127554706 }, { "content": "use amethyst::{\n\n core::cgmath::{InnerSpace, Vector2},\n\n core::timing::Time,\n\n core::Transform,\n\n ecs::{Entities, Join, Read, ReadStorage, System, WriteStorage},\n\n 
renderer::{SpriteRender, Transparent},\n\n};\n\nuse crate::component::{Animation, Enemy, Motion, Player, Projectile};\n\nuse rand::distributions::{Distribution, Uniform};\n\nuse std::f32::consts::PI;\n\nuse std::time::Duration;\n\n\n\npub struct Movement {\n\n pub random_movement_time: Duration,\n\n pub random_idle_time: Duration,\n\n}\n\n\n\nimpl<'s> System<'s> for Movement {\n\n type SystemData = (\n\n ReadStorage<'s, Player>,\n", "file_path": "game-core/src/system/enemy.rs", "rank": 46, "score": 21551.54439624189 }, { "content": " mut transforms,\n\n textures,\n\n mut projectiles,\n\n mut motions,\n\n mut sprites,\n\n mut transparent,\n\n mut animations,\n\n entities,\n\n ): Self::SystemData,\n\n ) {\n\n let mut bubble_transform = None;\n\n let mut bubble_dir = None;\n\n for (_player, _p_transform) in (&players, &transforms).join() {\n\n for (enemy, e_transform, e_motion) in (&mut enemies, &transforms, &motions).join() {\n\n // if they're moving they shoot\n\n if enemy.has_player_in_sight {\n\n bubble_transform = Some(e_transform.clone());\n\n\n\n let range = Uniform::new_inclusive(-5.0 * 32.0, 5.0 * 32.0);\n\n let mut rng = rand::thread_rng();\n", "file_path": "game-core/src/system/enemy.rs", "rank": 47, "score": 21550.839880027397 }, { "content": "\n\nimpl<'s> System<'s> for Attack {\n\n type SystemData = (\n\n ReadStorage<'s, Player>,\n\n WriteStorage<'s, Enemy>,\n\n WriteStorage<'s, Transform>,\n\n Read<'s, crate::load::LoadedTextures>,\n\n WriteStorage<'s, Projectile>,\n\n WriteStorage<'s, Motion>,\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, Transparent>,\n\n WriteStorage<'s, Animation>,\n\n Entities<'s>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (\n\n players,\n\n mut enemies,\n", "file_path": "game-core/src/system/enemy.rs", "rank": 48, "score": 21546.669589304627 }, { "content": " .build();\n\n }\n\n }\n\n}\n\n\n\npub struct Spawner;\n\n\n\nimpl<'s> System<'s> for Spawner {\n\n type SystemData = (\n\n ReadStorage<'s, Player>,\n\n Read<'s, 
crate::load::LoadedTextures>,\n\n WriteStorage<'s, Transform>,\n\n WriteStorage<'s, Enemy>,\n\n WriteStorage<'s, Motion>,\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, Transparent>,\n\n Entities<'s>,\n\n WriteStorage<'s, Animation>,\n\n Option<Read<'s, crate::map::PassableTiles>>,\n\n );\n", "file_path": "game-core/src/system/enemy.rs", "rank": 49, "score": 21546.408888682286 }, { "content": " max_count_till_next_frame: 0.5,\n\n frame_life_time_count: 0.5,\n\n current_frame: 0,\n\n };\n\n\n\n let motion = Motion {\n\n vel: bubble_dir.unwrap(),\n\n acc: bubble_dir.unwrap() * -2.0,\n\n min_vel: Some(32.0),\n\n max_vel: None,\n\n };\n\n\n\n entities\n\n .build_entity()\n\n .with(transform, &mut transforms)\n\n .with(Projectile, &mut projectiles)\n\n .with(motion, &mut motions)\n\n .with(sprite, &mut sprites)\n\n .with(Transparent, &mut transparent)\n\n .with(anim, &mut animations)\n", "file_path": "game-core/src/system/enemy.rs", "rank": 50, "score": 21542.98979922431 }, { "content": " if motion.vel.magnitude2() == 0.0 {\n\n let range = Uniform::new_inclusive(0.0, 2.0 * PI);\n\n let mut rng = rand::thread_rng();\n\n let random_velocity = Vector2 {\n\n x: range.sample(&mut rng).sin(),\n\n y: range.sample(&mut rng).cos(),\n\n };\n\n motion.vel = random_velocity.normalize_to(idle_velocity);\n\n }\n\n } else if current_second % 3.0 == 0.0 {\n\n motion.vel = Vector2 { x: 0.0, y: 0.0 };\n\n }\n\n */\n\n enemy.has_player_in_sight = false;\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct Attack;\n", "file_path": "game-core/src/system/enemy.rs", "rank": 51, "score": 21542.163140329012 }, { "content": " let perp = e_motion.vel;\n\n let perp = perp.normalize_to(range.sample(&mut rng));\n\n\n\n bubble_dir = Some(e_motion.vel.normalize_to(32.0 * 23.0) + perp);\n\n }\n\n\n\n // do some dmg stuff here maybe\n\n }\n\n }\n\n\n\n if let Some(transform) = bubble_transform {\n\n let sprite = SpriteRender {\n\n sprite_sheet: textures.textures[\"bubble.png\"].clone(),\n\n sprite_number: 
0,\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let anim = Animation {\n\n total_frames: 2,\n", "file_path": "game-core/src/system/enemy.rs", "rank": 52, "score": 21541.3885653141 }, { "content": " WriteStorage<'s, Enemy>,\n\n WriteStorage<'s, Motion>,\n\n WriteStorage<'s, Transform>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (players, mut enemies, mut motions, transforms, time): Self::SystemData) {\n\n let idle_velocity = 50.0;\n\n let tracking_velocity = 100.0;\n\n\n\n let mut player_translation = Vector2 { x: 0.0, y: 0.0 };\n\n let detect_radius = 180.0;\n\n let detection_circle = Vector2 {\n\n x: detect_radius,\n\n y: detect_radius,\n\n };\n\n\n\n let time_accel = 4.0;\n\n // let current_second = (time.absolute_time_seconds() * time_accel).floor();\n\n\n", "file_path": "game-core/src/system/enemy.rs", "rank": 53, "score": 21539.49961701673 }, { "content": " }\n\n\n\n for pos in enemy_positions {\n\n let sprite = SpriteRender {\n\n sprite_sheet: textures.textures[\"penguinFront.png\"].clone(),\n\n sprite_number: 0,\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let anim = Animation {\n\n total_frames: 2,\n\n max_count_till_next_frame: 0.7,\n\n frame_life_time_count: 0.7,\n\n current_frame: 0,\n\n };\n\n\n\n entities\n\n .build_entity()\n\n .with(pos, &mut transforms)\n", "file_path": "game-core/src/system/enemy.rs", "rank": 54, "score": 21539.139366850184 }, { "content": " }\n\n }\n\n\n\n if motion.vel.magnitude2() == 0.0 {\n\n if let Some(diff) = self.random_idle_time.checked_sub(time.delta_time()) {\n\n self.random_idle_time = diff;\n\n } else {\n\n let range = Uniform::new_inclusive(0.0, 2.0 * PI);\n\n let mut rng = rand::thread_rng();\n\n let random_velocity = Vector2 {\n\n x: range.sample(&mut rng).sin(),\n\n y: range.sample(&mut rng).cos(),\n\n };\n\n motion.vel = random_velocity.normalize_to(idle_velocity);\n\n self.random_movement_time = Duration::new(2, 0);\n\n }\n\n }\n\n\n\n /*\n\n if 
current_second % 2.0 == 0.0 {\n", "file_path": "game-core/src/system/enemy.rs", "rank": 55, "score": 21538.499817776923 }, { "content": " // get player position\n\n for (_, transform) in (&players, &transforms).join() {\n\n player_translation = transform.translation.truncate();\n\n }\n\n\n\n for (enemy, motion, transform) in (&mut enemies, &mut motions, &transforms).join() {\n\n let enemy_translation = transform.translation.truncate();\n\n let player_direction = player_translation - enemy_translation;\n\n\n\n if player_direction.magnitude2() <= detection_circle.magnitude2() {\n\n let enemy_shift = player_direction.normalize_to(tracking_velocity);\n\n motion.vel = enemy_shift;\n\n enemy.has_player_in_sight = true;\n\n } else {\n\n if motion.vel.magnitude2() > 0.0 {\n\n if let Some(diff) = self.random_movement_time.checked_sub(time.delta_time()) {\n\n self.random_movement_time = diff;\n\n } else {\n\n motion.vel = Vector2 { x: 0.0, y: 0.0 };\n\n self.random_idle_time = Duration::new(2, 0);\n", "file_path": "game-core/src/system/enemy.rs", "rank": 56, "score": 21538.447627931324 }, { "content": " let mut enemy_positions = vec![];\n\n let range = Uniform::new_inclusive(-5.0 * 32.0, 5.0 * 32.0);\n\n let mut rng = rand::thread_rng();\n\n for (_, transform) in (&players, &mut transforms).join() {\n\n let mut pos = Transform::default();\n\n pos.translation.x = transform.translation.x + range.sample(&mut rng);\n\n pos.translation.y = transform.translation.y + range.sample(&mut rng);\n\n\n\n // get tile and check if passable\n\n let tile_y = (pos.translation.y as u32 / 32) as usize;\n\n let tile_x = (pos.translation.x as u32 / 32) as usize;\n\n\n\n if *passable\n\n .tile_matrix\n\n .get(tile_y)\n\n .and_then(|row| row.get(tile_x))\n\n .unwrap_or(&false)\n\n {\n\n enemy_positions.push(pos);\n\n }\n", "file_path": "game-core/src/system/enemy.rs", "rank": 57, "score": 21538.084815908587 }, { "content": " .with(Enemy::default(), &mut enemies)\n\n .with(Motion::default(), &mut 
motions)\n\n .with(sprite, &mut sprites)\n\n .with(Transparent, &mut transparent)\n\n .with(anim, &mut animation)\n\n .build();\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "game-core/src/system/enemy.rs", "rank": 58, "score": 21536.40536148524 }, { "content": "pub mod ally;\n\npub mod animation;\n\npub mod camera;\n\npub mod enemy;\n\npub mod motion;\n\npub mod player;\n", "file_path": "game-core/src/system/mod.rs", "rank": 59, "score": 21534.194427684146 }, { "content": "\n\n for (_, transform) in (&players, &mut transforms).join() {\n\n player_translation = transform.translation.truncate();\n\n }\n\n\n\n for (_, transform) in (&cameras, &mut transforms).join() {\n\n let camera_translation = transform.translation.truncate();\n\n let camera_scale = transform.scale.truncate();\n\n let player_direction = player_translation - camera_translation - camera_scale / 2.0;\n\n let camera_safe_edge = camera_scale / 4.0;\n\n\n\n if player_direction.magnitude2() > camera_safe_edge.magnitude2() {\n\n let camera_shift =\n\n player_direction - player_direction.normalize_to(camera_safe_edge.magnitude());\n\n transform.translation += camera_shift.extend(0.0);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "game-core/src/system/camera.rs", "rank": 60, "score": 21533.330104851975 }, { "content": " let mut dispatcher: Dispatcher = DispatcherBuilder::new()\n\n .with(player::Movement, \"player-movement\", &[])\n\n .with(\n\n enemy::Movement {\n\n random_movement_time: Duration::new(0, 0),\n\n random_idle_time: Duration::new(1, 0),\n\n },\n\n \"enemy-movement\",\n\n &[],\n\n ).with(camera::Movement, \"camera-movement\", &[])\n\n .with(enemy::Spawner, \"enemy-spawner\", &[])\n\n .with(ally::Movement, \"ally-movement\", &[])\n\n .with(ally::Grouper, \"ally-grouper\", &[])\n\n .with(ally::Spawner, \"ally-spawner\", &[])\n\n .with(player::Attack, \"player-attack\", &[])\n\n .with(enemy::Attack, \"enemy-attack\", &[])\n\n .with(animation::Frame, \"frame-animation\", &[])\n\n .with(motion::Movement, 
\"projectile-movement\", &[])\n\n .build();\n\n dispatcher.setup(&mut world.res);\n\n Trans::Push(Box::new(Game { dispatcher }))\n\n }\n\n}\n\n\n", "file_path": "game-core/src/state/menu.rs", "rank": 61, "score": 19.20273258909609 }, { "content": "use amethyst::{\n\n core::Transform,\n\n ecs::prelude::*,\n\n ecs::Entity,\n\n prelude::*,\n\n renderer::Camera,\n\n utils::ortho_camera::{CameraNormalizeMode, CameraOrtho},\n\n};\n\nuse component::{Animation, Player};\n\nuse crate::load;\n\nuse crate::state::Game;\n\nuse crate::system::*;\n\nuse std::time::Duration;\n\n\n\npub struct Menu;\n\n\n\nimpl SimpleState<'static, 'static> for Menu {\n\n fn on_start(&mut self, data: StateData<GameData>) {\n\n let world = data.world;\n\n\n", "file_path": "game-core/src/state/menu.rs", "rank": 62, "score": 18.0234232746318 }, { "content": "pub mod ally;\n\npub mod animation;\n\npub mod enemy;\n\npub mod motion;\n\npub mod player;\n\npub mod projectile;\n\n\n\npub use self::ally::Ally;\n\npub use self::animation::Animation;\n\npub use self::enemy::Enemy;\n\npub use self::motion::Motion;\n\npub use self::player::Player;\n\npub use self::projectile::Projectile;\n", "file_path": "game-core/src/component/mod.rs", "rank": 63, "score": 17.76488136656588 }, { "content": "use amethyst::ecs::{Component, DenseVecStorage};\n\n\n\npub struct Enemy {\n\n pub hp: u32,\n\n pub has_player_in_sight: bool,\n\n}\n\n\n\nimpl Default for Enemy {\n\n fn default() -> Self {\n\n Self {\n\n hp: 120,\n\n has_player_in_sight: false,\n\n }\n\n }\n\n}\n\n\n\nimpl Component for Enemy {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n", "file_path": "game-core/src/component/enemy.rs", "rank": 64, "score": 17.26480601257887 }, { "content": " amethyst::utils::ortho_camera::CameraOrthoSystem::default(),\n\n \"OrthoCamera\",\n\n &[],\n\n ).with_bundle(\n\n RenderBundle::new(pipe, Some(display_config))\n\n .with_sprite_sheet_processor()\n\n .with_sprite_visibility_sorting(&[]), // Let's us use the `Transparent` 
component\n\n )?;\n\n\n\n let mut game = Application::build(root, Menu)?\n\n .with_resource(gameoff_config)\n\n .build(game_data)?;\n\n game.run();\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "game-core/src/lib.rs", "rank": 65, "score": 15.729483522042274 }, { "content": "#![cfg_attr(\n\n feature = \"cargo-clippy\",\n\n allow(clippy::type_complexity, clippy::new_ret_no_self)\n\n)]\n\nextern crate amethyst;\n\nextern crate rand;\n\nextern crate serde;\n\n\n\nmod component;\n\npub mod config;\n\nmod load;\n\nmod map;\n\nmod state;\n\nmod system;\n\n\n\nuse amethyst::{\n\n core::TransformBundle,\n\n input::InputBundle,\n\n prelude::*,\n\n renderer::{\n\n ColorMask, DepthMode, DisplayConfig, DrawSprite, Pipeline, RenderBundle, Stage, ALPHA,\n\n },\n\n utils::application_root_dir,\n\n};\n\nuse state::Menu;\n\n\n", "file_path": "game-core/src/lib.rs", "rank": 66, "score": 13.178108296588366 }, { "content": "use amethyst::{\n\n assets::{AssetStorage, Loader},\n\n prelude::*,\n\n renderer::{\n\n MaterialTextureSet, PngFormat, SpriteSheet, SpriteSheetFormat, SpriteSheetHandle, Texture,\n\n TextureMetadata,\n\n },\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Default)]\n\npub struct LoadedTextures {\n\n pub textures: HashMap<String, SpriteSheetHandle>,\n\n}\n\n\n", "file_path": "game-core/src/load.rs", "rank": 67, "score": 12.665160783173807 }, { "content": "use amethyst::ecs::{Component, NullStorage};\n\n\n\npub struct Projectile;\n\n\n\nimpl Default for Projectile {\n\n fn default() -> Self {\n\n Projectile\n\n }\n\n}\n\n\n\nimpl Component for Projectile {\n\n type Storage = NullStorage<Self>;\n\n}\n", "file_path": "game-core/src/component/projectile.rs", "rank": 68, "score": 12.359933725709897 }, { "content": "\n\n let texture_id = load::texture(world, &image.source);\n\n\n\n let handle = {\n\n let loader = world.read_resource::<Loader>();\n\n let sprite_sheet_store = 
world.read_resource::<AssetStorage<SpriteSheet>>();\n\n\n\n let mut sprites: Vec<Sprite> = Vec::with_capacity(tileset.tiles.len());\n\n\n\n for tile in &tileset.tiles {\n\n let (left, top) = sprite_cords(tile.id);\n\n\n\n let sprite = Sprite::from_pixel_values(\n\n image.width as u32,\n\n image.height as u32,\n\n tileset.tile_width,\n\n tileset.tile_height,\n\n left,\n\n top,\n\n [0, 0],\n", "file_path": "game-core/src/map.rs", "rank": 69, "score": 11.077557692425874 }, { "content": "use amethyst::renderer::Sprite;\n\nuse amethyst::utils::application_root_dir;\n\nuse amethyst::{\n\n assets::{AssetStorage, Loader},\n\n core::Transform,\n\n prelude::*,\n\n renderer::{SpriteRender, SpriteSheet},\n\n};\n\nuse crate::load;\n\nuse std::path::Path;\n\n\n", "file_path": "game-core/src/map.rs", "rank": 70, "score": 10.970167517637654 }, { "content": " );\n\n\n\n sprites.push(sprite);\n\n }\n\n\n\n loader.load_from_data(\n\n SpriteSheet {\n\n texture_id,\n\n sprites,\n\n },\n\n (),\n\n &sprite_sheet_store,\n\n )\n\n };\n\n\n\n let mut left = (tileset.tile_width / 2) as f32;\n\n let mut top = (tileset.tile_height / 2) as f32;\n\n\n\n let layer = &map.layers[0];\n\n let mut passable: Vec<Vec<bool>> = Vec::with_capacity(layer.tiles.len());\n", "file_path": "game-core/src/map.rs", "rank": 71, "score": 10.46785509645227 }, { "content": " world.create_entity().with(transform).with(sprite).build();\n\n }\n\n\n\n left += tileset.tile_width as f32;\n\n }\n\n\n\n passable.push(passable_row);\n\n\n\n left = (tileset.tile_width / 2) as f32;\n\n top += tileset.tile_height as f32;\n\n }\n\n\n\n world.add_resource(PassableTiles {\n\n tile_matrix: passable,\n\n });\n\n}\n\n\n\npub struct PassableTiles {\n\n pub tile_matrix: Vec<Vec<bool>>,\n\n}\n", "file_path": "game-core/src/map.rs", "rank": 72, "score": 9.783239280835556 }, { "content": "\n\n for row in layer.tiles.iter().rev() {\n\n let mut passable_row: Vec<bool> = Vec::with_capacity(row.len());\n\n\n\n for tile_id in row {\n\n 
passable_row.push(*tile_id != 30);\n\n\n\n if *tile_id != 30 && *tile_id != 0 {\n\n let mut transform = Transform::default();\n\n transform.translation.z = -1.0;\n\n transform.translation.x = left;\n\n transform.translation.y = top;\n\n\n\n let sprite = SpriteRender {\n\n sprite_sheet: handle.clone(),\n\n sprite_number: *tile_id as usize - 1,\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n", "file_path": "game-core/src/map.rs", "rank": 73, "score": 9.764025823343538 }, { "content": " world.register::<Player>();\n\n world.register::<Animation>();\n\n\n\n world.add_resource(load::LoadedTextures::default());\n\n\n\n let player_sprite_sheet_handle = load::sprite_sheet(world, \"FRONT.png\", \"FRONT.ron\");\n\n let _penguin_sprite_sheet_handle =\n\n load::sprite_sheet(world, \"penguinFront.png\", \"penguinFront.ron\");\n\n let _ = load::sprite_sheet(world, \"bubble.png\", \"bubble.ron\");\n\n\n\n crate::map::load_map_sprites(world);\n\n let parent = Player::new(world, &player_sprite_sheet_handle);\n\n init_camera(world, parent);\n\n }\n\n\n\n fn update(\n\n &mut self,\n\n data: &mut StateData<GameData>,\n\n ) -> Trans<GameData<'static, 'static>, StateEvent> {\n\n let world = &mut data.world;\n", "file_path": "game-core/src/state/menu.rs", "rank": 74, "score": 9.74478055492453 }, { "content": "use amethyst::{ecs::prelude::*, prelude::*};\n\n\n\npub struct Game<'a, 'b> {\n\n pub dispatcher: Dispatcher<'a, 'b>,\n\n}\n\n\n\nimpl<'a, 'b> State<GameData<'a, 'b>, StateEvent> for Game<'a, 'b> {\n\n fn update(&mut self, data: StateData<GameData<'a, 'b>>) -> Trans<GameData<'a, 'b>, StateEvent> {\n\n data.data.update(&data.world);\n\n self.dispatcher.dispatch(&data.world.res);\n\n\n\n Trans::None\n\n }\n\n}\n", "file_path": "game-core/src/state/game.rs", "rank": 75, "score": 9.2386772828592 }, { "content": "mod game;\n\nmod menu;\n\n\n\npub use self::game::Game;\n\npub use self::menu::Menu;\n", "file_path": "game-core/src/state/mod.rs", "rank": 76, "score": 
6.116870828989207 }, { "content": "#[macro_use]\n\nextern crate serde_derive;\n\n\n\nextern crate failure;\n\nextern crate serde;\n\nextern crate serde_json;\n\n\n\nuse failure::Error;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 77, "score": 3.180315381177957 }, { "content": "extern crate game_core;\n\n\n\nuse std::env;\n\n\n", "file_path": "game-main/src/main.rs", "rank": 78, "score": 2.7342022229431833 }, { "content": " });\n\n }\n\n\n\n println!(\n\n \"{}\",\n\n ron::ser::to_string_pretty(&out, ron::ser::PrettyConfig::default())?\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "piskel2amethyst/src/main.rs", "rank": 79, "score": 2.7050581084383034 } ]
Rust
examples/demo.rs
alvinhochun/conrod_floatwin
c29c2668bdc4408fce802bd5cc6e44ef05dd82b2
use conrod_core::{ widget, widget_ids, Borderable, Colorable, Labelable, Positionable, Sizeable, Widget, }; use conrod_floatwin::windowing_area::{ layout::{WinId, WindowingState}, WindowBuilder, WindowingArea, WindowingContext, }; use glium::Surface; mod support; fn main() { const WIDTH: u32 = 800; const HEIGHT: u32 = 600; let mut events_loop = glium::glutin::EventsLoop::new(); let window = glium::glutin::WindowBuilder::new() .with_title("conrod_floatwin demo") .with_dimensions((WIDTH, HEIGHT).into()); let context = glium::glutin::ContextBuilder::new() .with_vsync(true) .with_multisampling(4); let display = glium::Display::new(window, context, &events_loop).unwrap(); let display = support::GliumDisplayWinitWrapper(display); let mut current_hidpi_factor = display.0.gl_window().get_hidpi_factor(); let mut ui = conrod_core::UiBuilder::new([WIDTH as f64, HEIGHT as f64]).build(); let font_path = "./assets/fonts/NotoSans/NotoSans-Regular.ttf"; ui.fonts.insert_from_file(font_path).unwrap(); let mut renderer = conrod_glium::Renderer::new(&display.0).unwrap(); let image_map = conrod_core::image::Map::<glium::texture::Texture2d>::new(); let ids = &mut Ids::new(ui.widget_id_generator()); let mut win_state = WindowingState::new(); let win_ids = WinIds { test1: win_state.next_id(), test2: win_state.next_id(), }; let mut ui_state = UiState { enable_debug: false, win_state, win_ids, array_wins: vec![], reusable_win_ids: vec![], next_array_win_idx: 1, hide_test2: false, }; let mut event_loop = support::EventLoop::new(); 'main: loop { for event in event_loop.next(&mut events_loop) { if let Some(event) = support::convert_event(event.clone(), &display) { ui.handle_event(event); event_loop.needs_update(); } match event { glium::glutin::Event::WindowEvent { event, .. } => match event { glium::glutin::WindowEvent::CloseRequested | glium::glutin::WindowEvent::KeyboardInput { input: glium::glutin::KeyboardInput { virtual_keycode: Some(glium::glutin::VirtualKeyCode::Escape), .. }, .. 
} => break 'main, glium::glutin::WindowEvent::HiDpiFactorChanged(hidpi_factor) => { current_hidpi_factor = hidpi_factor; } glium::glutin::WindowEvent::KeyboardInput { input: glium::glutin::KeyboardInput { virtual_keycode: Some(glium::glutin::VirtualKeyCode::F11), state: glium::glutin::ElementState::Pressed, .. }, .. } => match display.0.gl_window().window().get_fullscreen() { Some(_) => display.0.gl_window().window().set_fullscreen(None), None => display.0.gl_window().window().set_fullscreen(Some( display.0.gl_window().window().get_current_monitor(), )), }, glium::glutin::WindowEvent::KeyboardInput { input: glium::glutin::KeyboardInput { virtual_keycode: Some(glium::glutin::VirtualKeyCode::F12), state: glium::glutin::ElementState::Pressed, .. }, .. } => ui_state.enable_debug = !ui_state.enable_debug, _ => (), }, _ => (), } } set_widgets(ui.set_widgets(), ids, current_hidpi_factor, &mut ui_state); display .0 .gl_window() .window() .set_cursor(support::convert_mouse_cursor(ui.mouse_cursor())); if let Some(primitives) = ui.draw_if_changed() { renderer.fill(&display.0, primitives, &image_map); let mut target = display.0.draw(); target.clear_color(0.0, 0.0, 0.0, 1.0); renderer.draw(&display.0, &mut target, &image_map).unwrap(); target.finish().unwrap(); } } } widget_ids! 
{ struct Ids { backdrop, windowing_area, text, button, toggle, } } struct WinIds { test1: WinId, test2: WinId, } struct UiState { enable_debug: bool, win_state: WindowingState, win_ids: WinIds, array_wins: Vec<ArrayWinState>, reusable_win_ids: Vec<WinId>, next_array_win_idx: usize, hide_test2: bool, } struct ArrayWinState { index: usize, win_id: WinId, } fn set_widgets( ref mut ui: conrod_core::UiCell, ids: &mut Ids, hidpi_factor: f64, state: &mut UiState, ) { widget::Rectangle::fill(ui.window_dim()) .color(conrod_core::color::BLUE) .middle() .set(ids.backdrop, ui); let mut win_ctx: WindowingContext = WindowingArea::new(&mut state.win_state, hidpi_factor) .with_debug(state.enable_debug) .set(ids.windowing_area, ui); let builder = WindowBuilder::new() .title("Test1") .is_collapsible(false) .initial_position([100.0, 100.0]) .initial_size([150.0, 100.0]) .min_size([200.0, 50.0]); if let (_, Some(win)) = win_ctx.make_window(builder, state.win_ids.test1, ui) { let c = widget::Canvas::new() .border(0.0) .color(conrod_core::color::LIGHT_YELLOW) .scroll_kids(); let (container_id, _) = win.set(c, ui); widget::Text::new("Hello World!") .color(conrod_core::color::RED) .font_size(32) .parent(container_id) .set(ids.text, ui); let clicked = widget::Toggle::new(state.hide_test2) .label(if state.hide_test2 { "Test2:\nHidden" } else { "Test2:\nShown" }) .label_color(conrod_core::color::LIGHT_BLUE) .w_h(100.0, 50.0) .up(8.0) .parent(container_id) .set(ids.toggle, ui); state.hide_test2 = clicked.last().unwrap_or(state.hide_test2); } let mut add_win = 0; let builder = WindowBuilder::new() .title("Test2") .is_hidden(state.hide_test2) .initial_position([150.0, 150.0]) .initial_size([200.0, 200.0]); if let (_, Some(win)) = win_ctx.make_window(builder, state.win_ids.test2, ui) { let c = widget::Canvas::new() .border(0.0) .color(conrod_core::color::LIGHT_BLUE) .scroll_kids(); let (container_id, _) = win.set(c, ui); let clicks = widget::Button::new() .label("Click me") .w_h(100.0, 50.0) 
.middle_of(container_id) .parent(container_id) .set(ids.button, ui); for _ in clicks { println!("Clicked me!"); add_win += 1; } } let mut array_win_to_close = vec![]; for (i, array_win_state) in state.array_wins.iter().enumerate() { let title = format!("Test multi - {}", array_win_state.index); let builder = WindowBuilder::new() .title(&title) .is_closable(true) .initial_size([150.0, 100.0]); let (event, win) = win_ctx.make_window(builder, array_win_state.win_id, ui); if let Some(win) = win { let c = widget::Canvas::new() .border(0.0) .color(conrod_core::color::LIGHT_CHARCOAL) .scroll_kids(); let (_container_id, _) = win.set(c, ui); } if event.close_clicked.was_clicked() { array_win_to_close.push(i); } } std::mem::drop(win_ctx); while add_win > 0 { let win_state = &mut state.win_state; let win_id = state .reusable_win_ids .pop() .unwrap_or_else(|| win_state.next_id()); state.array_wins.push(ArrayWinState { index: state.next_array_win_idx, win_id, }); state.next_array_win_idx += 1; add_win -= 1; } for i in array_win_to_close.into_iter().rev() { let s = state.array_wins.swap_remove(i); state.reusable_win_ids.push(s.win_id); } }
use conrod_core::{ widget, widget_ids, Borderable, Colorable, Labelable, Positionable, Sizeable, Widget, }; use conrod_floatwin::windowing_area::{ layout::{WinId, WindowingState}, WindowBuilder, WindowingArea, WindowingContext, }; use glium::Surface; mod support; fn main() { const WIDTH: u32 = 800; const HEIGHT: u32 = 600; let mut events_loop = glium::glutin::EventsLoop::new();
let context = glium::glutin::ContextBuilder::new() .with_vsync(true) .with_multisampling(4); let display = glium::Display::new(window, context, &events_loop).unwrap(); let display = support::GliumDisplayWinitWrapper(display); let mut current_hidpi_factor = display.0.gl_window().get_hidpi_factor(); let mut ui = conrod_core::UiBuilder::new([WIDTH as f64, HEIGHT as f64]).build(); let font_path = "./assets/fonts/NotoSans/NotoSans-Regular.ttf"; ui.fonts.insert_from_file(font_path).unwrap(); let mut renderer = conrod_glium::Renderer::new(&display.0).unwrap(); let image_map = conrod_core::image::Map::<glium::texture::Texture2d>::new(); let ids = &mut Ids::new(ui.widget_id_generator()); let mut win_state = WindowingState::new(); let win_ids = WinIds { test1: win_state.next_id(), test2: win_state.next_id(), }; let mut ui_state = UiState { enable_debug: false, win_state, win_ids, array_wins: vec![], reusable_win_ids: vec![], next_array_win_idx: 1, hide_test2: false, }; let mut event_loop = support::EventLoop::new(); 'main: loop { for event in event_loop.next(&mut events_loop) { if let Some(event) = support::convert_event(event.clone(), &display) { ui.handle_event(event); event_loop.needs_update(); } match event { glium::glutin::Event::WindowEvent { event, .. } => match event { glium::glutin::WindowEvent::CloseRequested | glium::glutin::WindowEvent::KeyboardInput { input: glium::glutin::KeyboardInput { virtual_keycode: Some(glium::glutin::VirtualKeyCode::Escape), .. }, .. } => break 'main, glium::glutin::WindowEvent::HiDpiFactorChanged(hidpi_factor) => { current_hidpi_factor = hidpi_factor; } glium::glutin::WindowEvent::KeyboardInput { input: glium::glutin::KeyboardInput { virtual_keycode: Some(glium::glutin::VirtualKeyCode::F11), state: glium::glutin::ElementState::Pressed, .. }, .. 
} => match display.0.gl_window().window().get_fullscreen() { Some(_) => display.0.gl_window().window().set_fullscreen(None), None => display.0.gl_window().window().set_fullscreen(Some( display.0.gl_window().window().get_current_monitor(), )), }, glium::glutin::WindowEvent::KeyboardInput { input: glium::glutin::KeyboardInput { virtual_keycode: Some(glium::glutin::VirtualKeyCode::F12), state: glium::glutin::ElementState::Pressed, .. }, .. } => ui_state.enable_debug = !ui_state.enable_debug, _ => (), }, _ => (), } } set_widgets(ui.set_widgets(), ids, current_hidpi_factor, &mut ui_state); display .0 .gl_window() .window() .set_cursor(support::convert_mouse_cursor(ui.mouse_cursor())); if let Some(primitives) = ui.draw_if_changed() { renderer.fill(&display.0, primitives, &image_map); let mut target = display.0.draw(); target.clear_color(0.0, 0.0, 0.0, 1.0); renderer.draw(&display.0, &mut target, &image_map).unwrap(); target.finish().unwrap(); } } } widget_ids! { struct Ids { backdrop, windowing_area, text, button, toggle, } } struct WinIds { test1: WinId, test2: WinId, } struct UiState { enable_debug: bool, win_state: WindowingState, win_ids: WinIds, array_wins: Vec<ArrayWinState>, reusable_win_ids: Vec<WinId>, next_array_win_idx: usize, hide_test2: bool, } struct ArrayWinState { index: usize, win_id: WinId, } fn set_widgets( ref mut ui: conrod_core::UiCell, ids: &mut Ids, hidpi_factor: f64, state: &mut UiState, ) { widget::Rectangle::fill(ui.window_dim()) .color(conrod_core::color::BLUE) .middle() .set(ids.backdrop, ui); let mut win_ctx: WindowingContext = WindowingArea::new(&mut state.win_state, hidpi_factor) .with_debug(state.enable_debug) .set(ids.windowing_area, ui); let builder = WindowBuilder::new() .title("Test1") .is_collapsible(false) .initial_position([100.0, 100.0]) .initial_size([150.0, 100.0]) .min_size([200.0, 50.0]); if let (_, Some(win)) = win_ctx.make_window(builder, state.win_ids.test1, ui) { let c = widget::Canvas::new() .border(0.0) 
.color(conrod_core::color::LIGHT_YELLOW) .scroll_kids(); let (container_id, _) = win.set(c, ui); widget::Text::new("Hello World!") .color(conrod_core::color::RED) .font_size(32) .parent(container_id) .set(ids.text, ui); let clicked = widget::Toggle::new(state.hide_test2) .label(if state.hide_test2 { "Test2:\nHidden" } else { "Test2:\nShown" }) .label_color(conrod_core::color::LIGHT_BLUE) .w_h(100.0, 50.0) .up(8.0) .parent(container_id) .set(ids.toggle, ui); state.hide_test2 = clicked.last().unwrap_or(state.hide_test2); } let mut add_win = 0; let builder = WindowBuilder::new() .title("Test2") .is_hidden(state.hide_test2) .initial_position([150.0, 150.0]) .initial_size([200.0, 200.0]); if let (_, Some(win)) = win_ctx.make_window(builder, state.win_ids.test2, ui) { let c = widget::Canvas::new() .border(0.0) .color(conrod_core::color::LIGHT_BLUE) .scroll_kids(); let (container_id, _) = win.set(c, ui); let clicks = widget::Button::new() .label("Click me") .w_h(100.0, 50.0) .middle_of(container_id) .parent(container_id) .set(ids.button, ui); for _ in clicks { println!("Clicked me!"); add_win += 1; } } let mut array_win_to_close = vec![]; for (i, array_win_state) in state.array_wins.iter().enumerate() { let title = format!("Test multi - {}", array_win_state.index); let builder = WindowBuilder::new() .title(&title) .is_closable(true) .initial_size([150.0, 100.0]); let (event, win) = win_ctx.make_window(builder, array_win_state.win_id, ui); if let Some(win) = win { let c = widget::Canvas::new() .border(0.0) .color(conrod_core::color::LIGHT_CHARCOAL) .scroll_kids(); let (_container_id, _) = win.set(c, ui); } if event.close_clicked.was_clicked() { array_win_to_close.push(i); } } std::mem::drop(win_ctx); while add_win > 0 { let win_state = &mut state.win_state; let win_id = state .reusable_win_ids .pop() .unwrap_or_else(|| win_state.next_id()); state.array_wins.push(ArrayWinState { index: state.next_array_win_idx, win_id, }); state.next_array_win_idx += 1; add_win -= 1; } for 
i in array_win_to_close.into_iter().rev() { let s = state.array_wins.swap_remove(i); state.reusable_win_ids.push(s.win_id); } }
let window = glium::glutin::WindowBuilder::new() .with_title("conrod_floatwin demo") .with_dimensions((WIDTH, HEIGHT).into());
assignment_statement
[ { "content": "#![allow(dead_code)]\n\n\n\nuse glium;\n\nuse std;\n\n\n\npub struct GliumDisplayWinitWrapper(pub glium::Display);\n\n\n\nimpl conrod_winit::WinitWindow for GliumDisplayWinitWrapper {\n\n fn get_inner_size(&self) -> Option<(u32, u32)> {\n\n self.0.gl_window().get_inner_size().map(Into::into)\n\n }\n\n fn hidpi_factor(&self) -> f32 {\n\n self.0.gl_window().get_hidpi_factor() as _\n\n }\n\n}\n\n\n\n/// In most of the examples the `glutin` crate is used for providing the window context and\n\n/// events while the `glium` crate is used for displaying `conrod_core::render::Primitives` to the\n\n/// screen.\n\n///\n", "file_path": "examples/support/mod.rs", "rank": 2, "score": 59638.64232765747 }, { "content": "\n\n self.ui_needs_update = false;\n\n self.last_update = std::time::Instant::now();\n\n\n\n events\n\n }\n\n\n\n /// Notifies the event loop that the `Ui` requires another update whether or not there are any\n\n /// pending events.\n\n ///\n\n /// This is primarily used on the occasion that some part of the `Ui` is still animating and\n\n /// requires further updates to do so.\n\n pub fn needs_update(&mut self) {\n\n self.ui_needs_update = true;\n\n }\n\n}\n\n\n\n// Conversion functions for converting between types from glium's version of `winit` and\n\n// `conrod_core`.\n\nconrod_winit::conversion_fns!();\n", "file_path": "examples/support/mod.rs", "rank": 3, "score": 59636.3791861999 }, { "content": "/// This `Iterator`-like type simplifies some of the boilerplate involved in setting up a\n\n/// glutin+glium event loop that works efficiently with conrod.\n\npub struct EventLoop {\n\n ui_needs_update: bool,\n\n last_update: std::time::Instant,\n\n}\n\n\n\nimpl EventLoop {\n\n pub fn new() -> Self {\n\n EventLoop {\n\n last_update: std::time::Instant::now(),\n\n ui_needs_update: true,\n\n }\n\n }\n\n\n\n /// Produce an iterator yielding all available events.\n\n pub fn next(\n\n &mut self,\n\n events_loop: &mut glium::glutin::EventsLoop,\n\n ) -> 
Vec<glium::glutin::Event> {\n", "file_path": "examples/support/mod.rs", "rank": 4, "score": 59634.99357313451 }, { "content": " // We don't want to loop any faster than 60 FPS, so wait until it has been at least 16ms\n\n // since the last yield.\n\n let last_update = self.last_update;\n\n let sixteen_ms = std::time::Duration::from_millis(16);\n\n let duration_since_last_update = std::time::Instant::now().duration_since(last_update);\n\n if duration_since_last_update < sixteen_ms {\n\n std::thread::sleep(sixteen_ms - duration_since_last_update);\n\n }\n\n\n\n // Collect all pending events.\n\n let mut events = Vec::new();\n\n events_loop.poll_events(|event| events.push(event));\n\n\n\n // If there are no events and the `Ui` does not need updating, wait for the next event.\n\n if events.is_empty() && !self.ui_needs_update {\n\n events_loop.run_forever(|event| {\n\n events.push(event);\n\n glium::glutin::ControlFlow::Break\n\n });\n\n }\n", "file_path": "examples/support/mod.rs", "rank": 5, "score": 59633.871784735944 }, { "content": "fn interaction_and_times_triggered(button_id: widget::Id, ui: &UiCell) -> (Interaction, u16) {\n\n let input = ui.widget_input(button_id);\n\n let mouse_interaction = input.mouse().map_or(Interaction::Idle, |mouse| {\n\n if mouse.buttons.left().is_down() {\n\n if ui.global_input().current.widget_under_mouse == Some(button_id) {\n\n Interaction::Press\n\n } else {\n\n Interaction::Idle\n\n }\n\n } else {\n\n Interaction::Hover\n\n }\n\n });\n\n let interaction = match mouse_interaction {\n\n Interaction::Idle | Interaction::Hover => {\n\n let is_touch_press = ui\n\n .global_input()\n\n .current\n\n .touch\n\n .values()\n", "file_path": "src/classic_button.rs", "rank": 6, "score": 55795.984376146545 }, { "content": "fn polygon_to_triangle_points<P, Iter>(mut points: Iter) -> impl Iterator<Item = [P; 3]>\n\nwhere\n\n P: Copy,\n\n Iter: Iterator<Item = P>,\n\n{\n\n let first = points.next();\n\n let mut first_and_prev = 
first.and_then(|first| points.next().map(|second| (first, second)));\n\n std::iter::from_fn(move || {\n\n first_and_prev\n\n .as_mut()\n\n .and_then(|&mut (first, ref mut prev)| {\n\n points.next().map(|point| {\n\n let triangle = [first, *prev, point];\n\n *prev = point;\n\n triangle\n\n })\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 7, "score": 52660.79400477471 }, { "content": "fn make_frame(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n border_thickness: f64,\n\n frame_color: FrameColor,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<widget::triangles::ColoredPoint>> {\n\n // The frame is constructed from 4 L-shapes and a rectangle, laid out as\n\n // follow:\n\n //\n\n // ________________\n\n // | ____________| |\n\n // | | ________| | |\n\n // | | | | | |\n\n // | | | | | |\n\n // | | | | | |\n\n // | |_|________| | |\n\n // |_|____________| |\n\n // |________________|\n\n\n\n let lower_a_color = frame_color.lower_a;\n", "file_path": "src/classic_frame.rs", "rank": 8, "score": 42644.946501332684 }, { "content": "fn make_l_shape_polygon(\n\n origin: [f64; 2],\n\n extents: [f64; 2],\n\n thickness: f64,\n\n) -> impl Iterator<Item = [f64; 2]> {\n\n // Imagine a L shape like the following:\n\n //\n\n // a ________ b\n\n // | ______|\n\n // | | d c\n\n // | |\n\n // |_|\n\n // f e\n\n //\n\n // The origin represents point `a`.\n\n // The extents represents `[b.x, f.y]`.\n\n // The thickness represents `abs(c - b)` and `abs(e - f)`.\n\n //\n\n // We produce points in the alphabetical order `a` to `f`. 
This order is\n\n // suitable for use with simple fan triangluation.\n", "file_path": "src/classic_frame.rs", "rank": 9, "score": 41416.869939145756 }, { "content": "fn window_hit_test(\n\n window_size: [f32; 2],\n\n rel_pos: [f32; 2],\n\n hidpi_factor: f32,\n\n frame_metrics: FrameMetrics,\n\n) -> Option<HitTest> {\n\n let [log_w, log_h] = window_size;\n\n let [log_x, log_y] = rel_pos;\n\n if log_x < -0.01 || log_y < -0.01 || log_x > log_w + 0.01 || log_y > log_h + 0.01 {\n\n return None;\n\n }\n\n let x = (log_x * hidpi_factor).round() as i32;\n\n let y = (log_y * hidpi_factor).round() as i32;\n\n let w = (log_w * hidpi_factor).round() as i32;\n\n let h = (log_h * hidpi_factor).round() as i32;\n\n\n\n let border_thickness = (frame_metrics.border_thickness as f32 * hidpi_factor).round() as i32;\n\n let title_bar_height = (frame_metrics.title_bar_height as f32 * hidpi_factor).round() as i32;\n\n\n\n let window_part_x = if x <= border_thickness {\n", "file_path": "src/windowing_area/layout.rs", "rank": 10, "score": 40296.90523159877 }, { "content": "fn make_close_icon_shape(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n) -> impl Iterator<Item = widget::triangles::Triangle<conrod_core::Point>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = top_right;\n\n let icon_width = x_e - x_o;\n\n let icon_height = y_e - y_o;\n\n let (sx, sy) = {\n\n if icon_width > icon_height {\n\n let sx = icon_width / 5.0;\n\n (sx, sx - (icon_width - icon_height))\n\n } else {\n\n let sy = icon_height / 5.0;\n\n (sy - (icon_height - icon_width), sy)\n\n }\n\n };\n\n let x_mid_offset = (icon_width / 2.0) - sx;\n\n let y_mid_offset = (icon_height / 2.0) - sy;\n\n\n", "file_path": "src/classic_frame.rs", "rank": 11, "score": 40296.90523159877 }, { "content": "fn make_down_arrow_icon_shape(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n) -> impl Iterator<Item = widget::triangles::Triangle<conrod_core::Point>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = 
top_right;\n\n let icon_width = x_e - x_o;\n\n let icon_height = y_e - y_o;\n\n let half_width = icon_width / 2.0;\n\n let tip_shift_y = if icon_height < half_width {\n\n icon_height\n\n } else {\n\n // Add a small offset to the tip so that it won't lie exactly at the\n\n // middle of the pixel.\n\n half_width + 0.01\n\n };\n\n\n\n let triangle = [\n\n [x_o, y_e],\n\n [x_e, y_e],\n\n [x_o + half_width, y_e - tip_shift_y],\n\n ];\n\n std::iter::once(triangle).map(widget::triangles::Triangle)\n\n}\n", "file_path": "src/classic_frame.rs", "rank": 12, "score": 40296.90523159877 }, { "content": "fn make_right_arrow_icon_shape(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n) -> impl Iterator<Item = widget::triangles::Triangle<conrod_core::Point>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = top_right;\n\n let icon_width = x_e - x_o;\n\n let icon_height = y_e - y_o;\n\n let half_height = icon_height / 2.0;\n\n let tip_shift_x = if icon_width < half_height {\n\n icon_width\n\n } else {\n\n // Add a small offset to the tip so that it won't lie exactly at the\n\n // middle of the pixel.\n\n half_height + 0.01\n\n };\n\n\n\n let triangle = [\n\n [x_o, y_o],\n\n [x_o, y_e],\n", "file_path": "src/classic_frame.rs", "rank": 13, "score": 39271.378082303476 }, { "content": "use conrod_core::{widget, Widget, WidgetCommon};\n\n\n\n#[derive(Clone, Copy, Debug, WidgetCommon)]\n\npub struct EmptyWidget {\n\n #[conrod(common_builder)]\n\n pub common: widget::CommonBuilder,\n\n}\n\n\n\nimpl EmptyWidget {\n\n pub fn new() -> Self {\n\n EmptyWidget {\n\n common: widget::CommonBuilder::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl Widget for EmptyWidget {\n\n type State = ();\n\n type Style = ();\n\n type Event = ();\n\n\n\n fn init_state(&self, _: conrod_core::widget::id::Generator) -> Self::State {}\n\n\n\n fn style(&self) -> Self::Style {}\n\n\n\n fn update(self, _: conrod_core::widget::UpdateArgs<Self>) -> Self::Event {}\n\n}\n", "file_path": "src/empty_widget.rs", 
"rank": 14, "score": 29851.42592375371 }, { "content": "pub fn conrod_point_to_layout_pos(point: Point, win_area_rect: Rect) -> [f32; 2] {\n\n let x = (point[0] - win_area_rect.left()) as f32;\n\n let y = (win_area_rect.top() - point[1]) as f32;\n\n [x, y]\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 15, "score": 27377.94921160641 }, { "content": "pub fn layout_pos_to_conrod_point(pos: [f64; 2], win_area_rect: Rect) -> Point {\n\n win_area_rect.pad_left(pos[0]).pad_top(pos[1]).top_left()\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 16, "score": 27377.94921160641 }, { "content": "struct FrameColor {\n\n lower_a: color::Rgba,\n\n upper_a: color::Rgba,\n\n lower_b: color::Rgba,\n\n upper_b: color::Rgba,\n\n inside: color::Rgba,\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 17, "score": 27020.203534658856 }, { "content": "pub fn win_rect_to_conrod_rect(win_rect: [f64; 4], win_area_rect: Rect) -> Rect {\n\n let [x, y, w, h] = win_rect;\n\n let [left, top] = win_area_rect.top_left();\n\n let x1 = left + x;\n\n let y1 = top - y;\n\n let x2 = left + x + w;\n\n let y2 = top - y - h;\n\n conrod_core::Rect::from_corners([x1, y1], [x2, y2])\n\n}\n", "file_path": "src/util.rs", "rank": 18, "score": 26772.624693283666 }, { "content": "fn make_rect(origin: [f64; 2], extents: [f64; 2]) -> impl Iterator<Item = [f64; 2]> {\n\n let [x_o, y_o] = origin;\n\n let [x_e, y_e] = extents;\n\n value_iter_chain![[x_o, y_o], [x_o, y_e], [x_e, y_e], [x_e, y_o]]\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 19, "score": 26228.498969727414 }, { "content": "use super::layout;\n\nuse layout::FrameMetrics;\n\n\n\nuse crate::{classic_button, classic_frame, empty_widget::EmptyWidget};\n\nuse conrod_core::{\n\n builder_methods, color,\n\n position::{self},\n\n text, widget, widget_ids, Borderable, Color, Colorable, FontSize, Labelable, Positionable,\n\n Sizeable, Widget, WidgetCommon, WidgetStyle,\n\n};\n\nuse widget::KidAreaArgs;\n\n\n\n#[derive(WidgetCommon)]\n\npub 
struct WindowFrame<'a> {\n\n #[conrod(common_builder)]\n\n pub common: widget::CommonBuilder,\n\n pub style: Style,\n\n pub title: &'a str,\n\n pub is_focused: bool,\n\n pub is_collapsed: bool,\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 21, "score": 17.19186119166519 }, { "content": "use crate::{empty_widget::EmptyWidget, util};\n\nuse layout::{FrameMetrics, WinId, WindowingState};\n\nuse window_frame::WindowFrame;\n\n\n\nuse conrod_core::{\n\n cursor,\n\n position::{self, Place},\n\n widget, widget_ids, Position, Positionable, Sizeable, Ui, UiCell, Widget, WidgetCommon,\n\n WidgetStyle,\n\n};\n\n\n\npub mod layout;\n\n\n\nmod debug;\n\nmod window_frame;\n\n\n\n#[derive(WidgetCommon)]\n\npub struct WindowingArea<'a> {\n\n #[conrod(common_builder)]\n\n pub common: widget::CommonBuilder,\n", "file_path": "src/windowing_area.rs", "rank": 22, "score": 15.190226938092465 }, { "content": "use super::layout::{WinId, WindowingState};\n\nuse crate::util;\n\nuse conrod_core::{\n\n position, widget, widget_ids, Colorable, Position, Positionable, Sizeable, Ui, Widget,\n\n WidgetCommon, WidgetStyle,\n\n};\n\nuse position::Place;\n\n\n\n#[derive(WidgetCommon)]\n\npub struct DebugWidget<'a> {\n\n #[conrod(common_builder)]\n\n pub common: widget::CommonBuilder,\n\n pub style: Style,\n\n pub windowing_state: &'a WindowingState,\n\n pub debug_win_id: WinId,\n\n pub hidpi_factor: f64,\n\n}\n\n\n\npub struct State {\n\n ids: Ids,\n", "file_path": "src/windowing_area/debug.rs", "rank": 23, "score": 13.893644981331521 }, { "content": " color_left = color::rgba(0.0, 0.0, 0.5, 1.0);\n\n color_right = color::rgba(0.05, 0.5, 0.8, 1.0);\n\n } else {\n\n color_left = color::rgba(0.5, 0.5, 0.5, 1.0);\n\n color_right = color::rgba(0.7, 0.7, 0.7, 1.0);\n\n }\n\n let triangles = classic_frame::make_title_bar_gradient(\n\n title_bar_rect.bottom_left(),\n\n title_bar_rect.top_right(),\n\n color_left,\n\n color_right,\n\n );\n\n widget::Triangles::multi_color(triangles)\n\n 
.with_bounding_rect(title_bar_rect)\n\n .graphics_for(id)\n\n .place_on_kid_area(false)\n\n .set(state.ids.title_bar_box, &mut ui);\n\n\n\n let button_width = frame_metrics.title_button_width;\n\n let button_height =\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 25, "score": 11.72399641995497 }, { "content": " }\n\n }\n\n self.set_win_normal_rect_int(win_id, rect);\n\n }\n\n\n\n pub(crate) fn ensure_all_win_in_area(&mut self) {\n\n let border_thickness = self.frame_metrics.border_thickness as f32;\n\n let title_bar_height = self.frame_metrics.title_bar_height as f32;\n\n let collapsed_win_width = self.frame_metrics.collapsed_win_width as f32;\n\n\n\n for &mut WindowState {\n\n rect: ref mut window_rect,\n\n min_size,\n\n is_hidden,\n\n is_collapsed,\n\n ..\n\n } in self.window_states.iter_mut().filter_map(|x| x.as_mut())\n\n {\n\n if is_hidden {\n\n continue;\n", "file_path": "src/windowing_area/layout.rs", "rank": 26, "score": 11.55097660318109 }, { "content": " let state: &mut widget::State<State> = state;\n\n let Self {\n\n style,\n\n title,\n\n is_focused,\n\n is_collapsed,\n\n is_collapsible,\n\n is_closable,\n\n frame_metrics,\n\n hidpi_factor,\n\n ..\n\n } = self;\n\n let style: Style = style;\n\n\n\n // Draw a classic frame using triangles:\n\n let base_color = style.frame_color(ui.theme());\n\n let triangles = classic_frame::make_panel_frame(\n\n rect.bottom_left(),\n\n rect.top_right(),\n\n // The frame border is 4 units, but the actual panel frame border\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 27, "score": 11.46309023524304 }, { "content": " pub is_collapsible: bool,\n\n pub is_closable: bool,\n\n pub(crate) frame_metrics: FrameMetrics,\n\n pub hidpi_factor: f64,\n\n}\n\n\n\npub struct State {\n\n ids: Ids,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Default, PartialEq, WidgetStyle)]\n\npub struct Style {\n\n /// The color of the window frame.\n\n #[conrod(default = \"theme.background_color\")]\n\n pub frame_color: 
Option<Color>,\n\n // /// The width of the border surrounding the Canvas' rectangle.\n\n // #[conrod(default = \"theme.border_width\")]\n\n // #[conrod(default = \"2.0\")]\n\n // pub border: Option<Scalar>,\n\n // /// The color of the Canvas' border.\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 28, "score": 11.365105096433762 }, { "content": " let border_thickness = self.frame_metrics.border_thickness as f32;\n\n let title_bar_height = self.frame_metrics.title_bar_height as f32;\n\n let collapsed_win_width = self.frame_metrics.collapsed_win_width as f32;\n\n Some(RectI {\n\n x: (rect.x * hidpi_factor).round() as i32,\n\n y: (rect.y * hidpi_factor).round() as i32,\n\n w: (collapsed_win_width * hidpi_factor).round() as i32,\n\n h: ((title_bar_height + border_thickness * 2.0) * hidpi_factor).round() as i32,\n\n })\n\n } else {\n\n self.win_normal_rect_int(win_id)\n\n }\n\n }\n\n\n\n /// Retrieves the x, y, width and height of a window for display. The\n\n /// dimensions are adjusted to align to the physical pixel grid. 
The\n\n /// calculations use f64 so that the results are precise enough for GUI\n\n /// toolkits that use f64 internally.\n\n pub fn win_display_rect_f64(&self, win_id: WinId) -> Option<[f64; 4]> {\n\n let WinId(win_idx) = win_id;\n", "file_path": "src/windowing_area/layout.rs", "rank": 29, "score": 10.846245643711363 }, { "content": "\n\n fn kid_area(&self, args: KidAreaArgs<Self>) -> widget::KidArea {\n\n let rect = args\n\n .rect\n\n .pad(self.frame_metrics.border_thickness)\n\n .pad_top(self.frame_metrics.title_bar_height + self.frame_metrics.gap_below_title_bar);\n\n widget::KidArea {\n\n rect,\n\n pad: conrod_core::position::Padding::none(),\n\n }\n\n }\n\n\n\n fn update(self, args: widget::UpdateArgs<Self>) -> Self::Event {\n\n let widget::UpdateArgs {\n\n id,\n\n state,\n\n rect,\n\n mut ui,\n\n ..\n\n } = args;\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 30, "score": 10.790623995444683 }, { "content": " // is only 2 units.\n\n frame_metrics.border_thickness / 2.0,\n\n base_color,\n\n );\n\n widget::Triangles::multi_color(triangles)\n\n .with_bounding_rect(rect)\n\n .middle_of(id)\n\n .graphics_for(id)\n\n .place_on_kid_area(false)\n\n .set(state.ids.frame, &mut ui);\n\n\n\n let left = rect.pad_left(frame_metrics.border_thickness).left();\n\n let right = rect.pad_right(frame_metrics.border_thickness).right();\n\n let top = rect.pad_top(frame_metrics.border_thickness).top();\n\n let bottom = top - frame_metrics.title_bar_height;\n\n let title_bar_rect = conrod_core::Rect::from_corners([left, bottom], [right, top]);\n\n\n\n // Draw a title bar rect:\n\n let (color_left, color_right);\n\n if is_focused {\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 31, "score": 10.712683160130048 }, { "content": " let WinId(win_idx) = win_id;\n\n let win = &mut self.window_states[win_idx as usize];\n\n if win.is_none() {\n\n let double_border = self.frame_metrics.border_thickness as f32 * 2.0;\n\n let additional_height = 
self.frame_metrics.title_bar_height as f32\n\n + self.frame_metrics.gap_below_title_bar as f32;\n\n\n\n let initial_state = init();\n\n let min_size: dim::SizeF = initial_state\n\n .min_size\n\n .unwrap_or_else(|| [150.0, 50.0])\n\n .into();\n\n let w = initial_state.client_size[0].max(min_size.w) + double_border;\n\n let h =\n\n initial_state.client_size[1].max(min_size.h) + double_border + additional_height;\n\n let next_auto_pos = &mut self.next_auto_position;\n\n let area_h = self.area_size[1];\n\n let [x, y] = initial_state.position.unwrap_or_else(|| {\n\n let mut pos = *next_auto_pos;\n\n if pos[1] + h > area_h {\n", "file_path": "src/windowing_area/layout.rs", "rank": 32, "score": 10.610755608799124 }, { "content": " [x_o + tip_shift_x, y_o + half_height],\n\n ];\n\n std::iter::once(triangle).map(widget::triangles::Triangle)\n\n}\n\n\n\npub(super) fn make_collapse_button_icon(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n hidpi_factor: f64,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<conrod_core::Point>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = top_right;\n\n let width = x_e - x_o;\n\n let height = y_e - y_o;\n\n let px_width = (width * hidpi_factor).round();\n\n let px_height = (height * hidpi_factor).round();\n\n let (icon_px_width, icon_px_height) = {\n\n let shape_width_from_width = (px_width / 11.0 * 7.0).round();\n\n let shape_height_from_width = (shape_width_from_width / 7.0 * 4.0).round();\n\n let shape_height_from_height = (px_height / 9.0 * 4.0).round();\n", "file_path": "src/classic_frame.rs", "rank": 34, "score": 10.505373433550028 }, { "content": " }\n\n let width_to_test = if is_collapsed {\n\n collapsed_win_width - border_thickness\n\n } else {\n\n collapsed_win_width.min(window_rect.w) - border_thickness\n\n };\n\n let display_width = if is_collapsed {\n\n collapsed_win_width\n\n } else {\n\n window_rect.w\n\n };\n\n if window_rect.x <= width_to_test - display_width - border_thickness {\n\n window_rect.x 
= width_to_test - display_width - border_thickness;\n\n } else if window_rect.x > self.area_size[0] - width_to_test {\n\n window_rect.x = self.area_size[0] - width_to_test;\n\n }\n\n if window_rect.y <= -border_thickness {\n\n window_rect.y = -border_thickness;\n\n } else if window_rect.y > self.area_size[1] - (border_thickness + title_bar_height) {\n\n window_rect.y = self.area_size[1] - (border_thickness + title_bar_height);\n", "file_path": "src/windowing_area/layout.rs", "rank": 35, "score": 10.459001196987977 }, { "content": " [x_o + icon_width, y_o + sy],\n\n ]);\n\n iter_chain![part1, part2, part3,].map(widget::triangles::Triangle)\n\n}\n\n\n\npub(super) fn make_uncollapse_button_icon(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n hidpi_factor: f64,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<conrod_core::Point>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = top_right;\n\n let width = x_e - x_o;\n\n let height = y_e - y_o;\n\n let px_width = (width * hidpi_factor).round();\n\n let px_height = (height * hidpi_factor).round();\n\n let (icon_px_width, icon_px_height) = {\n\n let shape_width_from_width = (px_width / 11.0 * 4.0).round();\n\n let shape_height_from_width = (shape_width_from_width / 4.0 * 7.0).round();\n\n let shape_height_from_height = (px_height / 9.0 * 7.0).round();\n", "file_path": "src/classic_frame.rs", "rank": 36, "score": 10.448006988742595 }, { "content": "}\n\n\n\npub(super) fn make_close_button_icon(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n hidpi_factor: f64,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<conrod_core::Point>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = top_right;\n\n let width = x_e - x_o;\n\n let height = y_e - y_o;\n\n let px_width = (width * hidpi_factor).round();\n\n let px_height = (height * hidpi_factor).round();\n\n let (icon_px_width, icon_px_height) = {\n\n let shape_width_from_width = (px_width / 11.0 * 8.0).round();\n\n let 
shape_height_from_width = (shape_width_from_width / 8.0 * 7.0).round();\n\n let shape_height_from_height = (px_height / 9.0 * 7.0).round();\n\n if shape_height_from_height < shape_height_from_width {\n\n let shape_width_from_height = (shape_height_from_height / 7.0 * 8.0).round();\n\n (shape_width_from_height, shape_height_from_height)\n", "file_path": "src/classic_frame.rs", "rank": 37, "score": 10.114185024725426 }, { "content": " if shape_height_from_height < shape_height_from_width {\n\n let shape_width_from_height = (shape_height_from_height / 4.0 * 7.0).round();\n\n (shape_width_from_height, shape_height_from_height)\n\n } else {\n\n (shape_width_from_width, shape_height_from_width)\n\n }\n\n };\n\n let icon_pad_left = ((px_width - icon_px_width) / 2.0).round() / hidpi_factor;\n\n let icon_pad_top = {\n\n let mut pad = (px_height - icon_px_height) / 2.0;\n\n let diff = icon_px_height - icon_px_width / 2.0;\n\n if diff >= 1.0 {\n\n // This is to prevent the icon becoming imbalanced.\n\n pad += diff;\n\n }\n\n pad.round() / hidpi_factor\n\n };\n\n let icon_width = icon_px_width / hidpi_factor;\n\n let icon_height = icon_px_height / hidpi_factor;\n\n\n\n let icon_bottom_left = [x_o + icon_pad_left, y_e - icon_pad_top - icon_height];\n\n let icon_top_right = [x_o + icon_pad_left + icon_width, y_e - icon_pad_top];\n\n make_down_arrow_icon_shape(icon_bottom_left, icon_top_right)\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 38, "score": 9.528631431057164 }, { "content": " if shape_height_from_height < shape_height_from_width {\n\n let shape_width_from_height = (shape_height_from_height / 7.0 * 4.0).round();\n\n (shape_width_from_height, shape_height_from_height)\n\n } else {\n\n (shape_width_from_width, shape_height_from_width)\n\n }\n\n };\n\n let icon_pad_left = {\n\n let mut pad = (px_width - icon_px_width) / 2.0;\n\n let diff = icon_px_width - icon_px_height / 2.0;\n\n if diff >= 1.0 {\n\n // This is to prevent the icon becoming imbalanced.\n\n pad 
+= diff;\n\n }\n\n pad.round() / hidpi_factor\n\n };\n\n let icon_pad_bottom = ((px_height - icon_px_height) / 2.0).round() / hidpi_factor;\n\n let icon_width = icon_px_width / hidpi_factor;\n\n let icon_height = icon_px_height / hidpi_factor;\n\n\n\n let icon_bottom_left = [x_o + icon_pad_left, y_o + icon_pad_bottom];\n\n let icon_top_right = [\n\n x_o + icon_pad_left + icon_width,\n\n y_o + icon_pad_bottom + icon_height,\n\n ];\n\n make_right_arrow_icon_shape(icon_bottom_left, icon_top_right)\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 39, "score": 9.50938214016248 }, { "content": " upper_b,\n\n inside,\n\n };\n\n\n\n make_frame(bottom_left, top_right, border_thickness, frame_color)\n\n}\n\n\n\npub(super) fn make_button_frame(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n border_thickness: f64,\n\n base_color: color::Color,\n\n is_clicked: bool,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<widget::triangles::ColoredPoint>> {\n\n let hsla = base_color.to_hsl();\n\n let alpha = hsla.3;\n\n // The original colors are greyscale with luminance of:\n\n // 0.0, 1.0, 0.5, 0.875, 0.75\n\n // We treat the base colour as the fifth colour and scale the other colours\n\n // based on the original scales --\n", "file_path": "src/classic_frame.rs", "rank": 40, "score": 9.150110587684043 }, { "content": " let win = self.window_states[win_idx as usize].as_ref()?;\n\n if win.is_hidden {\n\n return None;\n\n }\n\n if win.is_collapsed {\n\n let rect = win.rect;\n\n let hidpi_factor = self.hidpi_factor;\n\n let border_thickness = self.frame_metrics.border_thickness;\n\n let title_bar_height = self.frame_metrics.title_bar_height;\n\n let collapsed_win_width = self.frame_metrics.collapsed_win_width;\n\n Some([\n\n (rect.x as f64 * hidpi_factor).round() / hidpi_factor,\n\n (rect.y as f64 * hidpi_factor).round() / hidpi_factor,\n\n collapsed_win_width,\n\n title_bar_height + border_thickness * 2.0,\n\n ])\n\n } else {\n\n 
self.win_normal_rect_f64(win_id)\n\n }\n\n }\n", "file_path": "src/windowing_area/layout.rs", "rank": 41, "score": 9.05683499021967 }, { "content": " make_frame(bottom_left, top_right, border_thickness, frame_color)\n\n}\n\n\n\npub(super) fn make_title_bar_gradient(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n\n color_left: color::Color,\n\n color_right: color::Color,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<widget::triangles::ColoredPoint>> {\n\n let [x_o, y_o] = bottom_left;\n\n let [x_e, y_e] = top_right;\n\n let color_left = color_left.to_rgb();\n\n let color_right = color_right.to_rgb();\n\n polygon_to_triangle_points(value_iter_chain![\n\n ([x_o, y_o], color_left),\n\n ([x_o, y_e], color_left),\n\n ([x_e, y_e], color_right),\n\n ([x_e, y_o], color_right),\n\n ])\n\n .map(widget::triangles::Triangle)\n", "file_path": "src/classic_frame.rs", "rank": 42, "score": 8.941132881607356 }, { "content": "pub use dim::{Rect, RectF, RectI};\n\n\n\nmod debug;\n\nmod dim;\n\nmod snapping;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum HitTest {\n\n Content,\n\n TitleBarOrDragArea,\n\n TopBorder,\n\n LeftBorder,\n\n RightBorder,\n\n BottomBorder,\n\n TopLeftCorner,\n\n TopRightCorner,\n\n BottomLeftCorner,\n\n BottomRightCorner,\n\n // CollapseButton,\n\n // CloseButton,\n", "file_path": "src/windowing_area/layout.rs", "rank": 43, "score": 8.876843575633398 }, { "content": " pub fn title_bar_color(mut self, color: Color) -> Self {\n\n self.style.title_bar_color = Some(color);\n\n self\n\n }\n\n}\n\n\n\nimpl<'a> Widget for WindowFrame<'a> {\n\n type State = State;\n\n type Style = Style;\n\n type Event = Event;\n\n\n\n fn init_state(&self, id_gen: widget::id::Generator) -> Self::State {\n\n State {\n\n ids: Ids::new(id_gen),\n\n }\n\n }\n\n\n\n fn style(&self) -> Self::Style {\n\n self.style.clone()\n\n }\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 44, "score": 8.846181111773085 }, { "content": " fn style(&self) -> 
Self::Style {\n\n self.style.clone()\n\n }\n\n\n\n fn is_over(&self) -> widget::IsOverFn {\n\n |_, _, _| widget::IsOver::Bool(false)\n\n }\n\n\n\n fn update(self, args: widget::UpdateArgs<Self>) -> Self::Event {\n\n let widget::UpdateArgs {\n\n id,\n\n state,\n\n rect,\n\n mut ui,\n\n ..\n\n } = args;\n\n let state: &mut conrod_core::widget::State<State> = state;\n\n let Self {\n\n windowing_state,\n\n debug_win_id: win_id,\n", "file_path": "src/windowing_area/debug.rs", "rank": 45, "score": 8.845382744720908 }, { "content": "pub mod windowing_area;\n\n\n\nmod classic_button;\n\nmod classic_frame;\n\nmod empty_widget;\n\nmod util;\n\n\n\npub use windowing_area::{\n\n layout::{WinId, WindowingState},\n\n WindowBuilder, WindowEvent, WindowSetter, WindowingArea, WindowingContext,\n\n};\n", "file_path": "src/lib.rs", "rank": 47, "score": 8.820878644899349 }, { "content": "use crate::classic_frame;\n\n\n\nuse conrod_core::{color, widget, widget_ids, Positionable, UiCell, Widget, WidgetCommon};\n\nuse widget::button::TimesClicked;\n\n\n\n#[derive(Clone, Copy, Debug, WidgetCommon)]\n\npub struct ClassicButton {\n\n #[conrod(common_builder)]\n\n pub common: widget::CommonBuilder,\n\n pub button_type: ButtonType,\n\n pub hidpi_factor: f64,\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum ButtonType {\n\n Collapse,\n\n Uncollapse,\n\n Close,\n\n}\n\n\n", "file_path": "src/classic_button.rs", "rank": 48, "score": 8.748355444213232 }, { "content": " }\n\n\n\n /// Retrieves the `Rect` of a window for display. The `Rect` is adjusted to\n\n /// align to the physical pixel grid. 
Note that since the returned `Rect`\n\n /// contains f32 dimensions, it may not suitable for use with GUI toolkits\n\n /// that use f64 internally due to the limited precision.\n\n pub fn win_display_rect(&self, win_id: WinId) -> Option<RectF> {\n\n let WinId(win_idx) = win_id;\n\n let win = self.window_states[win_idx as usize].as_ref()?;\n\n if win.is_hidden {\n\n return None;\n\n }\n\n if win.is_collapsed {\n\n let rect = win.rect;\n\n let hidpi_factor = self.hidpi_factor as f32;\n\n let border_thickness = self.frame_metrics.border_thickness as f32;\n\n let title_bar_height = self.frame_metrics.title_bar_height as f32;\n\n let collapsed_win_width = self.frame_metrics.collapsed_win_width as f32;\n\n Some(RectF {\n\n x: (rect.x * hidpi_factor).round() / hidpi_factor,\n", "file_path": "src/windowing_area/layout.rs", "rank": 49, "score": 8.589501090681187 }, { "content": " border_thickness: f64,\n\n base_color: color::Color,\n\n) -> impl Iterator<Item = widget::triangles::Triangle<widget::triangles::ColoredPoint>> {\n\n let hsla = base_color.to_hsl();\n\n let alpha = hsla.3;\n\n // The original colors are greyscale with luminance of:\n\n // 0.0, 0.875, 0.5, 1.0, 0.75\n\n // We treat the base colour as the fifth colour and scale the other colours\n\n // based on the original scales --\n\n // 0.875 = (1.0 - 0.75) / 2.0 + 0.75\n\n // 0.5 = 0.75 / 1.5\n\n let lower_a = color::Rgba(0.0, 0.0, 0.0, alpha);\n\n let upper_a = color::hsla(hsla.0, hsla.1, (1.0 - hsla.2) / 2.0 + hsla.2, alpha).to_rgb();\n\n let lower_b = color::hsla(hsla.0, hsla.1, hsla.2 / 1.5, alpha).to_rgb();\n\n let upper_b = color::Rgba(1.0, 1.0, 1.0, alpha);\n\n let inside = base_color.to_rgb();\n\n let frame_color = FrameColor {\n\n lower_a,\n\n upper_a,\n\n lower_b,\n", "file_path": "src/classic_frame.rs", "rank": 50, "score": 8.256253128657686 }, { "content": " .solid()\n\n .thickness(2.0)\n\n .color(conrod_core::color::RED.alpha(0.8))\n\n .graphics_for(id)\n\n .set(item_id, ui);\n\n }\n\n for (i, seg) 
in debug.snap_y_segments().enumerate() {\n\n let item_id = get_id!(snap_candidates_y, i);\n\n let pt1 = util::layout_pos_to_conrod_point([seg.x1 as f64, seg.y1 as f64], rect);\n\n let pt2 = util::layout_pos_to_conrod_point([seg.x2 as f64, seg.y2 as f64], rect);\n\n widget::Line::abs(pt1, pt2)\n\n .solid()\n\n .thickness(2.0)\n\n .color(conrod_core::color::GREEN.alpha(0.8))\n\n .graphics_for(id)\n\n .set(item_id, ui);\n\n }\n\n }\n\n\n\n fn default_x_position(&self, _ui: &Ui) -> Position {\n\n Position::Relative(position::Relative::Place(Place::Middle), None)\n\n }\n\n\n\n fn default_y_position(&self, _ui: &Ui) -> Position {\n\n Position::Relative(position::Relative::Place(Place::Middle), None)\n\n }\n\n}\n", "file_path": "src/windowing_area/debug.rs", "rank": 52, "score": 8.183659956298632 }, { "content": " line_thickness,\n\n )\n\n .map(move |point| (point, upper_b_color)),\n\n );\n\n\n\n // Inside rectangle:\n\n let inside = polygon_to_triangle_points(\n\n make_rect(\n\n [x_left + border_thickness, y_top - border_thickness],\n\n [x_right - border_thickness, y_bottom + border_thickness],\n\n )\n\n .map(move |point| (point, inside_color)),\n\n );\n\n\n\n iter_chain![lower_a, upper_a, lower_b, upper_b, inside].map(widget::triangles::Triangle)\n\n}\n\n\n\npub(super) fn make_panel_frame(\n\n bottom_left: [f64; 2],\n\n top_right: [f64; 2],\n", "file_path": "src/classic_frame.rs", "rank": 53, "score": 8.099945776208632 }, { "content": " icon_rect.top_right(),\n\n hidpi_factor,\n\n );\n\n widget::Triangles::single_color(color::BLACK, icon_triangles)\n\n .with_bounding_rect(icon_rect)\n\n .top_left_with_margin_on(id, border_thickness)\n\n .graphics_for(id)\n\n .place_on_kid_area(false)\n\n .set(state.icon, ui);\n\n }\n\n ButtonType::Uncollapse => {\n\n let icon_triangles = classic_frame::make_uncollapse_button_icon(\n\n icon_rect.bottom_left(),\n\n icon_rect.top_right(),\n\n hidpi_factor,\n\n );\n\n widget::Triangles::single_color(color::BLACK, icon_triangles)\n\n 
.with_bounding_rect(icon_rect)\n\n .top_left_with_margin_on(id, border_thickness)\n\n .graphics_for(id)\n", "file_path": "src/classic_button.rs", "rank": 54, "score": 8.020857311186937 }, { "content": " ..\n\n } = self;\n\n\n\n let (interaction, times_triggered) = interaction_and_times_triggered(id, ui);\n\n\n\n // Draw a classic frame using triangles:\n\n let base_color = color::rgba(0.75, 0.75, 0.75, 1.0);\n\n let dpi_int = if hidpi_factor.fract() < 0.51 {\n\n hidpi_factor.trunc()\n\n } else {\n\n hidpi_factor.trunc() + 1.0\n\n };\n\n let border_thickness = 2.0 * dpi_int / hidpi_factor;\n\n let triangles = classic_frame::make_button_frame(\n\n rect.bottom_left(),\n\n rect.top_right(),\n\n border_thickness,\n\n base_color,\n\n interaction == Interaction::Press,\n\n );\n", "file_path": "src/classic_button.rs", "rank": 55, "score": 7.998969044668541 }, { "content": " } else {\n\n (shape_width_from_width, shape_height_from_width)\n\n }\n\n };\n\n let icon_pad_left = ((px_width - icon_px_width) / 2.0).round() / hidpi_factor;\n\n let icon_pad_bottom = ((px_height - icon_px_height) / 2.0).round() / hidpi_factor;\n\n let icon_width = icon_px_width / hidpi_factor;\n\n let icon_height = icon_px_height / hidpi_factor;\n\n\n\n let icon_bottom_left = [x_o + icon_pad_left, y_o + icon_pad_bottom];\n\n let icon_top_right = [\n\n x_o + icon_pad_left + icon_width,\n\n y_o + icon_pad_bottom + icon_height,\n\n ];\n\n\n\n make_close_icon_shape(icon_bottom_left, icon_top_right)\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 56, "score": 7.756688488606611 }, { "content": "#[derive(Clone, Copy, PartialEq, Debug)]\n\npub(crate) struct FrameMetrics {\n\n pub(crate) border_thickness: f64,\n\n pub(crate) title_bar_height: f64,\n\n pub(crate) gap_below_title_bar: f64,\n\n /// The window width of a collapsed window. 
This includes the borders on\n\n /// both sides.\n\n pub(crate) collapsed_win_width: f64,\n\n pub(crate) title_button_padding: f64,\n\n pub(crate) title_button_width: f64,\n\n pub(crate) title_text_padding: f64,\n\n}\n\n\n", "file_path": "src/windowing_area/layout.rs", "rank": 57, "score": 7.7459860343010725 }, { "content": "\n\n pub fn set_win_min_size(&mut self, win_id: WinId, min_size: [f32; 2]) {\n\n let WinId(win_idx) = win_id;\n\n if let Some(win) = &mut self.window_states[win_idx as usize] {\n\n let min_size: dim::SizeF = min_size.into();\n\n if win.min_size.w < min_size.w || win.min_size.h < min_size.h {\n\n // The new `min_size` is larger than the existing one, so we\n\n // might need to expand the window.\n\n let border_thickness = self.frame_metrics.border_thickness as f32;\n\n let title_bar_height = self.frame_metrics.title_bar_height as f32;\n\n let min_w = border_thickness * 2.0 + min_size.w;\n\n let min_h = border_thickness * 2.0 + title_bar_height + min_size.h;\n\n if win.rect.w < min_w {\n\n win.rect.w = min_w;\n\n }\n\n if win.rect.h < min_h {\n\n win.rect.h = min_h;\n\n }\n\n }\n\n win.min_size = min_size;\n", "file_path": "src/windowing_area/layout.rs", "rank": 58, "score": 7.635084442883636 }, { "content": " let part1 = polygon_to_triangle_points(value_iter_chain![\n\n [x_o, y_o],\n\n [x_o, y_o + sy],\n\n [x_o + icon_width - sx, y_o + icon_height],\n\n [x_o + icon_width, y_o + icon_height],\n\n [x_o + icon_width, y_o + icon_height - sy],\n\n [x_o + sx, y_o],\n\n ]);\n\n let part2 = polygon_to_triangle_points(value_iter_chain![\n\n [x_o, y_o + icon_height],\n\n [x_o + sx, y_o + icon_height],\n\n [x_o + sx + x_mid_offset, y_o + icon_height - x_mid_offset],\n\n [x_o + y_mid_offset, y_o + sy + y_mid_offset],\n\n [x_o, y_o + icon_height - sy],\n\n ]);\n\n let part3 = polygon_to_triangle_points(value_iter_chain![\n\n [x_o + icon_width, y_o],\n\n [x_o + icon_width - sx, y_o],\n\n [x_o + sx + x_mid_offset, y_o + x_mid_offset],\n\n [x_o + icon_width - 
y_mid_offset, y_o + sy + y_mid_offset],\n", "file_path": "src/classic_frame.rs", "rank": 59, "score": 7.621585463261419 }, { "content": " };\n\n\n\n // Close button:\n\n let close_clicked = if is_closable {\n\n classic_button::ClassicButton::new(classic_button::ButtonType::Close, hidpi_factor)\n\n .mid_right_with_margin_on(\n\n state.ids.title_bar_box,\n\n frame_metrics.title_button_padding,\n\n )\n\n .w_h(button_width, button_height)\n\n .parent(id)\n\n .place_on_kid_area(false)\n\n .set(state.ids.button_close, &mut ui)\n\n } else {\n\n widget::button::TimesClicked(0)\n\n };\n\n\n\n // Set the clipping box for the title bar text:\n\n let left_padding =\n\n frame_metrics.title_text_padding + if is_collapsible { button_width } else { 0.0 };\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 60, "score": 7.611860070677135 }, { "content": " // #[conrod(default = \"theme.border_color\")]\n\n // pub border_color: Option<Color>,\n\n\n\n // /// Padding for the left edge of the Canvas' kid area.\n\n // #[conrod(default = \"theme.padding.x.start\")]\n\n // pub pad_left: Option<Scalar>,\n\n // /// Padding for the right edge of the Canvas' kid area.\n\n // #[conrod(default = \"theme.padding.x.end\")]\n\n // pub pad_right: Option<Scalar>,\n\n // /// Padding for the bottom edge of the Canvas' kid area.\n\n // #[conrod(default = \"theme.padding.y.start\")]\n\n // pub pad_bottom: Option<Scalar>,\n\n // /// Padding for the top edge of the Canvas' kid area.\n\n // #[conrod(default = \"theme.padding.y.end\")]\n\n // pub pad_top: Option<Scalar>,\n\n /// The color of the title bar. 
Defaults to the color of the Canvas.\n\n #[conrod(default = \"theme.shape_color\")]\n\n pub title_bar_color: Option<Color>,\n\n /// The color of the title bar's text.\n\n #[conrod(default = \"theme.label_color\")]\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 61, "score": 7.452411124196843 }, { "content": " let right_padding =\n\n frame_metrics.title_text_padding + if is_closable { button_width } else { 0.0 };\n\n EmptyWidget::new()\n\n .x_position_relative_to(\n\n state.ids.title_bar_box,\n\n position::Relative::Place(position::Place::Start(Some(left_padding))),\n\n )\n\n .align_middle_y_of(state.ids.title_bar_box)\n\n .padded_w_of(\n\n state.ids.title_bar_box,\n\n (left_padding + right_padding) / 2.0,\n\n )\n\n .h_of(state.ids.title_bar_box)\n\n .graphics_for(state.ids.title_bar_box)\n\n .place_on_kid_area(false)\n\n .crop_kids()\n\n .set(state.ids.title_text_clip, &mut ui);\n\n\n\n // Draw the title bar text:\n\n let font_size = style.title_bar_font_size(&ui.theme);\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 63, "score": 7.335574774402561 }, { "content": " frame_metrics.title_bar_height - frame_metrics.title_button_padding * 2.0;\n\n\n\n // Collapse (minimize) button:\n\n let collapse_clicked = if is_collapsible {\n\n let button_type = if is_collapsed {\n\n classic_button::ButtonType::Uncollapse\n\n } else {\n\n classic_button::ButtonType::Collapse\n\n };\n\n classic_button::ClassicButton::new(button_type, hidpi_factor)\n\n .mid_left_with_margin_on(\n\n state.ids.title_bar_box,\n\n frame_metrics.title_button_padding,\n\n )\n\n .w_h(button_width, button_height)\n\n .parent(id)\n\n .place_on_kid_area(false)\n\n .set(state.ids.button_collapse, &mut ui)\n\n } else {\n\n widget::button::TimesClicked(0)\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 64, "score": 7.332484190898524 }, { "content": "\n\nimpl FrameMetrics {\n\n pub(crate) fn with_hidpi_factor(hidpi_factor: f64) -> Self {\n\n let dpi_int = if 
hidpi_factor.fract() < 0.51 {\n\n hidpi_factor.trunc()\n\n } else {\n\n hidpi_factor.trunc() + 1.0\n\n };\n\n let border_thickness = 4.0 * dpi_int / hidpi_factor;\n\n let gap_below_title_bar = 1.0 * dpi_int / hidpi_factor;\n\n let title_bar_height = (18.0 * hidpi_factor).round() / hidpi_factor;\n\n let collapsed_win_width =\n\n (150.0 * hidpi_factor + border_thickness * hidpi_factor * 2.0).round() / hidpi_factor;\n\n let title_button_padding = (2.0 * hidpi_factor).round() / hidpi_factor;\n\n let title_button_width = (16.0 * hidpi_factor).round() / hidpi_factor;\n\n let title_text_padding = (4.0 * hidpi_factor).round() / hidpi_factor;\n\n Self {\n\n border_thickness,\n\n title_bar_height,\n\n gap_below_title_bar,\n", "file_path": "src/windowing_area/layout.rs", "rank": 65, "score": 7.283167129260299 }, { "content": " // TODO: Make these configurable:\n\n let snap_threshold = (12.0 * hidpi_factor).round() as i32;\n\n let snap_margin = (8.0 * hidpi_factor).round() as i32;\n\n\n\n let dragging_state = self\n\n .maybe_dragging_window\n\n .as_mut()\n\n .unwrap_or_else(|| unreachable!());\n\n\n\n fn snap_dimension<D: dim::Dir>(\n\n try_snap: impl Fn(i32) -> Option<i32>,\n\n dim_range: dim::DimRange<i32, D>,\n\n snap_candidates: &[(WinId, snapping::SnapSegment<D>)],\n\n last_snapped: &mut Option<u32>,\n\n ) -> Option<i32> {\n\n ({\n\n last_snapped.and_then(|last_snapped_idx| {\n\n // Check the previously snapped window border.\n\n let (_, seg) = snap_candidates[last_snapped_idx as usize];\n\n if seg.dim_range().overlaps_with(dim_range) {\n", "file_path": "src/windowing_area/layout.rs", "rank": 67, "score": 6.989258989813301 }, { "content": " let upper_a_color = frame_color.upper_a;\n\n let lower_b_color = frame_color.lower_b;\n\n let upper_b_color = frame_color.upper_b;\n\n let inside_color = frame_color.inside;\n\n\n\n let line_thickness = border_thickness / 2.0;\n\n let [x_left, y_bottom] = bottom_left;\n\n let [x_right, y_top] = top_right;\n\n\n\n // Outmost 
(bottom-right) border:\n\n let lower_a = polygon_to_triangle_points(\n\n make_l_shape_polygon([x_right, y_bottom], [x_left, y_top], line_thickness)\n\n .map(move |point| (point, lower_a_color)),\n\n );\n\n\n\n // Outmost (top-left) border:\n\n let upper_a = polygon_to_triangle_points(\n\n make_l_shape_polygon(\n\n [x_left, y_top],\n\n [x_right - line_thickness, y_bottom + line_thickness],\n", "file_path": "src/classic_frame.rs", "rank": 68, "score": 6.975486183483749 }, { "content": " widget::Text::new(title)\n\n .no_line_wrap()\n\n .left_justify()\n\n .w_of(state.ids.title_text_clip)\n\n .middle_of(state.ids.title_text_clip)\n\n .color(color::WHITE)\n\n .font_size(font_size)\n\n .graphics_for(state.ids.title_text_clip)\n\n .place_on_kid_area(false)\n\n .set(state.ids.title_text, &mut ui);\n\n\n\n Event {\n\n collapse_clicked,\n\n close_clicked,\n\n }\n\n }\n\n}\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 69, "score": 6.789757906197504 }, { "content": " let is_drag_move_window =\n\n ui.global_input().current.modifiers == conrod_core::input::ModifierKey::ALT;\n\n if is_drag_move_window {\n\n // Add an empty widget on top for mouse capturing.\n\n EmptyWidget::new()\n\n .graphics_for(id)\n\n .place_on_kid_area(false)\n\n .xy(rect.xy())\n\n .wh(rect.dim())\n\n .depth(position::Depth::MIN)\n\n .set(state.ids.capture_overlay, &mut ui);\n\n }\n\n\n\n if state.ids.window_frames.len() != windowing_state.win_count() {\n\n let target_len = windowing_state.win_count();\n\n state.update(|state| {\n\n state\n\n .ids\n\n .window_frames\n\n .resize(target_len, &mut ui.widget_id_generator());\n", "file_path": "src/windowing_area.rs", "rank": 71, "score": 6.634304118022748 }, { "content": " let title_bar_height = self.frame_metrics.title_bar_height as f32;\n\n let area_w = (self.area_size[0] * hidpi_factor) as i32;\n\n let area_h = (self.area_size[1] * hidpi_factor) as i32;\n\n let snap_margin = (8.0 * hidpi_factor).round() as i32;\n\n\n\n let mut rect = self\n\n 
.win_normal_rect_int(win_id)\n\n .unwrap_or_else(|| unreachable!());\n\n let display_size = self\n\n .win_display_rect_int(win_id)\n\n .unwrap_or_else(|| unreachable!())\n\n .size();\n\n let min_w = ((border_thickness * 2.0 + win.min_size.w) * hidpi_factor).round() as i32;\n\n let min_h = ((border_thickness * 2.0 + title_bar_height + win.min_size.h) * hidpi_factor)\n\n .round() as i32;\n\n\n\n match win.anchor_x {\n\n snapping::Anchor::None => {}\n\n snapping::Anchor::LowerEdge => {\n\n rect.x = 0 + snap_margin;\n", "file_path": "src/windowing_area/layout.rs", "rank": 72, "score": 6.479435918832303 }, { "content": " let win_id = WinId(i as u32);\n\n self.win_recompute_snapping_rect(win_id);\n\n }\n\n }\n\n\n\n fn win_recompute_snapping_rect(&mut self, win_id: WinId) {\n\n let win_idx = win_id.0 as usize;\n\n let win = match &self.window_states[win_idx] {\n\n Some(win) => win,\n\n None => return,\n\n };\n\n if win.is_hidden {\n\n return;\n\n }\n\n if win.anchor_x == snapping::Anchor::None && win.anchor_y == snapping::Anchor::None {\n\n return;\n\n }\n\n\n\n let hidpi_factor = self.hidpi_factor as f32;\n\n let border_thickness = self.frame_metrics.border_thickness as f32;\n", "file_path": "src/windowing_area/layout.rs", "rank": 73, "score": 6.33053809399382 }, { "content": " pub title_bar_text_color: Option<Color>,\n\n /// The font size for the title bar's text.\n\n #[conrod(default = \"theme.font_size_small\")]\n\n pub title_bar_font_size: Option<FontSize>,\n\n // /// The way in which the title bar's text should wrap.\n\n // #[conrod(default = \"None\")]\n\n // pub title_bar_maybe_wrap: Option<Option<widget::text::Wrap>>,\n\n // /// The distance between lines for multi-line title bar text.\n\n // #[conrod(default = \"1.0\")]\n\n // pub title_bar_line_spacing: Option<Scalar>,\n\n /// The label's typographic alignment over the *x* axis.\n\n #[conrod(default = \"text::Justify::Left\")]\n\n pub title_bar_justify: Option<text::Justify>,\n\n}\n\n\n\n#[derive(Clone, 
Debug)]\n\npub struct Event {\n\n pub collapse_clicked: widget::button::TimesClicked,\n\n pub close_clicked: widget::button::TimesClicked,\n\n}\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 74, "score": 6.189053413914454 }, { "content": "}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct WindowBuilder<'a> {\n\n pub title: &'a str,\n\n pub initial_position: Option<[f32; 2]>,\n\n pub initial_size: Option<[f32; 2]>,\n\n pub min_size: Option<[f32; 2]>,\n\n pub is_hidden: bool,\n\n pub is_collapsible: bool,\n\n pub is_closable: bool,\n\n pub is_collapsed: Option<bool>,\n\n _private: (),\n\n}\n\n\n\npub struct WindowEvent {\n\n pub collapse_clicked: widget::button::TimesClicked,\n\n pub close_clicked: widget::button::TimesClicked,\n\n pub title_bar_double_click_count: u32,\n\n}\n", "file_path": "src/windowing_area.rs", "rank": 75, "score": 6.1869549398754575 }, { "content": " }\n\n }\n\n }\n\n\n\n pub fn next_id(&mut self) -> WinId {\n\n let id = self.window_states.len() as u32;\n\n self.window_states.push(None);\n\n self.window_z_orders.push(id);\n\n let win_id = WinId(id);\n\n self.bottom_to_top_list.push(win_id);\n\n win_id\n\n }\n\n\n\n /// Ensures that the window specified by `win_id` has been initialized. If\n\n /// the window has not been initialized, the `init` callback is called to\n\n /// obtain the initial states for the window.\n\n pub fn ensure_init<F>(&mut self, win_id: WinId, init: F)\n\n where\n\n F: FnOnce() -> WindowInitialState,\n\n {\n", "file_path": "src/windowing_area/layout.rs", "rank": 76, "score": 6.1729028339895144 }, { "content": "use conrod_core::{color, widget};\n\n\n\nmacro_rules! value_iter_chain{\n\n ($item:expr, $(,)?) => {\n\n ::std::iter::once($item)\n\n };\n\n ($first:expr, $($others:expr),+ $(,)?) => {\n\n value_iter_chain!($first,).chain(value_iter_chain!($($others,)+))\n\n };\n\n}\n\n\n\nmacro_rules! iter_chain{\n\n ($item:expr, $(,)?) => {\n\n $item\n\n };\n\n ($first:expr, $($others:expr),+ $(,)?) 
=> {\n\n iter_chain!($first,).chain(iter_chain!($($others,)+))\n\n };\n\n}\n\n\n", "file_path": "src/classic_frame.rs", "rank": 77, "score": 6.138277471285555 }, { "content": " let dy = (offset[1] * hidpi_factor).round() as i32;\n\n\n\n // Ensure the window being dragged is topmost.\n\n self.bring_to_top(win_id);\n\n\n\n let border_thickness = self.frame_metrics.border_thickness as f32;\n\n let title_bar_height = self.frame_metrics.title_bar_height as f32;\n\n\n\n let area_size = dim::SizeI {\n\n w: (self.area_size[0] * hidpi_factor) as i32,\n\n h: (self.area_size[1] * hidpi_factor) as i32,\n\n };\n\n\n\n let win = self.window_states[win_id.0 as usize]\n\n .as_ref()\n\n .unwrap_or_else(|| unreachable!());\n\n let min_w = ((border_thickness * 2.0 + win.min_size.w) * hidpi_factor).round() as i32;\n\n let min_h = ((border_thickness * 2.0 + title_bar_height + win.min_size.h) * hidpi_factor)\n\n .round() as i32;\n\n\n", "file_path": "src/windowing_area/layout.rs", "rank": 78, "score": 6.10561126902004 }, { "content": " line_thickness,\n\n )\n\n .map(move |point| (point, upper_a_color)),\n\n );\n\n\n\n // Inner (bottom-right) border:\n\n let lower_b = polygon_to_triangle_points(\n\n make_l_shape_polygon(\n\n [x_right - line_thickness, y_bottom + line_thickness],\n\n [x_left + line_thickness, y_top - line_thickness],\n\n line_thickness,\n\n )\n\n .map(move |point| (point, lower_b_color)),\n\n );\n\n\n\n // Inner (top-left) border:\n\n let upper_b = polygon_to_triangle_points(\n\n make_l_shape_polygon(\n\n [x_left + line_thickness, y_top - line_thickness],\n\n [x_right - border_thickness, y_bottom + border_thickness],\n", "file_path": "src/classic_frame.rs", "rank": 79, "score": 6.067232969735792 }, { "content": " is_focused: true,\n\n is_collapsed: false,\n\n is_collapsible: true,\n\n is_closable: false,\n\n }\n\n }\n\n\n\n builder_methods! 
{\n\n pub title { title = &'a str }\n\n pub is_focused { is_focused = bool }\n\n pub is_collapsed { is_collapsed = bool }\n\n pub is_collapsible { is_collapsible = bool }\n\n pub is_closable { is_closable = bool }\n\n }\n\n\n\n pub fn frame_color(mut self, color: Color) -> Self {\n\n self.style.frame_color = Some(color);\n\n self\n\n }\n\n\n", "file_path": "src/windowing_area/window_frame.rs", "rank": 80, "score": 6.057342786471772 }, { "content": " WindowPartX::LeftBorder\n\n } else if x > w - border_thickness {\n\n WindowPartX::RightBorder\n\n } else {\n\n WindowPartX::Content\n\n };\n\n let window_part_y = if y <= border_thickness {\n\n WindowPartY::TopBorder\n\n } else if y > h - border_thickness {\n\n WindowPartY::BottomBorder\n\n } else if y <= border_thickness + title_bar_height {\n\n WindowPartY::TitleBar\n\n } else {\n\n WindowPartY::Content\n\n };\n\n\n\n let corner_leeway = border_thickness * 3;\n\n let (is_near_l, is_near_r) = if x <= corner_leeway {\n\n (true, false)\n\n } else if x > w - corner_leeway {\n", "file_path": "src/windowing_area/layout.rs", "rank": 81, "score": 5.972611208295588 }, { "content": " .place_on_kid_area(false)\n\n .set(state.icon, ui);\n\n }\n\n ButtonType::Close => {\n\n let icon_triangles = classic_frame::make_close_button_icon(\n\n icon_rect.bottom_left(),\n\n icon_rect.top_right(),\n\n hidpi_factor,\n\n );\n\n widget::Triangles::single_color(color::BLACK, icon_triangles)\n\n .with_bounding_rect(icon_rect)\n\n .top_left_with_margin_on(id, border_thickness)\n\n .graphics_for(id)\n\n .place_on_kid_area(false)\n\n .set(state.icon, ui);\n\n }\n\n }\n\n\n\n TimesClicked(times_triggered)\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq)]\n", "file_path": "src/classic_button.rs", "rank": 82, "score": 5.922090514813339 }, { "content": " };\n\n if window_is_collapsed {\n\n (event, None)\n\n } else {\n\n (\n\n event,\n\n Some(WindowSetter {\n\n window_frame_id,\n\n content_widget_id,\n\n }),\n\n )\n\n }\n\n }\n\n}\n\n\n\nimpl 
WindowSetter {\n\n pub fn set<W>(self, widget: W, ui: &mut UiCell) -> (widget::Id, W::Event)\n\n where\n\n W: Widget,\n\n {\n\n let event = widget\n\n .kid_area_wh_of(self.window_frame_id)\n\n .parent(self.window_frame_id)\n\n .set(self.content_widget_id, ui);\n\n (self.content_widget_id, event)\n\n }\n\n}\n", "file_path": "src/windowing_area.rs", "rank": 83, "score": 5.778988066067124 }, { "content": "\n\npub struct WindowSetter {\n\n window_frame_id: widget::Id,\n\n content_widget_id: widget::Id,\n\n}\n\n\n\nwidget_ids! {\n\n struct Ids {\n\n capture_overlay,\n\n window_frames[],\n\n // window_titles[],\n\n window_contents[],\n\n debug,\n\n }\n\n}\n\n\n\nimpl<'a> WindowingArea<'a> {\n\n pub fn new(windowing_state: &'a mut WindowingState, hidpi_factor: f64) -> Self {\n\n Self {\n\n common: widget::CommonBuilder::default(),\n", "file_path": "src/windowing_area.rs", "rank": 84, "score": 5.744305067732904 }, { "content": " ids: Ids::new(id_gen),\n\n maybe_dragging_win: None,\n\n }\n\n }\n\n\n\n fn style(&self) -> Self::Style {\n\n self.style.clone()\n\n }\n\n\n\n fn is_over(&self) -> widget::IsOverFn {\n\n // We want this widget to not capture mouse events. This does not\n\n // affect individual window frames as they still capture mouse events\n\n // on their own. 
Alt+Drag window movement is handled by an overlay\n\n // widget that captures mouse events so it is also not affected.\n\n |_, _, _| widget::IsOver::Bool(false)\n\n }\n\n\n\n fn update(self, args: widget::UpdateArgs<Self>) -> Self::Event {\n\n let widget::UpdateArgs {\n\n id,\n", "file_path": "src/windowing_area.rs", "rank": 85, "score": 5.662359776585713 }, { "content": " )\n\n })\n\n .collect()\n\n }\n\n }\n\n }\n\n\n\n // Gather a list of borders of other windows that could\n\n // possibly be snapped to.\n\n // TODO: Possible optimization by filtering out impossible borders.\n\n let base_iter = self\n\n .window_states\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, _)| {\n\n let i_win_id = WinId(i as u32);\n\n if i_win_id != win_id {\n\n Some(i_win_id)\n\n } else {\n\n None\n", "file_path": "src/windowing_area/layout.rs", "rank": 86, "score": 5.602539499976621 }, { "content": " type Event = TimesClicked;\n\n\n\n fn init_state(&self, id_gen: conrod_core::widget::id::Generator) -> Self::State {\n\n Ids::new(id_gen)\n\n }\n\n\n\n fn style(&self) -> Self::Style {}\n\n\n\n fn update(self, args: conrod_core::widget::UpdateArgs<Self>) -> Self::Event {\n\n let widget::UpdateArgs {\n\n id,\n\n state,\n\n rect,\n\n ui,\n\n ..\n\n } = args;\n\n let state: &mut widget::State<Ids> = state;\n\n let Self {\n\n button_type,\n\n hidpi_factor,\n", "file_path": "src/classic_button.rs", "rank": 87, "score": 5.381071053304257 }, { "content": "\n\n pub fn lower(self) -> T {\n\n self.lower\n\n }\n\n\n\n pub fn upper(self) -> T {\n\n self.upper\n\n }\n\n\n\n pub fn overlaps_with(self, other: Self) -> bool {\n\n self.lower < other.upper && other.lower < self.upper\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_dim_range_ctor() {\n", "file_path": "src/windowing_area/layout/dim.rs", "rank": 88, "score": 5.2071061160104755 }, { "content": " y: (rect.y * hidpi_factor).round() / hidpi_factor,\n\n w: collapsed_win_width,\n\n h: title_bar_height + 
border_thickness * 2.0,\n\n })\n\n } else {\n\n self.win_normal_rect(win_id)\n\n }\n\n }\n\n\n\n /// Retrieves the `RectInt` of a window for display. The `RectInt` is in\n\n /// unscaled physical pixels.\n\n pub fn win_display_rect_int(&self, win_id: WinId) -> Option<RectI> {\n\n let WinId(win_idx) = win_id;\n\n let win = self.window_states[win_idx as usize].as_ref()?;\n\n if win.is_hidden {\n\n return None;\n\n }\n\n if win.is_collapsed {\n\n let rect = win.rect;\n\n let hidpi_factor = self.hidpi_factor as f32;\n", "file_path": "src/windowing_area/layout.rs", "rank": 89, "score": 5.198309352990464 }, { "content": " ..\n\n } = self;\n\n\n\n let win_rect = match windowing_state.win_normal_rect_f64(win_id) {\n\n Some(x) => x,\n\n None => return,\n\n };\n\n\n\n let win_rect_in_conrod = util::win_rect_to_conrod_rect(win_rect, rect);\n\n\n\n widget::Rectangle::fill_with(rect.dim(), conrod_core::color::rgba(1.0, 0.8, 0.0, 0.3))\n\n .xy(win_rect_in_conrod.xy())\n\n .wh(win_rect_in_conrod.dim())\n\n .graphics_for(id)\n\n .set(state.ids.window_rect_display, &mut ui);\n\n\n\n let debug = windowing_state.debug();\n\n\n\n macro_rules! get_id {\n\n ($list:ident, $i:expr) => {{\n", "file_path": "src/windowing_area/debug.rs", "rank": 90, "score": 5.180320120417432 }, { "content": " w: (rect.w * hidpi_factor).round() as i32,\n\n h: (rect.h * hidpi_factor).round() as i32,\n\n })\n\n }\n\n\n\n /// Retrieves the x, y, width and height of a window in its normal state.\n\n /// The dimensions are adjusted to align to the physical pixel grid. 
The\n\n /// calculations use f64 so that the results are precise enough for GUI\n\n /// toolkits that use f64 internally.\n\n pub fn win_normal_rect_f64(&self, win_id: WinId) -> Option<[f64; 4]> {\n\n let WinId(win_idx) = win_id;\n\n let win = self.window_states[win_idx as usize].as_ref()?;\n\n let rect = win.rect;\n\n let hidpi_factor = self.hidpi_factor;\n\n Some([\n\n (rect.x as f64 * hidpi_factor).round() / hidpi_factor,\n\n (rect.y as f64 * hidpi_factor).round() / hidpi_factor,\n\n (rect.w as f64 * hidpi_factor).round() / hidpi_factor,\n\n (rect.h as f64 * hidpi_factor).round() / hidpi_factor,\n\n ])\n", "file_path": "src/windowing_area/layout.rs", "rank": 91, "score": 5.096112413109374 }, { "content": " style: Style::default(),\n\n windowing_state,\n\n hidpi_factor,\n\n enable_debug: false,\n\n }\n\n }\n\n\n\n pub fn with_debug(mut self, enabled: bool) -> Self {\n\n self.enable_debug = enabled;\n\n self\n\n }\n\n}\n\n\n\nimpl<'a> Widget for WindowingArea<'a> {\n\n type State = State;\n\n type Style = Style;\n\n type Event = WindowingContext<'a>;\n\n\n\n fn init_state(&self, id_gen: widget::id::Generator) -> Self::State {\n\n State {\n", "file_path": "src/windowing_area.rs", "rank": 92, "score": 5.043664714984902 }, { "content": " pub style: Style,\n\n pub windowing_state: &'a mut WindowingState,\n\n pub hidpi_factor: f64,\n\n pub enable_debug: bool,\n\n}\n\n\n\npub struct State {\n\n ids: Ids,\n\n maybe_dragging_win: Option<bool>,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Default, PartialEq, WidgetStyle)]\n\npub struct Style {}\n\n\n\npub struct WindowingContext<'a> {\n\n windowing_area_id: widget::Id,\n\n windowing_area_rect: conrod_core::Rect,\n\n windowing_state: &'a mut WindowingState,\n\n frame_metrics: FrameMetrics,\n\n hidpi_factor: f64,\n", "file_path": "src/windowing_area.rs", "rank": 93, "score": 5.01554180746911 }, { "content": " collapsed_win_width,\n\n title_button_padding,\n\n title_button_width,\n\n title_text_padding,\n\n }\n\n 
}\n\n}\n\n\n\nimpl WindowingState {\n\n pub fn new() -> Self {\n\n Self {\n\n area_size: [16_777_216.0, 16_777_216.0],\n\n hidpi_factor: 1.0,\n\n window_states: Vec::new(),\n\n window_z_orders: Vec::new(),\n\n bottom_to_top_list: Vec::new(),\n\n frame_metrics: FrameMetrics::with_hidpi_factor(1.0),\n\n maybe_dragging_window: None,\n\n next_auto_position: [32.0, 32.0],\n\n }\n", "file_path": "src/windowing_area/layout.rs", "rank": 94, "score": 4.9662496090834765 }, { "content": "\n\n pub(crate) fn sweep_unneeded(&mut self) {\n\n for win in &mut self.window_states {\n\n if win.as_ref().map_or(false, |x| !x.is_needed) {\n\n *win = None;\n\n }\n\n }\n\n }\n\n\n\n pub fn win_hit_test(&self, pos: [f32; 2]) -> Option<(WinId, HitTest)> {\n\n self.bottom_to_top_list.iter().rev().find_map(|&win_id| {\n\n self.specific_win_hit_test(win_id, pos)\n\n .map(|ht| (win_id, ht))\n\n })\n\n }\n\n\n\n pub fn win_hit_test_filtered<F>(&self, pos: [f32; 2], mut f: F) -> Option<(WinId, HitTest)>\n\n where\n\n F: FnMut(WinId) -> bool,\n\n {\n", "file_path": "src/windowing_area/layout.rs", "rank": 95, "score": 4.960967211589306 }, { "content": " widget::Triangles::multi_color(triangles)\n\n .with_bounding_rect(rect)\n\n .middle_of(id)\n\n .graphics_for(id)\n\n .place_on_kid_area(false)\n\n .set(state.frame, ui);\n\n\n\n let click_shift = 1.0 * dpi_int / hidpi_factor;\n\n let icon_rect = {\n\n let icon_rect = rect.pad(border_thickness);\n\n if interaction == Interaction::Press {\n\n icon_rect.pad_left(click_shift).pad_top(click_shift)\n\n } else {\n\n icon_rect.pad_right(click_shift).pad_bottom(click_shift)\n\n }\n\n };\n\n match button_type {\n\n ButtonType::Collapse => {\n\n let icon_triangles = classic_frame::make_collapse_button_icon(\n\n icon_rect.bottom_left(),\n", "file_path": "src/classic_button.rs", "rank": 96, "score": 4.769127925179823 }, { "content": " state,\n\n rect,\n\n mut ui,\n\n ..\n\n } = args;\n\n let state: &mut conrod_core::widget::State<State> = state;\n\n let Self 
{\n\n windowing_state,\n\n hidpi_factor,\n\n enable_debug,\n\n ..\n\n } = self;\n\n\n\n // Snap the rect inward to the physical pixel grid if needed.\n\n let rect = if rect.dim() == ui.window_dim() {\n\n // The window dimensions are already aligned to the pixel grid.\n\n rect\n\n } else {\n\n // First, we need the coords relative to the bottom-left corner of\n\n // the window.\n", "file_path": "src/windowing_area.rs", "rank": 97, "score": 4.719825652566103 }, { "content": " };\n\n if win.is_collapsed == is_collapsed {\n\n return;\n\n }\n\n\n\n let win = self.window_states[win_idx as usize]\n\n .as_mut()\n\n .unwrap_or_else(|| unreachable!());\n\n win.is_collapsed = is_collapsed;\n\n\n\n self.win_recompute_snapping_rect(win_id);\n\n }\n\n\n\n pub fn win_z_order(&self, win_id: WinId) -> u32 {\n\n let WinId(win_idx) = win_id;\n\n self.window_z_orders[win_idx as usize]\n\n }\n\n\n\n pub fn bring_to_top(&mut self, win_id: WinId) {\n\n let WinId(win_idx) = win_id;\n", "file_path": "src/windowing_area/layout.rs", "rank": 98, "score": 4.6004863776770515 }, { "content": " *last_snapped = Some(i as u32);\n\n Some(snap)\n\n } else {\n\n *last_snapped = None;\n\n None\n\n }\n\n })\n\n };\n\n\n\n fn calc_new_dimensions<D: dim::Dir>(\n\n dragging_hit_test: HitTest,\n\n starting_rect: RectI,\n\n prev_display_rect: RectI,\n\n delta: i32,\n\n win_min_size: i32,\n\n area_size: dim::SizeI,\n\n snap_margin: i32,\n\n snap_threshold: i32,\n\n snap_candidates: &[(WinId, snapping::SnapSegment<D::PerpendicularDir>)],\n\n last_snapped: &mut Option<u32>,\n", "file_path": "src/windowing_area/layout.rs", "rank": 99, "score": 4.5872228470898655 } ]